view collect_issues.py @ 36:59e1659a0a0b tip

Update 'TODO.creole' * Old TODOs moved to 'doc/old_TODO.creole'
author Magnus Schieder <mschieder@intevation.de>
date Mon, 26 Nov 2018 16:52:45 +0100
parents b07588ac28b6
children
line wrap: on
line source
#!/usr/bin/env python3
""" Fetch issues from a roundup-tracker and save them in a databse.

author: Sascha L. Teichmann <sascha.teichmann@intevation.de>
author: Bernhard Reiter <bernhard@intevation.de>
author: Sean Engelhardt <sean.engelhardt@intevation.de>
author: Magnus Schieder <magnus.schieder@intevation.de>

(c) 2010, 2015, 2018 by Intevation GmbH

This is Free Software under the terms of the
GNU GENERAL PUBLIC LICENSE Version 3 or later.
See http://www.gnu.org/licenses/gpl-3.0.txt for details

For usage see examples/.
"""

import csv
import http.cookiejar
import io
import os
import sqlite3 as db
import sys
import urllib.error
import urllib.parse
import urllib.request


# Roundup export URL: all keywords and their ids.
CHECK_KEYWORD_VALUES = "keyword?@action=export_csv&@columns=id,name"
# Roundup export URL: all states and their ids.
CHECK_STATUS_VALUES = "status?@action=export_csv&@columns=id,name"
# Roundup export URL: all priorities and their ids.
CHECK_PRIO_VALUES = "priority?@action=export_csv&@columns=id,name"

# Search URL: the priority of each issue, filtered by status, keyword, priority.
SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword,priority&@pagesize=500&@startwith=0&status={status_values}&keyword={keyword_values}&priority={priority_values}"
# Search URL: the status of each issue, filtered by priority, keyword, status.
SEARCH_ROUNDUP_STATUS = "issue?@action=export_csv&@columns=status&@filter=priority,keyword,status&@pagesize=500&@startwith=0&priority={priority_values}&keyword={keyword_values}&status={status_values}"


def connect_to_server(params, baseurl):
    """Log in to the roundup tracker and return a URL opener.

    params: dict of POST parameters for the login request.
    baseurl: base URL of the tracker.

    The returned opener carries a cookie jar, so the session cookie
    received on login is sent with every subsequent request.
    """
    login_data = urllib.parse.urlencode(params).encode()
    cookie_jar = http.cookiejar.CookieJar()
    url_opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(cookie_jar))
    login_request = urllib.request.Request(url=baseurl, data=login_data)
    url_opener.open(login_request)
    return url_opener


def get_csv_from_server(opener, roundup_url, sub_url):
    """Request a CSV export from the tracker.

    Returns a csv.DictReader over the decoded response body.
    """
    response = opener.open(
            urllib.request.Request(url=roundup_url + sub_url))
    return csv.DictReader(io.TextIOWrapper(response))


def check_create_database(database_file, sql_create_db):
    """Create the sqlite database if it does not exist yet.

    Runs sql_create_db in a fresh database file and makes the file
    world-readable.  Does nothing when the file is already present.
    """
    if os.path.isfile(database_file):
        return

    connection = None
    cursor = None
    try:
        connection = db.connect(database_file)
        cursor = connection.cursor()
        try:
            cursor.execute(sql_create_db)
            connection.commit()
            os.chmod(database_file, 0o644)
        except:
            # Undo the partial schema before propagating the error.
            connection.rollback()
            raise
    finally:
        if cursor:
            cursor.close()
        if connection:
            connection.close()

def get_ids(opener, baseurl, parameter, url, include_no_prio=False):
    """Resolve search-parameter names to tracker ids.

    Fetches the name->id mapping from the export URL and looks up each
    entry of ``parameter``.

    parameter: list of names to resolve; [""] means "no filter" and
        yields ("", []) without contacting the server.
    include_no_prio: also accept the name "None", mapped to the pseudo
        id "-1" roundup uses for issues without a priority.

    Returns a tuple (ids_joined, ids_list): the ids comma-joined for a
    search URL, and the same ids as a list.

    Exits the program when a name is unknown to the tracker.
    """
    if parameter == [""]:
        return ("", [])

    parameter_csv = get_csv_from_server(opener, baseurl, url)
    parameter_dict = {row["name"]: row["id"] for row in parameter_csv}

    if include_no_prio:
        # Pseudo-id used by roundup to search for issues without priority.
        parameter_dict["None"] = "-1"

    parameter_ids = []
    for name in parameter:
        if name not in parameter_dict:
            print('The parameter "%s" does not exist in the tracker.' % name)
            # Exit non-zero so calling scripts can detect the failure
            # (previously exited with status 0, i.e. success).
            sys.exit(1)
        parameter_ids.append(parameter_dict[name])

    return (",".join(parameter_ids), parameter_ids)


def issues_to_quantities(issue_csv, columns_ids):
    """Count issues per priority/status id.

    issue_csv: csv.DictReader whose single column holds the priority
        (or status) id of each issue.
    columns_ids: the ids in the order configured by the caller; counts
        are returned in this order.  Issues with a non-numeric value
        (no priority set) are counted in the last slot.

    Returns: a list of ints, containing how often a prio/status
    occurred, in the order in which they are specified in the config.
    """
    # Position of every id in the result list.
    order_dict = {column_id: pos
                  for pos, column_id in enumerate(columns_ids)}

    quantities = [0] * len(columns_ids)

    # The export has exactly one column; hoist the name lookup.
    column = issue_csv.fieldnames[0]
    for issue in issue_csv:
        value = issue[column]
        if value.isdigit():
            quantities[order_dict[value]] += 1
        else:
            # No priority set: counted in the last configured slot.
            quantities[-1] += 1

    return quantities


def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Insert one row of quantities into the database.

    The database file is created first (via check_create_database)
    when it does not exist yet.
    """
    check_create_database(database_file, sql_create_db)

    connection = None
    cursor = None
    try:
        connection = db.connect(database_file)
        cursor = connection.cursor()
        try:
            cursor.execute(sql_insert_in_db, quantities)
            connection.commit()
        except:
            # Discard the failed insert before propagating the error.
            connection.rollback()
            raise
    finally:
        if cursor:
            cursor.close()
        if connection:
            connection.close()


def save_stats_in_db(search, login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, search_parameters, include_no_prio):
    """Fetch issue counts from the tracker and store them in the database.

    search: "prio" (columns are priorities, search_parameters are
        states) or "status" (columns are states, search_parameters are
        priorities).
    login_parmeters: POST parameters for the tracker login.
    baseurl: base URL of the roundup tracker.
    db_file: path of the sqlite database to write to.
    columns: names whose per-id counts are stored, in order.
    keywords: keyword names used to filter the issues.
    search_parameters: names of the complementary filter (see search).
    include_no_prio: also count issues without a priority.

    Raises ValueError for an unknown value of ``search``.  Connection
    problems are reported on stdout instead of raising.
    """
    try:
        opener = connect_to_server(login_parmeters, baseurl)

        keywords_ids_url, _ = get_ids(opener, baseurl, keywords,
                CHECK_KEYWORD_VALUES)

        if search == "prio":
            # search_parameters are states.
            status_ids_url, _ = get_ids(opener, baseurl, search_parameters,
                    CHECK_STATUS_VALUES, include_no_prio)
            prio_ids_url, columns_ids = get_ids(opener, baseurl, columns,
                    CHECK_PRIO_VALUES, include_no_prio)
            formated_search_url = SEARCH_ROUNDUP_PRIO.format(
                    status_values=status_ids_url,
                    keyword_values=keywords_ids_url,
                    priority_values=prio_ids_url)

        elif search == "status":
            # search_parameters are priorities.
            prio_ids_url, _ = get_ids(opener, baseurl, search_parameters,
                    CHECK_PRIO_VALUES, include_no_prio)
            status_ids_url, columns_ids = get_ids(opener, baseurl, columns,
                    CHECK_STATUS_VALUES)
            formated_search_url = SEARCH_ROUNDUP_STATUS.format(
                    priority_values=prio_ids_url,
                    keyword_values=keywords_ids_url,
                    status_values=status_ids_url)

        else:
            # Previously an unknown value fell through and raised a
            # confusing NameError on formated_search_url below.
            raise ValueError(
                    'search must be "prio" or "status", got %r' % (search,))

        current_issues_csv = get_csv_from_server(opener, baseurl,
                formated_search_url)

        opener.close()

        quantities = issues_to_quantities(current_issues_csv, columns_ids)

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))
This site is hosted by Intevation GmbH (Datenschutzerklärung und Impressum | Privacy Policy and Imprint)