view collect_issues.py @ 27:cdab667c6abb

Delete Code Duplication and Clean Up. * The search for the status does not require the "-1". * If the parameter you are looking for is not available in the tracker, an error message is issued and the program terminates, to avoid incorrect entries in the database
author Magnus Schieder <mschieder@intevation.de>
date Tue, 13 Nov 2018 21:04:22 +0100
parents 761ee2351f58
children e2864dabdb8c
line wrap: on
line source
#!/usr/bin/env python3
""" Fetch issues from a roundup-tracker and save them in a databse.

author: Sascha L. Teichmann <sascha.teichmann@intevation.de>
author: Bernhard Reiter <bernhard@intevation.de>
author: Sean Engelhardt <sean.engelhardt@intevation.de>

(c) 2010, 2015, 2018 by Intevation GmbH

This is Free Software under the terms of the
GNU GENERAL PUBLIC LICENSE Version 3 or later.
See http://www.gnu.org/licenses/gpl-3.0.txt for details

For usage see examples/.
"""

import csv
import http.cookiejar
import io
import os
import sqlite3 as db
import sys
import urllib.error
import urllib.parse
import urllib.request


# Tracker export query: all priorities with their sort order.
CHECK_ROUNDUP_ORDER_PRIO = "priority?@action=export_csv&@columns=id,order"
# Tracker export query: all statuses with their sort order.
CHECK_ROUNDUP_ORDER_STATUS = "status?@action=export_csv&@columns=id,order"

# Tracker export query: all keywords and their ids.
CHECK_KEYWORD_VALUES = "keyword?@action=export_csv&@columns=id,name"
# Tracker export query: all statuses and their ids.
CHECK_STATUS_VALUES = "status?@action=export_csv&@columns=id,name"

# Search query: the priority of each issue, filtered by status and keywords.
SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}"
# Search query: the status of each issue, filtered by keywords.
SEARCH_ROUNDUP_STATUS = "issue?@action=export_csv&@columns=status&@filter=keyword&@pagesize=500&@startwith=0&keyword={keyword_values}"


def connect_to_server(params, baseurl):
    """Log in at the tracker and return an opener holding the session cookie.

    params: dict of login form fields posted to the tracker.
    baseurl: URL of the tracker the login request is sent to.
    """
    cookie_jar = http.cookiejar.CookieJar()
    session_opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cookie_jar))
    login_request = urllib.request.Request(
        url=baseurl, data=urllib.parse.urlencode(params).encode())
    session_opener.open(login_request)
    return session_opener


def get_csv_from_server(opener, roundup_url, sub_url):
    """Request a CSV export from the tracker and return a DictReader over it.

    opener: opener carrying the authenticated session.
    roundup_url: base URL of the tracker.
    sub_url: export query appended to the base URL.
    """
    response = opener.open(urllib.request.Request(url=roundup_url + sub_url))
    return csv.DictReader(io.TextIOWrapper(response))


def check_create_database(database_file, sql_create_db):
    """Create the sqlite database file if it does not exist yet.

    database_file: path of the sqlite database file.
    sql_create_db: SQL statement that creates the schema.

    The new file is made world-readable (0o644) so e.g. a web server
    can serve the collected statistics. Does nothing if the file exists.
    """
    if os.path.isfile(database_file):
        return

    con = db.connect(database_file)
    try:
        # The connection used as context manager commits on success and
        # rolls back on error; it does not close the connection.
        with con:
            con.execute(sql_create_db)
        os.chmod(database_file, 0o644)
    finally:
        con.close()

def get_ids(opener, baseurl, parameter, url):
    """Translate parameter names (keywords, statuses) into tracker ids.

    opener: opener carrying the authenticated session.
    baseurl: base URL of the tracker.
    parameter: list of names to look up; [""] means "no filter set".
    url: export query returning the id/name mapping for this parameter.

    Returns a comma-separated string of ids ("" if no names were given).
    Terminates the program with an error message if a name is unknown
    in the tracker, to avoid writing wrong entries into the database.
    """
    if parameter == [""]:
        return ""

    # Map each parameter name to its tracker id.
    name_to_id = {row["name"]: row["id"]
                  for row in get_csv_from_server(opener, baseurl, url)}

    parameter_ids = []
    for name in parameter:
        if name not in name_to_id:
            # An unknown name would silently produce wrong statistics, so
            # terminate. Report on stderr and exit non-zero: the previous
            # exit code 0 falsely signalled success to calling scripts.
            print('The parameter "%s" does not exist in the tracker.' % name,
                  file=sys.stderr)
            sys.exit(1)
        parameter_ids.append(name_to_id[name])

    return ",".join(parameter_ids)


def issues_to_quantities(issue_csv, columns, orders_csv):
    """Count issues per priority (or status).

    issue_csv: DictReader whose single column holds the priority/status id
               of each issue.
    columns: list of the expected priority/status names; only its length
             is used to size the result.
    orders_csv: DictReader mapping each id to its "order" value.

    Returns: a list of ints of length len(columns) + 1, one count per
             priority in tracker order, with the last entry counting
             issues that have no priority set.
    """
    quantities = [0] * (len(columns) + 1)

    # int(float()) because the order values are exported as "1.0", "2.0", ...
    order_dict = {row["id"]: int(float(row["order"])) for row in orders_csv}

    # The export has exactly one column; hoist its name out of the loop.
    value_column = issue_csv.fieldnames[0]
    for issue in issue_csv:
        value = issue[value_column]
        if value.isdigit():
            # "order" is 1-based, the quantities list is 0-based.
            quantities[order_dict[value] - 1] += 1
        else:  # no priority set
            quantities[-1] += 1

    return quantities


def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Insert one row of quantities into the database, creating it if needed.

    quantities: sequence bound to the placeholders of sql_insert_in_db.
    database_file: path of the sqlite database file.
    sql_create_db: CREATE statement used if the file does not exist yet.
    sql_insert_in_db: INSERT statement storing one row of quantities.
    """
    check_create_database(database_file, sql_create_db)

    con = db.connect(database_file)
    try:
        # The connection used as context manager commits on success and
        # rolls back on error; it does not close the connection.
        with con:
            con.execute(sql_insert_in_db, quantities)
    finally:
        con.close()


def save_stats_in_db(search, login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio=False):
    """Fetch issue counts from the tracker and store them in the database.

    search: "prio" to count issues by priority, "status" to count by status.
    login_parmeters: login form fields posted to the tracker.
    baseurl: base URL of the tracker.
    db_file: path of the sqlite database file.
    columns: names of the priorities/statuses to count.
    sql_create_db: CREATE statement used if the db file does not exist yet.
    sql_insert_in_db: INSERT statement storing one row of quantities.
    keywords: keyword names used as search filter ([""] for none).
    status: status names used as search filter (only used for "prio").
    include_no_prio: also store the count of issues without a priority.
    """
    # Fail early with a clear message; previously an unknown search mode
    # crashed later with a NameError on unbound local variables.
    if search not in ("prio", "status"):
        raise ValueError('search must be "prio" or "status", got %r' % search)

    try:
        opener = connect_to_server(login_parmeters, baseurl)
        try:
            keywords_ids_url = get_ids(opener, baseurl, keywords,
                                       CHECK_KEYWORD_VALUES)

            if search == "prio":
                order_csv = get_csv_from_server(opener, baseurl,
                                                CHECK_ROUNDUP_ORDER_PRIO)
                status_ids_url = get_ids(opener, baseurl, status,
                                         CHECK_STATUS_VALUES)
                formated_search_url = SEARCH_ROUNDUP_PRIO.format(
                    search_values=status_ids_url,
                    keyword_values=keywords_ids_url)
            else:  # search == "status"
                order_csv = get_csv_from_server(opener, baseurl,
                                                CHECK_ROUNDUP_ORDER_STATUS)
                formated_search_url = SEARCH_ROUNDUP_STATUS.format(
                    keyword_values=keywords_ids_url)

            current_issues_csv = get_csv_from_server(opener, baseurl,
                                                     formated_search_url)

            # Consume the CSV streams before the session is closed.
            quantities = issues_to_quantities(current_issues_csv, columns,
                                              order_csv)
        finally:
            # Close the session even if one of the requests fails.
            opener.close()

        if not include_no_prio:
            quantities = quantities[:-1]

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))
This site is hosted by Intevation GmbH (Datenschutzerklärung und Impressum | Privacy Policy and Imprint)