view collect_issues.py @ 15:d0c439d1e833

Prepare for tracker entries without priority.

* Extend issues_to_quantities to count entries with priority 'None'.
  The regular classic template enforces a priority value, but some
  trackers don't: e.g. the 'fast-decomposed' variant used by Intevation
  (https://hg.intevation.de/roundup/fast-decomposed/) uses other fields
  for a scrum-like workflow that show the order of issues as a float
  value. In order to get all issues, we also have to collect them.
  Implement it in a backwards-compatible manner for save_stats_in_db()
  by a keyword argument that is off by default. This is just a
  preparation, as the database must also accept an additional column.
* Clean up some superfluous whitespace, remove an unused import and add
  copyright year 2018.
author Bernhard Reiter <bernhard@intevation.de>
date Fri, 06 Jul 2018 17:36:21 +0200
parents e95f7bee8643
children adca5b3780d2
line source
#!/usr/bin/env python3
""" Fetch issues from a roundup-tracker and save them in a databse.

author: Sascha L. Teichmann <sascha.teichmann@intevation.de>
author: Bernhard Reiter <bernhard@intevation.de>
author: Sean Engelhardt <sean.engelhardt@intevation.de>

(c) 2010, 2015, 2018 by Intevation GmbH

This is Free Software under the terms of the
GNU GENERAL PUBLIC LICENSE Version 3 or later.
See http://www.gnu.org/licenses/gpl-3.0.txt for details.


## USAGE EXAMPLE ##

BASE_URL_DEMO = "http://localhost:8917/demo/"
SEARCH_URL_DEMO = "issue?@action=export_csv&@columns=title,priority&@filter=status&@pagesize=50&@startwith=0&status=-1,1,2,3,4,5,6,7"

LOGIN_PARAMETERS_DEMO = (
    ("__login_name", "demo"),
    ("__login_password", "demo"),
    ("@action", "Login"),
    )

save_stats_in_db(LOGIN_PARAMETERS_DEMO, BASE_URL_DEMO, rcd.DATABASE_DEMO, rcd.COLUMNS, rcd.CREATE_DB, rcd.INSERT_NEW, SEARCH_URL_DEMO)
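
To also count issues that have no priority set, pass include_no_prio=True
(off by default for backwards compatibility); note that the database
schema must then accept the additional column:

save_stats_in_db(LOGIN_PARAMETERS_DEMO, BASE_URL_DEMO, rcd.DATABASE_DEMO, rcd.COLUMNS, rcd.CREATE_DB, rcd.INSERT_NEW, SEARCH_URL_DEMO, include_no_prio=True)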
"""

import http.cookiejar
import urllib.error
import urllib.parse
import urllib.request
import csv
import io
import sqlite3 as db
import os


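# Sub-URLs for the roundup CSV export: the order values of the priorities,
# the ids of all "open" statuses, and the issue search itself
# ({search_values} is filled in from the status ids by
# set_search_paramters_on_URL()).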
CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1"
SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}"


def connect_to_server(params, baseurl):
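    """Log in to the roundup tracker at baseurl and return an opener holding the session cookie."""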
    enc_data = urllib.parse.urlencode(params).encode()
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    req = urllib.request.Request(url=baseurl, data=enc_data)
    opener.open(req)
    return opener


def get_csv_from_server(opener, roundup_url, sub_url):
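    """Request the CSV export at roundup_url + sub_url and return a csv.DictReader over the response."""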
    csv_req = urllib.request.Request(url=roundup_url+sub_url)
    f = opener.open(csv_req)
    csv_reader = csv.DictReader(io.TextIOWrapper(f))
    return csv_reader


def set_search_paramters_on_URL(url, search_param_csv):
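    """Fill the {search_values} placeholder in url with the ids from search_param_csv."""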

    id_list = []

    for row in search_param_csv:
        id_list.append(row["id"])

    new_url = url.format(search_values=",".join(id_list))

    return new_url


def check_create_database(database_file, sql_create_db):
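    """Create the sqlite database file via sql_create_db if it does not exist yet."""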
    if not os.path.isfile(database_file):
        con = None
        cur = None
        try:
            con = db.connect(database_file)
            cur = con.cursor()
            try:
                cur.execute(sql_create_db)
                con.commit()
                os.chmod(database_file, 0o644)
            except:
                con.rollback()
                raise
        finally:
            if cur:
                cur.close()
            if con:
                con.close()


def issues_to_quantities(issue_csv, columns, orders_csv):
    """Count issues per priority.

    Returns: a list of ints, containing how often a prio occurred [:-1]
             in order of the priorities, with the last being the "None" prio
    """

    quantities = [0] * (len(columns) + 1)
    order_dict = {}

    # convert the csv.DictReader rows to a real dict: id -> order
    for row in orders_csv:
        order_dict[row["id"]] = int(float(row["order"]))  # int(float()) because the order value is "1.0", "2.0", etc.

    for issue in issue_csv:
        priority = issue["priority"]

        if priority.isdigit():
            quantities[order_dict[priority] - 1] += 1
        else:  # no priority set
            quantities[-1] += 1

    #  print("quantities : " + str(quantities))

    return quantities


def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
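    """Save the quantities as a new row in the sqlite database, creating the database first if needed."""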
    check_create_database(database_file, sql_create_db)

    cur = None
    con = None

    try:
        con = db.connect(database_file)
        cur = con.cursor()
        try:
            cur.execute(sql_insert_in_db, quantities)
            con.commit()
        except:
            con.rollback()
            raise
    finally:
        if cur:
            cur.close()
        if con:
            con.close()


def save_stats_in_db(login_parameters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, searchurl=False, include_no_prio=False):
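    """Fetch the current issues from the tracker and save the counts per priority in db_file.

    If include_no_prio is True, also save how many issues have no priority
    set; the database schema must provide a column for the extra value.
    """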
    try:

        opener = connect_to_server(login_parameters, baseurl)

        search_operators_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
        order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER)

        if not searchurl:
            formatted_search_url = set_search_paramters_on_URL(SEARCH_ROUNDUP, search_operators_csv)
        else:
            formatted_search_url = searchurl

        current_issues_csv = get_csv_from_server(opener, baseurl, formatted_search_url)

        opener.close()

        quantities = issues_to_quantities(current_issues_csv, columns, order_csv)
        if not include_no_prio:
            quantities = quantities[:-1]

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))