#!/usr/bin/env python3
"""Fetch issues from a roundup-tracker and save them in a database.

author: Sascha L. Teichmann
author: Bernhard Reiter
author: Sean Engelhardt
author: Magnus Schieder

(c) 2010, 2015, 2018 by Intevation GmbH

This is Free Software under the terms of the
GNU GENERAL PUBLIC LICENSE Version 3 or later.
See http://www.gnu.org/licenses/gpl-3.0.txt for details

For usage see examples/.
"""

import http.cookiejar
import urllib.parse
import urllib.request
import csv
import io
import sqlite3 as db
import os
import sys


# Getting keywords and their ids.
CHECK_KEYWORD_VALUES = "keyword?@action=export_csv&@columns=id,name"
# Getting states and their ids.
CHECK_STATUS_VALUES = "status?@action=export_csv&@columns=id,name"
# Getting priorities and their ids.
CHECK_PRIO_VALUES = "priority?@action=export_csv&@columns=id,name"

# Getting the priority of each issue with the filter status, keyword, priority.
SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword,priority&@pagesize=500&@startwith=0&status={status_values}&keyword={keyword_values}&priority={priority_values}"
# Getting the status of each issue with the filter priority, keyword, status.
SEARCH_ROUNDUP_STATUS = "issue?@action=export_csv&@columns=status&@filter=priority,keyword,status&@pagesize=500&@startwith=0&priority={priority_values}&keyword={keyword_values}&status={status_values}"


def connect_to_server(params, baseurl):
    """Log in to the tracker at baseurl and return a cookie-carrying opener.

    params: dict of login form fields, urlencoded into the POST body.
    """
    enc_data = urllib.parse.urlencode(params).encode()
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    req = urllib.request.Request(url=baseurl, data=enc_data)
    # The response body is not needed; opening the request stores the
    # session cookie in the jar attached to the opener.
    opener.open(req)
    return opener


def get_csv_from_server(opener, roundup_url, sub_url):
    """Fetch roundup_url + sub_url and return the body as a csv.DictReader."""
    csv_req = urllib.request.Request(url=roundup_url + sub_url)
    f = opener.open(csv_req)
    return csv.DictReader(io.TextIOWrapper(f))


def check_create_database(database_file, sql_create_db):
    """Create database_file with schema sql_create_db if it does not exist."""
    if os.path.isfile(database_file):
        return
    con = None
    cur = None
    try:
        con = db.connect(database_file)
        cur = con.cursor()
        try:
            cur.execute(sql_create_db)
            con.commit()
            # World-readable so e.g. a web frontend can read the stats.
            os.chmod(database_file, 0o644)
        except:
            # Roll back on any error, then re-raise for the caller.
            con.rollback()
            raise
    finally:
        if cur:
            cur.close()
        if con:
            con.close()


def get_ids(opener, baseurl, parameter, url, include_no_prio=False):
    """Return the ids of the given search parameter names.

    parameter: list of names (keywords, states or priorities).
    url: which export URL to query (CHECK_*_VALUES above).
    include_no_prio: also accept the pseudo name "None" (id "-1") for
    issues without a priority.

    Returns a tuple (comma-joined id string for use in a search URL,
    list of the individual ids). Exits the program if a name is unknown
    to the tracker.
    """
    if parameter == [""]:
        # Empty search parameter: nothing to resolve.
        return ("", [])

    name_to_id = {row["name"]: row["id"]
                  for row in get_csv_from_server(opener, baseurl, url)}

    if include_no_prio:
        name_to_id["None"] = "-1"

    parameter_ids = []
    for name in parameter:
        if name not in name_to_id:
            print('The parameter "%s" does not exist in the tracker.' % name)
            # NOTE(review): exits with status 0 even though this is an
            # error; kept for backward compatibility with existing scripts.
            sys.exit(0)
        parameter_ids.append(name_to_id[name])

    return (",".join(parameter_ids), parameter_ids)


def issues_to_quantities(issue_csv, columns_ids):
    """Count issues per priority (or status).

    issue_csv: csv.DictReader with a single value column (priority/status id).
    columns_ids: the ids, in the order configured by the caller.

    Returns: a list of ints, containing how often a prio/status occurred,
    in the order in which the ids are given in columns_ids. Rows whose
    value is not a digit (no priority set) are counted in the last slot.
    """
    order_dict = {col_id: pos for pos, col_id in enumerate(columns_ids)}
    quantities = [0] * len(columns_ids)

    # The export has exactly one value column; its name depends on the query.
    value_column = issue_csv.fieldnames[0]
    for issue in issue_csv:
        value = issue[value_column]
        if value.isdigit():
            quantities[order_dict[value]] += 1
        else:
            # No priority set.
            quantities[-1] += 1

    return quantities


def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Insert one row of quantities, creating the database first if needed."""
    check_create_database(database_file, sql_create_db)

    cur = None
    con = None
    try:
        con = db.connect(database_file)
        cur = con.cursor()
        try:
            cur.execute(sql_insert_in_db, quantities)
            con.commit()
        except:
            # Roll back on any error, then re-raise for the caller.
            con.rollback()
            raise
    finally:
        if cur:
            cur.close()
        if con:
            con.close()


def save_stats_in_db(search, login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, search_parameters, include_no_prio):
    """Fetch issue counts from the tracker and store one row in the database.

    search: "prio" counts issues per priority (search_parameters are
    states); "status" counts issues per status (search_parameters are
    priorities). columns are the priorities/states to count.
    Network failures are reported on stdout, not raised.
    """
    try:
        opener = connect_to_server(login_parmeters, baseurl)

        keywords_ids_url, _ = get_ids(opener, baseurl, keywords,
                                      CHECK_KEYWORD_VALUES)

        if search == "prio":
            # search_parameters are states.
            status_ids_url, _ = get_ids(opener, baseurl, search_parameters,
                                        CHECK_STATUS_VALUES, include_no_prio)
            prio_ids_url, columns_ids = get_ids(opener, baseurl, columns,
                                               CHECK_PRIO_VALUES,
                                               include_no_prio)
            formated_search_url = SEARCH_ROUNDUP_PRIO.format(
                status_values=status_ids_url,
                keyword_values=keywords_ids_url,
                priority_values=prio_ids_url)

        elif search == "status":
            # search_parameters are priorities.
            prio_ids_url, _ = get_ids(opener, baseurl, search_parameters,
                                      CHECK_PRIO_VALUES, include_no_prio)
            status_ids_url, columns_ids = get_ids(opener, baseurl, columns,
                                                  CHECK_STATUS_VALUES)
            formated_search_url = SEARCH_ROUNDUP_STATUS.format(
                priority_values=prio_ids_url,
                keyword_values=keywords_ids_url,
                status_values=status_ids_url)

        else:
            # Previously an unknown value fell through to a NameError on
            # formated_search_url; fail with a clear message instead.
            raise ValueError('search must be "prio" or "status", got %r'
                             % (search,))

        current_issues_csv = get_csv_from_server(opener, baseurl,
                                                 formated_search_url)

        opener.close()

        quantities = issues_to_quantities(current_issues_csv, columns_ids)

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))