# HG changeset patch
# User Magnus Schieder
# Date 1542139462 -3600
# Node ID cdab667c6abbb65957d036fee415c1604fa013c0
# Parent 761ee2351f587ba2f610f64c23c60d46a6d1e67c
Remove code duplication and clean up.

* The status search no longer needs the extra "-1" entry.
* If a requested parameter does not exist in the tracker, an error message
  is printed and the program terminates, to avoid writing incorrect entries
  to the database.

diff -r 761ee2351f58 -r cdab667c6abb collect_issues.py
--- a/collect_issues.py	Tue Nov 13 17:55:10 2018 +0100
+++ b/collect_issues.py	Tue Nov 13 21:04:22 2018 +0100
@@ -21,16 +21,19 @@
 import io
 import sqlite3 as db
 import os
+import sys
 
 # Getting all priority in their order.
 CHECK_ROUNDUP_ORDER_PRIO = "priority?@action=export_csv&@columns=id,order"
 # Getting all statuses in their order.
 CHECK_ROUNDUP_ORDER_STATUS = "status?@action=export_csv&@columns=id,order"
+
 # Getting keywords and their ids.
-CHECK_KEYWORD_ORDER = "keyword?@action=export_csv&@columns=id,name"
+CHECK_KEYWORD_VALUES = "keyword?@action=export_csv&@columns=id,name"
 # Getting states and their ids.
-CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id,name&@filter=open&open=1"
+CHECK_STATUS_VALUES = "status?@action=export_csv&@columns=id,name"
+
 # Getting the priority of each issue with the filter status and keywords
 SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}"
 # Getting the status of each issue with the filter keywords
@@ -73,37 +76,24 @@
     if con:
         con.close()
 
-def get_keyword_ids(opener, baseurl, keywords):
-    if keywords == [""]:
+def get_ids(opener, baseurl, parameter, url):
+    if parameter == [""]:
         return ""
 
-    keywords_csv = get_csv_from_server(opener, baseurl, CHECK_KEYWORD_ORDER)
-    keywords_dict = {}
-    for x in keywords_csv:
-        keywords_dict[x["name"]] = x["id"]
-
-    keywords_ids = []
-    for x in keywords:
-        keywords_ids.append(keywords_dict[x])
-
-    return ",".join(keywords_ids)
-
-def get_status_ids(opener, baseurl, status):
+    parameter_csv = get_csv_from_server(opener, baseurl, url)
+    parameter_dict = {}
+    for x in parameter_csv:
+        parameter_dict[x["name"]] = x["id"]
 
-    if status == [""]:
-        return ""
-
-    status_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
+    parameter_ids = []
+    for x in parameter:
+        if x not in parameter_dict:
+            print('The parameter "%s" does not exist in the tracker.' % x)
+            sys.exit(0)
 
-    status_dict = {}
-    for x in status_csv:
-        status_dict[x["name"]] = x["id"]
+        parameter_ids.append(parameter_dict[x])
 
-    staus_ids = ["-1"]
-    for x in status:
-        staus_ids.append(status_dict[x])
-
-    return ",".join(staus_ids)
+    return ",".join(parameter_ids)
 
 
 def issues_to_quantities(issue_csv, columns, orders_csv):
@@ -160,11 +150,11 @@
 
     opener = connect_to_server(login_parmeters, baseurl)
 
-    keywords_ids_url = get_keyword_ids(opener, baseurl, keywords)
+    keywords_ids_url = get_ids(opener, baseurl, keywords, CHECK_KEYWORD_VALUES)
 
     if search == "prio":
         order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER_PRIO)
-        status_ids_url = get_status_ids(opener, baseurl, status)
+        status_ids_url = get_ids(opener, baseurl, status, CHECK_STATUS_VALUES)
         formated_search_url = SEARCH_ROUNDUP_PRIO.format(search_values=status_ids_url, keyword_values=keywords_ids_url)
     elif search == "status":
         order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER_STATUS)
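
For illustration, the following minimal sketch mirrors the lookup pattern that the new get_ids() consolidates: names are resolved against a name-to-id mapping (in the real code this mapping is built from the tracker's CSV export via CHECK_KEYWORD_VALUES or CHECK_STATUS_VALUES), and an unknown name aborts the run instead of silently producing a wrong query. The helper name lookup_ids and the sample data are hypothetical; only the control flow follows the patch.

import sys

def lookup_ids(names, name_to_id):
    # As in the consolidated get_ids(): an empty selection means "no filter".
    if names == [""]:
        return ""

    ids = []
    for name in names:
        if name not in name_to_id:
            # Abort rather than feed a bad filter value into the query / database.
            print('The parameter "%s" does not exist in the tracker.' % name)
            sys.exit(0)
        ids.append(name_to_id[name])

    return ",".join(ids)

# Hypothetical sample data standing in for the tracker's CSV export.
print(lookup_ids(["chatty", "confirmed"], {"chatty": "1", "confirmed": "2"}))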