diff collect_issues.py @ 20:3bb3d9a9f1b7
Filter by keywords and states.
Configuration via a configuration file.
A "No prio" graph is displayed.
(./display_issues_demo.py is not yet dynamic; it needs all states)
| author   | Magnus Schieder <mschieder@intevation.de> |
|----------|-------------------------------------------|
| date     | Mon, 22 Oct 2018 16:49:58 +0200           |
| parents  | adca5b3780d2                              |
| children | 10167e40a657 7161ce4e7ab1                 |
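The commit message says the keyword and state filters now come from a configuration file, but that file is not part of this diff. The following is only a minimal sketch of how such a configuration might be read; the file name, section name, and option names are assumptions, not taken from the repository. It does show why an empty option naturally produces `[""]`, the sentinel value the new helper functions below check for.

```python
# Hypothetical sketch: reading comma-separated filter lists from an
# ini-style config file. File, section, and option names are assumptions.
from configparser import ConfigParser

config = ConfigParser()
config.read("config.ini")  # hypothetical file name

# An empty option value split on "," yields [""], which matches the
# `if keywords == [""]` / `if status == [""]` checks in the new helpers.
keywords = config.get("SEARCH", "Keywords", fallback="").split(",")
status = config.get("SEARCH", "Status", fallback="").split(",")
```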
--- a/collect_issues.py	Mon Jul 09 14:49:44 2018 +0200
+++ b/collect_issues.py	Mon Oct 22 16:49:58 2018 +0200
@@ -24,8 +24,9 @@
 CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
-CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1"
-SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}"
+CHECK_KEYWORD_ORDER = "keyword?@action=export_csv&@columns=id,name"
+CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id,name&@filter=open&open=1"
+SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}"


 def connect_to_server(params, baseurl):
@@ -44,18 +45,6 @@
     return csv_reader


-def set_search_paramters_on_URL(url, search_param_csv):
-
-    id_list = []
-
-    for row in search_param_csv:
-        id_list.append(row["id"])
-
-    new_url = url.format(search_values = ",".join(id_list))
-
-    return new_url
-
-
 def check_create_database(database_file, sql_create_db):
     if not os.path.isfile(database_file):
         con = None
@@ -76,6 +65,38 @@
         if con:
             con.close()


+def get_keyword_ids(opener, baseurl, keywords):
+    if keywords == [""]:
+        return ""
+
+    keywords_csv = get_csv_from_server(opener, baseurl, CHECK_KEYWORD_ORDER)
+    keywords_dict = {}
+    for x in keywords_csv:
+        keywords_dict[x["name"]] = x["id"]
+
+    keywords_ids = []
+    for x in keywords:
+        keywords_ids.append(keywords_dict[x])
+
+    return ",".join(keywords_ids)
+
+def get_status_ids(opener, baseurl, status):
+
+    status_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
+
+    if status == [""]:
+        return ""
+
+    status_dict = {}
+    for x in status_csv:
+        status_dict[x["name"]] = x["id"]
+
+    staus_ids = ["-1"]
+    for x in status:
+        staus_ids.append(status_dict[x])
+
+    return ",".join(staus_ids)
+
 def issues_to_quantities(issue_csv, columns, orders_csv):
     """Count issues per priority.
@@ -126,18 +147,17 @@
         con.close()


-def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, searchurl=False, include_no_prio=False):
+def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio=False):

     try:
         opener = connect_to_server(login_parmeters, baseurl)

-        search_operators_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
         order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER)

-        if searchurl == False:
-            formated_search_url = set_search_paramters_on_URL(SEARCH_ROUNDUP, search_operators_csv)
-        else:
-            formated_search_url = searchurl
+        keywords_ids_url = get_keyword_ids(opener, baseurl, keywords)
+
+        status_ids_url = get_status_ids(opener, baseurl, status)
+        formated_search_url = SEARCH_ROUNDUP.format(search_values=status_ids_url, keyword_values=keywords_ids_url)

         current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url)
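To make the effect of the new `SEARCH_ROUNDUP` template concrete, here is a small self-contained sketch of how the name-to-id mapping and the final query URL come together. The id values, names, and the local `*_rows` lists stand in for the CSV rows the tracker would return via `CHECK_KEYWORD_ORDER` and `CHECK_ROUNDUP_SEARCH_VALUES`; they are purely illustrative.

```python
# Illustrative only: ids and names below are made up; on a real tracker they
# come from the keyword and status CSV exports fetched from the server.
SEARCH_ROUNDUP = ("issue?@action=export_csv&@columns=priority"
                  "&@filter=status,keyword&@pagesize=500&@startwith=0"
                  "&status={search_values}&keyword={keyword_values}")

# Rows as csv.DictReader would yield them from the server exports.
keyword_rows = [{"id": "1", "name": "backend"}, {"id": "2", "name": "ui"}]
status_rows = [{"id": "1", "name": "new"}, {"id": "3", "name": "open"}]

keywords = ["backend"]       # names taken from the configuration
status = ["new", "open"]

keyword_by_name = {r["name"]: r["id"] for r in keyword_rows}
status_by_name = {r["name"]: r["id"] for r in status_rows}

keyword_ids = ",".join(keyword_by_name[k] for k in keywords)
# get_status_ids prepends "-1", mirroring the hard-coded "status=-1," prefix
# of the old SEARCH_ROUNDUP URL.
status_ids = ",".join(["-1"] + [status_by_name[s] for s in status])

print(SEARCH_ROUNDUP.format(search_values=status_ids,
                            keyword_values=keyword_ids))
# issue?@action=export_csv&@columns=priority&@filter=status,keyword
#   &@pagesize=500&@startwith=0&status=-1,1,3&keyword=1
```

This mirrors the structure of the change: `get_keyword_ids` and `get_status_ids` translate configured names into tracker ids, and `save_stats_in_db` now simply formats `SEARCH_ROUNDUP` with both lists instead of patching the URL through the removed `set_search_paramters_on_URL` helper.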