# HG changeset patch # User Magnus Schieder # Date 1540219798 -7200 # Node ID 3bb3d9a9f1b71cf6c9ea6f2b80f33197ed195a06 # Parent 8ffd584065a4b7981160beebb2720900327f4ad7 Filter by keywords and states. Configuration via configuration file. "No prio" graph is displayed. (./display_issues_demo.py is not yet dynamic, it needs all states) diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 README.creole --- a/README.creole Mon Jul 09 14:49:44 2018 +0200 +++ b/README.creole Mon Oct 22 16:49:58 2018 +0200 @@ -8,16 +8,18 @@ Run ./demo.py from a roundup-tracker to have a running tracker. {{{ -cp examples/collect_demo1.py c1.py -./c1.py +cp examples/collect_demo3.py c3.py +cp examples/config3.ini . +# change config3.ini +./c3.py # create or change some issues -./c1.py +./c3.py # to inspect the database contents -sqlite3 demo1.db 'select * from issues;' +sqlite3 demo3.db 'select * from issues;' -./display_issues_demo.py >demo1.html -chromium demo1.html +./display_issues_demo.py >demo3.html +chromium demo3.html }}} diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 collect_issues.py --- a/collect_issues.py Mon Jul 09 14:49:44 2018 +0200 +++ b/collect_issues.py Mon Oct 22 16:49:58 2018 +0200 @@ -24,8 +24,9 @@ CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order" -CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1" -SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}" +CHECK_KEYWORD_ORDER = "keyword?@action=export_csv&@columns=id,name" +CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id,name&@filter=open&open=1" +SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}" def connect_to_server(params, baseurl): @@ -44,18 +45,6 @@ return csv_reader -def set_search_paramters_on_URL(url, search_param_csv): - - id_list = [] - - for row in search_param_csv: - 
id_list.append(row["id"]) - - new_url = url.format(search_values = ",".join(id_list)) - - return new_url - - def check_create_database(database_file, sql_create_db): if not os.path.isfile(database_file): con = None @@ -76,6 +65,38 @@ if con: con.close() +def get_keyword_ids(opener, baseurl, keywords): + if keywords == [""]: + return "" + + keywords_csv = get_csv_from_server(opener, baseurl, CHECK_KEYWORD_ORDER) + keywords_dict = {} + for x in keywords_csv: + keywords_dict[x["name"]] = x["id"] + + keywords_ids = [] + for x in keywords: + keywords_ids.append(keywords_dict[x]) + + return ",".join(keywords_ids) + +def get_status_ids(opener, baseurl, status): + + status_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES) + + if status == [""]: + return "" + + status_dict = {} + for x in status_csv: + status_dict[x["name"]] = x["id"] + + staus_ids = ["-1"] + for x in status: + staus_ids.append(status_dict[x]) + + return ",".join(staus_ids) + def issues_to_quantities(issue_csv, columns, orders_csv): """Count issues per priority. 
@@ -126,18 +147,17 @@ con.close() -def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, searchurl=False, include_no_prio=False): +def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio=False): try: opener = connect_to_server(login_parmeters, baseurl) - search_operators_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES) order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER) - if searchurl == False: - formated_search_url = set_search_paramters_on_URL(SEARCH_ROUNDUP, search_operators_csv) - else: - formated_search_url = searchurl + keywords_ids_url = get_keyword_ids(opener, baseurl, keywords) + + status_ids_url = get_status_ids(opener, baseurl, status) + formated_search_url = SEARCH_ROUNDUP.format(search_values=status_ids_url, keyword_values=keywords_ids_url) current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url) diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 display_issues.py --- a/display_issues.py Mon Jul 09 14:49:44 2018 +0200 +++ b/display_issues.py Mon Oct 22 16:49:58 2018 +0200 @@ -40,7 +40,7 @@ return ", ".join(formated) -def get_webpage(graph=None): +def get_webpage(status, keywords, graph=None): if graph is None: graph = os.path.dirname(os.path.realpath(__file__)) + '/graph.html' @@ -48,17 +48,21 @@ with open(graph, "r") as html_chart_file: base_html_data = html_chart_file.read() + base_html_data = (base_html_data + .replace("status", status) + .replace("keywords", keywords) .replace("var critical=[];", "var critical=[" + make_js_object_string(rcd.data_dict["critical"]) + "]") .replace("var urgent=[];", "var urgent=[" + make_js_object_string(rcd.data_dict["urgent"]) + "]") .replace("var bug=[];", "var bug=[" + make_js_object_string(rcd.data_dict["bug"]) + "]") .replace("var feature=[];", "var feature=[" + make_js_object_string(rcd.data_dict["feature"]) + "]") .replace("var wish=[];", "var 
wish=[" + make_js_object_string(rcd.data_dict["wish"]) + "]") + .replace("var noPrio=[];", "var noPrio=[" + make_js_object_string(rcd.data_dict["noPrio"]) + "]") .replace("var timestamp=[];", "var timestamp=[" + make_js_object_date(rcd.data_dict["date"]) + "]")) return base_html_data -def compile_db_stats_html(db_file, sql_select, graph=None): +def compile_db_stats_html(db_file, sql_select, status="", keywords="", graph=None): con = None cur = None @@ -75,20 +79,21 @@ rcd.data_dict["bug"].append(row[3]) rcd.data_dict["feature"].append(row[4]) rcd.data_dict["wish"].append(row[5]) + rcd.data_dict["noPrio"].append(row[6]) finally: if cur: cur.close() if con: con.close() - return get_webpage(graph) + return get_webpage(status, keywords, graph) def render_webpage(content): for line in content.split("\n"): print(line) -def render_db_stats_as_html(db_file, sql_select): - render_webpage(compile_db_stats_html(db_file, sql_select)) +def render_db_stats_as_html(db_file, sql_select, status="", keywords=""): + render_webpage(compile_db_stats_html(db_file, sql_select, status, keywords)) if __name__ == '__main__': cgitb.enable() @@ -96,4 +101,4 @@ #print("Content-Type: text/html") #print() - render_db_stats_as_html("./demo1.db", rcd.SELECT_ALL) + render_db_stats_as_html("./demo3.db", rcd.SELECT_ALL) diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 display_issues_demo.py --- a/display_issues_demo.py Mon Jul 09 14:49:44 2018 +0200 +++ b/display_issues_demo.py Mon Oct 22 16:49:58 2018 +0200 @@ -12,10 +12,21 @@ GNU GENERAL PUBLIC LICENSE Version 3 or later. 
See http://www.gnu.org/licenses/gpl-3.0.txt for details """ +import configparser +from display_issues import * -from display_issues import * +config = configparser.ConfigParser() +config.read('config3.ini') + +keywords = config.get("SEARCH", "Keywords", fallback="") +status = config.get("SEARCH", "Status", fallback="") +columns = config.get("SEARCH", "Columns", fallback="critical, urgent, bug, feature, wish") +noPrio = config.get("SEARCH", "IncludeNoPrio", fallback=False) +if noPrio: + columns += ", None" cgitb.enable() # (optional) HTML traceback to browser #render_db_stats_as_html("./demo1.db", rcd.SELECT_ALL) -render_db_stats_as_html("./demo1.db", - rcd.SELECT_WHERE.format("timestamp > date('now', '-2 month')")) +render_db_stats_as_html("./demo3.db", + rcd.build_sql_select(columns).format("timestamp > date('now', '-2 month')"), + status, keywords) diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 examples/collect_demo3.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/examples/collect_demo3.py Mon Oct 22 16:49:58 2018 +0200 @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +"""Connect to roundup-tracker and save status to db for example demo3. + +Run periodically as often as you want data points to be saved. +demo3 only tracks issues with a priority. 
+""" + +import json +import configparser +from collect_issues import save_stats_in_db +import roundup_content_data as rcd + +COLUMNS = "critical, urgent, bug, feature, wish" + +config = configparser.ConfigParser() +config.read('config3.ini') + +base_url = config.get("URL", "BaseURL") + +user = config.get("LOGIN","Username") +password = config.get("LOGIN", "Password") + +LOGIN_PARAMETERS_DEMO = ( + ("__login_name", user), + ("__login_password", password), + ("@action", "Login"), + ) + +database_file = config.get("DB", "DatabaseFile") + +keywords = config.get("SEARCH", "Keywords", fallback="").split(", ") + +list_of_columns = config.get("SEARCH", "Columns", fallback=COLUMNS).split(", ") + +status = config.get("SEARCH", "Status", fallback="").split(", ") + +include_no_prio = config.getboolean("SEARCH", "IncludeNoPrio", fallback= False) + +if include_no_prio: + list_of_columns += ["None"] + +select_all, select_where, create_db, insert_new = \ + rcd.build_sql_commands(list_of_columns) + +save_stats_in_db(LOGIN_PARAMETERS_DEMO, base_url, database_file, + list_of_columns, create_db, insert_new, keywords, status) diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 examples/config3.ini --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/examples/config3.ini Mon Oct 22 16:49:58 2018 +0200 @@ -0,0 +1,15 @@ +[URL] +BaseURL = http://localhost:8917/demo/ + +[LOGIN] +Username = demo +Password = demo + +[DB] +DatabaseFile = ./demo3.db + +[SEARCH] +Keywords = keyword1, keyword2, keyword3 +Columns = critical, urgent, bug, feature, wish +Status = unread, deferred, chatting, need-eg, in-progress, testing, done-cbb, resolved +IncludeNoPrio = True diff -r 8ffd584065a4 -r 3bb3d9a9f1b7 graph.html --- a/graph.html Mon Jul 09 14:49:44 2018 +0200 +++ b/graph.html Mon Oct 22 16:49:58 2018 +0200 @@ -74,6 +74,14 @@ fill: blue; } + .line.noprio { + stroke: grey; + } + + .line.noprio.legend { + fill: grey; + } + .grid .tick { stroke: lightgrey; opacity: 0.7; @@ -86,6 +94,9 @@ +

Filter

+

States: status

+

Keywords: keywords