# HG changeset patch
# User Magnus Schieder
# Date 1542042206 -3600
# Node ID 7161ce4e7ab117265518b97e588a12e04a33d9da
# Parent 89469aa41fe175ed080fde04ad9ee0d5859095b1
The web-based display is dynamically generated.

* All graphs are passed in one object. The graphs are generated iteratively
  and each is assigned a color.
* roundup_cc.py can count how many issues are in which state.

diff -r 89469aa41fe1 -r 7161ce4e7ab1 collect_issues.py
--- a/collect_issues.py Fri Nov 02 17:06:45 2018 +0100
+++ b/collect_issues.py Mon Nov 12 18:03:26 2018 +0100
@@ -23,10 +23,18 @@
 
 import os
 
-CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
+# Getting all priorities in their order.
+CHECK_ROUNDUP_ORDER_PRIO = "priority?@action=export_csv&@columns=id,order"
+# Getting all statuses in their order.
+CHECK_ROUNDUP_ORDER_STATUS = "status?@action=export_csv&@columns=id,order"
+# Getting keywords and their ids.
 CHECK_KEYWORD_ORDER = "keyword?@action=export_csv&@columns=id,name"
+# Getting states and their ids.
 CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id,name&@filter=open&open=1"
-SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}"
+# Getting the priority of each issue, filtered by status and keywords.
+SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}"
+# Getting the status of each issue, filtered by keywords.
+SEARCH_ROUNDUP_STATUS = "issue?@action=export_csv&@columns=status&@filter=keyword&@pagesize=500&@startwith=0&keyword={keyword_values}"
 
 
 def connect_to_server(params, baseurl):
@@ -82,11 +90,11 @@
 
 def get_status_ids(opener, baseurl, status):
 
-    status_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
-
     if status == [""]: return ""
 
+    status_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
+
     status_dict = {}
     for x in status_csv:
         status_dict[x["name"]] = x["id"]
@@ -113,14 +121,14 @@
         order_dict[row["id"]] = int(float(row["order"])) # int(float()) because the order-value is indeed "1.0, 2.0" etc
 
     for issue in issue_csv:
-        priority = issue["priority"]
+        priority = issue[issue_csv.fieldnames[0]]
 
         if priority.isdigit() == True :
             quantities[order_dict[priority] -1 ] += 1
         else: # no priority set
             quantities[-1] += 1
 
-    # print("quantities : " + str(quantities))
+    # print("quantities : " + str(quantities))
 
     return quantities
@@ -147,17 +155,22 @@
     con.close()
 
 
-def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio=False):
+def save_stats_in_db(search, login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio=False):
     try:
         opener = connect_to_server(login_parmeters, baseurl)
-        order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER)
 
         keywords_ids_url = get_keyword_ids(opener, baseurl, keywords)
-        status_ids_url = get_status_ids(opener, baseurl, status)
 
-        formated_search_url = SEARCH_ROUNDUP.format(search_values=status_ids_url, keyword_values=keywords_ids_url)
+        if search == "prio":
+            order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER_PRIO)
+            status_ids_url = get_status_ids(opener, baseurl, status)
+            formated_search_url = SEARCH_ROUNDUP_PRIO.format(search_values=status_ids_url, keyword_values=keywords_ids_url)
+        elif search == "status":
"status": + order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER_STATUS) + formated_search_url = SEARCH_ROUNDUP_STATUS.format(keyword_values=keywords_ids_url) + else: + print("Incorrect search parameter. (prio, status)") current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url) diff -r 89469aa41fe1 -r 7161ce4e7ab1 display_issues.py --- a/display_issues.py Fri Nov 02 17:06:45 2018 +0100 +++ b/display_issues.py Mon Nov 12 18:03:26 2018 +0100 @@ -51,15 +51,16 @@ if "None" not in columns: data_dict["None"] = [0] + + js_data_dickt ="{" + for col in columns.split(", "): + js_data_dickt += col + ":[" + make_js_object_string(data_dict[col]) + "]," + js_data_dickt += "}" + base_html_data = (base_html_data .replace("status", status) .replace("keywords", keywords) - .replace("var critical=[];", "var critical=[" + make_js_object_string(data_dict["critical"]) + "]") - .replace("var urgent=[];", "var urgent=[" + make_js_object_string(data_dict["urgent"]) + "]") - .replace("var bug=[];", "var bug=[" + make_js_object_string(data_dict["bug"]) + "]") - .replace("var feature=[];", "var feature=[" + make_js_object_string(data_dict["feature"]) + "]") - .replace("var wish=[];", "var wish=[" + make_js_object_string(data_dict["wish"]) + "]") - .replace("var noPrio=[];", "var noPrio=[" + make_js_object_string(data_dict["None"]) + "]") + .replace("js_data_dickt", js_data_dickt) .replace("var timestamp=[];", "var timestamp=[" + make_js_object_date(data_dict["date"]) + "]")) return base_html_data diff -r 89469aa41fe1 -r 7161ce4e7ab1 display_issues_demo.py --- a/display_issues_demo.py Fri Nov 02 17:06:45 2018 +0100 +++ b/display_issues_demo.py Mon Nov 12 18:03:26 2018 +0100 @@ -16,6 +16,8 @@ import argparse from display_issues import * +STATES = "unread, deferred, chatting, need_eg, in_progress, testing, done_cbb, resolved" + parser = argparse.ArgumentParser() parser.add_argument("config_file", type=str, metavar="[config file]") args = parser.parse_args() @@ -25,9 +27,20 @@ db = config.get("DB", "DatabaseFile") keywords = config.get("SEARCH", "Keywords", fallback="") + + + status = config.get("SEARCH", "Status", fallback="") columns = config.get("SEARCH", "Columns", fallback="critical, urgent, bug, feature, wish") noPrio = config.get("SEARCH", "IncludeNoPrio", fallback=False) +search = config.get("SEARCH", "Search", fallback="prio") +if search == "status": + if status == "": + columns = STATES + else: + columns = status + noPrio = False + if noPrio: columns += ", None" diff -r 89469aa41fe1 -r 7161ce4e7ab1 graph.html --- a/graph.html Fri Nov 02 17:06:45 2018 +0100 +++ b/graph.html Mon Nov 12 18:03:26 2018 +0100 @@ -33,55 +33,71 @@ opacity: 1; } - .line.critical { + .line.red { stroke: red; } - .line.critical.legend { + .line.red.legend { fill: red; } - .line.urgent { + .line.orange { stroke: orange; } - .line.urgent.legend { + .line.orange.legend { fill: orange; } - .line.bug { + .line.violet { stroke: violet; } - .line.bug.legend { + .line.violet.legend { fill: violet; } - .line.feature { + .line.chartreuse { stroke: chartreuse; style: stroke-dasharray; } - .line.feature.legend { + .line.chartreuse.legend { fill: chartreuse; } - .line.wish { + .line.blue { stroke: blue; } - .line.wish.legend { + .line.blue.legend { fill: blue; } - .line.noprio { + .line.grey { stroke: grey; } - .line.noprio.legend { + .line.grey.legend { fill: grey; } + .line.aqua{ + stroke: aqua; + } + + .line.aqua.legend { + fill: aqua; + } + + .line.darkgreen { + stroke: darkgreen; + } + + .line.darkgreen.legend 
+            fill: darkgreen;
+        }
+
         .grid .tick {
            stroke: lightgrey;
            opacity: 0.7;
@@ -106,14 +122,11 @@
            makeChart();
        };
 
-        var critical=[];
-        var urgent=[];
-        var bug=[];
-        var feature=[];
-        var wish=[];
-        var noPrio=[];
+
        var timestamp=[];
+        var data=js_data_dickt
+        var linesCSS = ["red", "orange", "violet", "chartreuse", "blue", "grey", "aqua", "darkgreen"]
 
 
        function assignIssueToDate(issueArray, dateArray){
@@ -146,13 +159,9 @@
        function getMaxIssues(){
            maxIssuesOfAllArrays = [];
 
-            maxIssuesOfAllArrays.push(maxInObject(critical));
-            maxIssuesOfAllArrays.push(maxInObject(urgent));
-            maxIssuesOfAllArrays.push(maxInObject(bug));
-            maxIssuesOfAllArrays.push(maxInObject(feature));
-            maxIssuesOfAllArrays.push(maxInObject(wish));
-            maxIssuesOfAllArrays.push(maxInObject(noPrio));
-
+            for (col in data){
+                maxIssuesOfAllArrays.push(maxInObject(data[col]))
+            }
            return Math.max.apply(Math, maxIssuesOfAllArrays)+1;
        }
@@ -201,7 +210,7 @@
            return top_distance;
        }
 
-        function draw_legend_line(svg, width, Ypos, text, issues){
+        function draw_legend_line(svg, width, Ypos, linesColour, text, issues){
            svg.append("svg:text")
                .attr("class", "legend")
                .attr("x", width-30 )
@@ -210,24 +219,24 @@
 
            svg.append("svg:text")
                .attr("class", "legend")
-                .attr("x", width+35 )
+                .attr("x", width+65 )
                .attr("y", Ypos)
                .text(issues);
 
            svg.append("rect")
-                .attr("class", "line " + text.toLowerCase() + " legend")
+                .attr("class", "line " + linesColour.toLowerCase() + " legend")
                .attr("x", width-30)
                .attr("y", Ypos-20)
                .attr("width", 100)
                .attr("height", 2);
        }
 
-        draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), "Critical", critical[critical.length-1].points);
-        draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), "Urgent", urgent[urgent.length-1].points);
-        draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), "Bug", bug[bug.length-1].points);
-        draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), "Feature", feature[feature.length-1].points);
-        draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), "Wish", wish[wish.length-1].points);
-        draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), "NoPrio", noPrio[noPrio.length-1].points);
+        var colourNume = 0
+        for (col in data) {
+            graph = data[col]
+            draw_legend_line(svg, legend_distance, set_propper_distance(distance_steps), linesCSS[colourNume], col, graph[graph.length-1].points);
+            colourNume += 1
+        }
    }
@@ -237,7 +246,7 @@
 
        //declaration
        var sizeOfSystemBorders = 50;
-        var margin = {top: 20, right: 100, bottom: 90, left: 60},
+        var margin = {top: 20, right: 150, bottom: 90, left: 60},
            width = (document.documentElement.clientWidth-sizeOfSystemBorders) - margin.left - margin.right,
            height = (document.documentElement.clientHeight-sizeOfSystemBorders) - margin.top - margin.bottom;
@@ -252,12 +261,12 @@
            .y(function(d) { return y(d.points); });
 
        //lines
-        var criticalLine = base_line;
-        var urgentLine = base_line;
-        var bugLine = base_line;
-        var featureLine = base_line;
-        var wishLine = base_line;
-        var noPrioLine = base_line;
+
+        lines = {}
+        for (col in data) {
+            lines[col] = base_line
+        }
+
        var timestampLine = base_line;
@@ -359,14 +368,13 @@
                .attr("y", -5)
                .text("Issues Nach Zeit");
 
-
-        draw_line(svg, wish, "line wish", wishLine, "0, 0");
-        draw_line(svg, feature, "line feature", featureLine, "3, 3");
-        draw_line(svg, bug, "line bug", bugLine, "7, 7");
-        draw_line(svg, urgent, "line urgent", urgentLine, "13, 13");
-        draw_line(svg, critical, "line critical", criticalLine, "17, 17");
critical", criticalLine, "17, 17"); - draw_line(svg, noPrio, "line noprio", noPrioLine, "17, 17"); - + var shape = 0 + var colourNume = 0 + for (col in data){ + draw_line(svg, data[col], "line " + linesCSS[colourNume] , lines[col], shape + ", " +shape); + colourNume += 1 + shape += 3 + } makeLegend(svg, width); diff -r 89469aa41fe1 -r 7161ce4e7ab1 roundup_cc.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/roundup_cc.py Mon Nov 12 18:03:26 2018 +0100 @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 +"""Connect to roundup-tracker and save status to db. + +Run periodically as often as you want data points to be saved. +""" + +import configparser +import argparse + +from collect_issues import save_stats_in_db +import roundup_content_data as rcd + +PRIO = "critical, urgent, bug, feature, wish" +STATES = "unread, deferred, chatting, need_eg, in_progress, testing, done_cbb, resolved" + +def main(): + + parser = argparse.ArgumentParser() + parser.add_argument("config_file", type=str, metavar="[config file]") + args = parser.parse_args() + + config = configparser.ConfigParser() + config.read(args.config_file) + + base_url = config.get("URL", "BaseURL") + + user = config.get("LOGIN","Username") + password = config.get("LOGIN", "Password") + + login_parameters = ( + ("__login_name", user), + ("__login_password", password), + ("@action", "Login"), + ) + + database_file = config.get("DB", "DatabaseFile") + + keywords = config.get("SEARCH", "Keywords", fallback="").split(", ") + + search = config.get("SEARCH", "Search", fallback="prio") + if search == "prio": + list_of_columns = config.get("SEARCH", "Columns", fallback=PRIO).split(", ") + status = config.get("SEARCH", "Status", fallback="").split(", ") + include_no_prio = config.getboolean("SEARCH", "IncludeNoPrio", fallback= False) + if include_no_prio: + list_of_columns += ["None"] + + elif search == "status": + list_of_columns = config.get("SEARCH", "Status", fallback=STATES).split(", ") + status = [""] + + select_all, select_where, create_db, insert_new = \ + rcd.build_sql_commands(list_of_columns) + + save_stats_in_db(search, login_parameters, base_url, database_file, + list_of_columns, create_db, insert_new, keywords, status) + +if __name__ == '__main__': + main()