comparison collect_issues.py @ 28:e2864dabdb8c

Fixes a logical error in the filtering of columns. * The columns are stored in the same order as they appear in the config. * display_issues.py has been renamed to roundup_cc_display.py.
author Magnus Schieder <mschieder@intevation.de>
date Thu, 22 Nov 2018 12:57:20 +0100
parents cdab667c6abb
children 0d6504d02a6b
comparison
equal deleted inserted replaced
27:cdab667c6abb 28:e2864dabdb8c
22 import sqlite3 as db 22 import sqlite3 as db
23 import os 23 import os
24 import sys 24 import sys
25 25
26 26
27 # Getting all priority in their order.
28 CHECK_ROUNDUP_ORDER_PRIO = "priority?@action=export_csv&@columns=id,order"
29 # Getting all statuses in their order.
30 CHECK_ROUNDUP_ORDER_STATUS = "status?@action=export_csv&@columns=id,order"
31
32 # Getting keywords and their ids. 27 # Getting keywords and their ids.
33 CHECK_KEYWORD_VALUES = "keyword?@action=export_csv&@columns=id,name" 28 CHECK_KEYWORD_VALUES = "keyword?@action=export_csv&@columns=id,name"
34 # Getting states and their ids. 29 # Getting states and their ids.
35 CHECK_STATUS_VALUES = "status?@action=export_csv&@columns=id,name" 30 CHECK_STATUS_VALUES = "status?@action=export_csv&@columns=id,name"
31 # Getting priority and their ids.
32 CHECK_PRIO_VALUES = "priority?@action=export_csv&@columns=id,name"
36 33
37 # Getting the priority of each issue with the filter status and keywords 34 # Getting the priority of each issue with the filter status ,keywords ,priority
38 SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword&@pagesize=500&@startwith=0&status={search_values}&keyword={keyword_values}" 35 SEARCH_ROUNDUP_PRIO = "issue?@action=export_csv&@columns=priority&@filter=status,keyword,priority&@pagesize=500&@startwith=0&status={status_values}&keyword={keyword_values}&priority={priority_values}"
39 # Getting the status of each issue with the filter keywords 36 # Getting the status of each issue with the filter keywords, status
40 SEARCH_ROUNDUP_STATUS = "issue?@action=export_csv&@columns=status&@filter=keyword&@pagesize=500&@startwith=0&keyword={keyword_values}" 37 SEARCH_ROUNDUP_STATUS = "issue?@action=export_csv&@columns=status&@filter=keyword&@pagesize=500&@startwith=0&keyword={keyword_values}&status={status_values}"
41 38
42 39
43 def connect_to_server(params, baseurl): 40 def connect_to_server(params, baseurl):
44 enc_data = urllib.parse.urlencode(params).encode() 41 enc_data = urllib.parse.urlencode(params).encode()
45 cj = http.cookiejar.CookieJar() 42 cj = http.cookiejar.CookieJar()
74 if cur: 71 if cur:
75 cur.close() 72 cur.close()
76 if con: 73 if con:
77 con.close() 74 con.close()
78 75
79 def get_ids(opener, baseurl, parameter, url): 76 def get_ids(opener, baseurl, parameter, url, include_no_prio=False):
80 if parameter == [""]: 77 if parameter == [""]:
81 return "" 78 return ("", [])
82 79
83 parameter_csv = get_csv_from_server(opener, baseurl, url) 80 parameter_csv = get_csv_from_server(opener, baseurl, url)
84 parameter_dict = {} 81 parameter_dict = {}
85 for x in parameter_csv: 82 for x in parameter_csv:
86 parameter_dict[x["name"]] = x["id"] 83 parameter_dict[x["name"]] = x["id"]
84
85 if include_no_prio:
86 parameter_dict["None"] = "-1"
87 87
88 parameter_ids = [] 88 parameter_ids = []
89 for x in parameter: 89 for x in parameter:
90 if x not in parameter_dict: 90 if x not in parameter_dict:
91 print('The parameter "%s" does not exist in the tracker.' % x) 91 print('The parameter "%s" does not exist in the tracker.' % x)
92 sys.exit(0) 92 sys.exit(0)
93 93
94 parameter_ids.append(parameter_dict[x]) 94 parameter_ids.append(parameter_dict[x])
95 95
96 return ",".join(parameter_ids) 96 return (",".join(parameter_ids), parameter_ids)
97 97
98 98
99 def issues_to_quantities(issue_csv, columns, orders_csv): 99 def issues_to_quantities(issue_csv, columns_ids):
100 """Count issues per priority. 100 """Count issues per priority.
101 101
102 Returns: a list of ints, containing how often a prio occurred [:-1] 102 Returns: a list of ints, containing how often a prio occurred [:-1]
103 in order of the priorities, with the last being the "None" prio 103 in order of the priorities, with the last being the "None" prio
104 """ 104 """
105 105
106 quantities = [0] * (len(columns) +1)
107 order_dict = {} 106 order_dict = {}
107 z = 0
108 for x in columns_ids:
109 order_dict[x] = z
110 z += 1
108 111
109 #convert the csv-dict reader to real dict 112 quantities = [0] * z
110 for row in orders_csv:
111 order_dict[row["id"]] = int(float(row["order"])) # int(float()) because the order-value is indeed "1.0, 2.0" etc
112 113
113 for issue in issue_csv: 114 for issue in issue_csv:
114 priority = issue[issue_csv.fieldnames[0]] 115 priority = issue[issue_csv.fieldnames[0]]
115 116
116 if priority.isdigit() == True : 117 if priority in order_dict:
117 quantities[order_dict[priority] -1 ] += 1 118 if priority.isdigit() == True :
118 else: # no priority set 119 quantities[order_dict[priority]] += 1
119 quantities[-1] += 1 120 else:
120 121 quantities[-1] += 1
121 # print("quantities : " + str(quantities)) 122 #print("quantities : " + str(quantities))
122 123
123 return quantities 124 return quantities
124 125
125 126
126 def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db): 127 def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
143 cur.close() 144 cur.close()
144 if con: 145 if con:
145 con.close() 146 con.close()
146 147
147 148
148 def save_stats_in_db(search, login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio=False): 149 def save_stats_in_db(search, login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, keywords, status, include_no_prio):
149 try: 150 try:
150 151
151 opener = connect_to_server(login_parmeters, baseurl) 152 opener = connect_to_server(login_parmeters, baseurl)
152 153
153 keywords_ids_url = get_ids(opener, baseurl, keywords, CHECK_KEYWORD_VALUES) 154 keywords_ids_url, _ = get_ids(opener, baseurl, keywords, CHECK_KEYWORD_VALUES)
154 155
155 if search == "prio": 156 if search == "prio":
156 order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER_PRIO) 157 status_ids_url, _ = get_ids(opener, baseurl, status, CHECK_STATUS_VALUES, include_no_prio)
157 status_ids_url = get_ids(opener, baseurl, status, CHECK_STATUS_VALUES) 158 prio_ids_url, columns_ids = get_ids(opener, baseurl, columns, CHECK_PRIO_VALUES, include_no_prio)
158 formated_search_url = SEARCH_ROUNDUP_PRIO.format(search_values=status_ids_url, keyword_values=keywords_ids_url) 159 formated_search_url = SEARCH_ROUNDUP_PRIO.format(status_values=status_ids_url,
160 keyword_values=keywords_ids_url, priority_values=prio_ids_url)
159 elif search == "status": 161 elif search == "status":
160 order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER_STATUS) 162 status_ids_url, columns_ids = get_ids(opener, baseurl, columns, CHECK_STATUS_VALUES)
161 formated_search_url = SEARCH_ROUNDUP_STATUS.format(keyword_values=keywords_ids_url) 163 formated_search_url = SEARCH_ROUNDUP_STATUS.format(keyword_values=keywords_ids_url,
164 status_values=status_ids_url)
162 165
163 current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url) 166 current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url)
164 167
165 opener.close() 168 opener.close()
169 #print(baseurl + formated_search_url)
166 170
167 quantities = issues_to_quantities(current_issues_csv, columns, order_csv) 171 quantities = issues_to_quantities(current_issues_csv, columns_ids)
168 if not include_no_prio:
169 quantities = quantities[:-1]
170 172
171 save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db) 173 save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)
172 174
173 except urllib.error.URLError as e: 175 except urllib.error.URLError as e:
174 print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e)) 176 print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))
This site is hosted by Intevation GmbH (Datenschutzerklärung und Impressum | Privacy Policy and Imprint)