Mercurial > roundup-cc
comparison collect_issues.py @ 1:2df45f6ecd81
new appearance (solid and dotted lines), responsive layout, new legend,
new structure, cronjob-friendly
dynamic generation of search-strings, dynamic recognition of error-values,
ignores non-numeric priority-IDs
author | sean |
---|---|
date | Tue, 14 Apr 2015 13:32:12 +0200 |
parents | |
children | 3e9f4a6803d1 |
comparison
equal
deleted
inserted
replaced
0:3f139db894f1 | 1:2df45f6ecd81 |
---|---|
1 #!/usr/bin/env python3 | |
2 | |
3 """ Fetch issues from a roundup-tracker and save them in a database. | |
4 | |
5 author: Sascha L. Teichmann <sascha.teichmann@intevation.de> | |
6 author: Bernhard Reiter <bernhard@intevation.de> | |
7 author: Sean Engelhardt <sean.engelhardt@intevation.de> | |
8 | |
9 (c) 2010,2015 by Intevation GmbH | |
10 | |
11 This is Free Software under the terms of the | |
12 GNU GENERAL PUBLIC LICENSE Version 3 or later. | |
13 See http://www.gnu.org/licenses/gpl-3.0.txt for details | |
14 | |
15 | |
16 ##USAGE EXAMPLE: ## | |
17 | |
18 BASE_URL_DEMO = "http://localhost:8917/demo/" | |
19 SEARCH_URL_DEMO = "issue?@action=export_csv&@columns=title,priority&@filter=status&@pagesize=50&@startwith=0&status=-1,1,2,3,4,5,6,7" | |
20 | |
21 LOGIN_PARAMETERS_DEMO = ( | |
22 ("__login_name", "demo"), | |
23 ("__login_password", "demo"), | |
24 ("@action", "Login"), | |
25 ) | |
26 | |
27 save_stats_in_db(LOGIN_PARAMETERS_DEMO, BASE_URL_DEMO, rcd.DATABASE_DEMO, rcd.COLUMNS, rcd.CREATE_DB, rcd.INSERT_NEW, SEARCH_URL_DEMO) | |
28 | |
29 """ | |
30 | |
31 import http.cookiejar | |
32 import urllib.parse | |
33 import urllib.request | |
34 import csv | |
35 import io | |
36 import sqlite3 as db | |
37 import os | |
38 import roundup_content_data as rcd | |
39 | |
40 | |
# Roundup CSV-export sub-URLs (appended to the tracker's base URL).

# Export the priority table: id plus its sort "order" value.
CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
# Export the ids of all status values the tracker considers "open".
CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1"
# Issue export template; {search_values} is filled with the open status ids
# (see set_search_paramters_on_URL).
SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}"
44 | |
45 | |
46 | |
47 | |
48 | |
49 | |
def connect_to_server(params, baseurl):
    """Log in to the roundup tracker and return a cookie-aware opener.

    params: sequence of (key, value) login form fields
    baseurl: base URL of the roundup instance

    The returned opener keeps the session cookie, so subsequent requests
    made through it are authenticated.
    """
    cookie_jar = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cookie_jar))
    login_request = urllib.request.Request(
        url=baseurl, data=urllib.parse.urlencode(params).encode())
    opener.open(login_request)
    return opener
57 | |
58 | |
def get_csv_from_server(opener, roundup_url, sub_url):
    """Request a CSV export from the tracker and return a csv.DictReader.

    opener: authenticated URL opener (see connect_to_server)
    roundup_url: base URL of the tracker
    sub_url: export query appended to the base URL
    """
    response = opener.open(urllib.request.Request(url=roundup_url + sub_url))
    return csv.DictReader(io.TextIOWrapper(response))
64 | |
65 | |
def set_search_paramters_on_URL(url, search_param_csv):
    """Fill the {search_values} placeholder in *url*.

    url: template containing a "{search_values}" placeholder
    search_param_csv: iterable of rows with an "id" field

    Returns the template with the placeholder replaced by the
    comma-joined ids.
    """
    joined_ids = ",".join(row["id"] for row in search_param_csv)
    return url.format(search_values=joined_ids)
76 | |
77 | |
def check_create_database(database_file, sql_create_db):
    """Create the sqlite database file if it does not exist yet.

    Runs *sql_create_db* once against a fresh database, commits, and
    makes the file world-readable (mode 0644).  An already existing
    file is left completely untouched.  On any error during schema
    creation the transaction is rolled back and the error re-raised.
    """
    if os.path.isfile(database_file):
        return

    connection = db.connect(database_file)
    try:
        cursor = connection.cursor()
        try:
            try:
                cursor.execute(sql_create_db)
                connection.commit()
                # cron-friendly: let other users read the statistics file
                os.chmod(database_file, 0o644)
            except:
                connection.rollback()
                raise
        finally:
            cursor.close()
    finally:
        connection.close()
97 | |
98 | |
def represents_int(s):
    """Return True if *s* can be parsed as an integer, else False.

    Also catches TypeError so that non-string/number values such as
    None are reported as False instead of raising (the original only
    caught ValueError).
    """
    try:
        int(s)
        return True
    except (ValueError, TypeError):
        return False
105 | |
106 | |
def issues_to_quantities(issue_csv, columns, orders_csv):
    """Count how many issues fall into each priority bucket.

    issue_csv: rows with a "priority" field (priority id as string)
    columns: one entry per priority; only its length is used
    orders_csv: rows mapping priority "id" to its sort "order"

    Returns a list of counters indexed by the priority's 1-based order.
    Issues whose priority id is not numeric (error values) are skipped.
    """
    quantities = [0] * len(columns)

    # Priority id -> position; "order" is exported as "1.0", "2.0", ...
    # hence the float() detour before int().
    position_of = {row["id"]: int(float(row["order"])) for row in orders_csv}

    for issue in issue_csv:
        priority = issue["priority"]
        try:
            int(priority)
        except ValueError:
            continue  # non-numeric priority id: ignore this issue
        quantities[position_of[priority] - 1] += 1

    return quantities
125 | |
126 | |
def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Insert one row of priority counters into the statistics database.

    The database file (and its schema) is created first if necessary.
    On an insert error the transaction is rolled back and the error
    re-raised; cursor and connection are always closed.
    """
    check_create_database(database_file, sql_create_db)

    connection = db.connect(database_file)
    try:
        cursor = connection.cursor()
        try:
            try:
                cursor.execute(sql_insert_in_db, quantities)
                connection.commit()
            except:
                connection.rollback()
                raise
        finally:
            cursor.close()
    finally:
        connection.close()
147 | |
148 | |
def save_stats_in_db(login_parmeters, baseurl, db_file, columns,
                     sql_create_db, sql_insert_in_db, searchurl=False):
    """Fetch current issue counts from a roundup tracker and store them.

    login_parmeters: sequence of (key, value) login form fields
    baseurl: base URL of the roundup instance
    db_file: path of the sqlite statistics database
    columns: one entry per priority (determines counter list length)
    sql_create_db / sql_insert_in_db: schema and insert statements
    searchurl: optional ready-made issue export URL; when falsy
        (default) the URL is built dynamically from the tracker's
        open status values

    Connection failures are reported on stdout instead of raising, so
    the function is safe to run unattended (e.g. from cron).
    """
    try:
        opener = connect_to_server(login_parmeters, baseurl)

        search_operators_csv = get_csv_from_server(
            opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
        order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER)

        # Fix: was `searchurl == False`; treat any falsy value (False,
        # None, "") as "build the URL dynamically" instead of silently
        # using it as an export URL.
        if not searchurl:
            formated_search_url = set_search_paramters_on_URL(
                SEARCH_ROUNDUP, search_operators_csv)
        else:
            formated_search_url = searchurl

        current_issues_csv = get_csv_from_server(
            opener, baseurl, formated_search_url)

        opener.close()

        quantities = issues_to_quantities(current_issues_csv, columns, order_csv)

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        # urllib.error is reachable because urllib.request imports it.
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))
172 | |
173 | |
174 | |
175 | |
176 | |
177 |