#!/usr/bin/env python3

""" Fetch issues from a roundup-tracker and save them in a database.

author: Sascha L. Teichmann <sascha.teichmann@intevation.de>
author: Bernhard Reiter <bernhard@intevation.de>
author: Sean Engelhardt <sean.engelhardt@intevation.de>

(c) 2010, 2015 by Intevation GmbH

This is Free Software under the terms of the
GNU GENERAL PUBLIC LICENSE Version 3 or later.
See http://www.gnu.org/licenses/gpl-3.0.txt for details.


## USAGE EXAMPLE ##

BASE_URL_DEMO = "http://localhost:8917/demo/"
SEARCH_URL_DEMO = "issue?@action=export_csv&@columns=title,priority&@filter=status&@pagesize=50&@startwith=0&status=-1,1,2,3,4,5,6,7"

LOGIN_PARAMETERS_DEMO = (
    ("__login_name", "demo"),
    ("__login_password", "demo"),
    ("@action", "Login"),
)

save_stats_in_db(LOGIN_PARAMETERS_DEMO, BASE_URL_DEMO, rcd.DATABASE_DEMO,
                 rcd.COLUMNS, rcd.CREATE_DB, rcd.INSERT_NEW, SEARCH_URL_DEMO)
"""

import http.cookiejar
import urllib.error
import urllib.parse
import urllib.request
import csv
import io
import sqlite3 as db
import os

import roundup_cc.roundup_content_data as rcd


CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1"
SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}"
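
# The three exports used below: CHECK_ROUNDUP_ORDER lists the priority ids
# together with their sort order, CHECK_ROUNDUP_SEARCH_VALUES lists the ids
# of all "open" status values, and SEARCH_ROUNDUP is a template whose
# {search_values} placeholder is filled with those status ids.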


def connect_to_server(params, baseurl):
    """Log in to the tracker and return an opener that keeps the session cookie."""
    enc_data = urllib.parse.urlencode(params).encode()
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    req = urllib.request.Request(url=baseurl, data=enc_data)
    opener.open(req)
    return opener


def get_csv_from_server(opener, roundup_url, sub_url):
    """Request a CSV export from the tracker and return it as a csv.DictReader."""
    csv_req = urllib.request.Request(url=roundup_url + sub_url)
    f = opener.open(csv_req)
    csv_reader = csv.DictReader(io.TextIOWrapper(f))
    return csv_reader
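
# Note: the returned reader pulls rows lazily from the HTTP response as it
# is iterated, so each export is consumed by the caller, not here.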


def set_search_parameters_on_URL(url, search_param_csv):
    """Fill the {search_values} placeholder in url with the ids from search_param_csv."""
    id_list = []

    for row in search_param_csv:
        id_list.append(row["id"])

    return url.format(search_values=",".join(id_list))
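
# Sketch of the expansion (hypothetical ids): rows like
# [{"id": "1"}, {"id": "2"}] turn SEARCH_ROUNDUP's "status=-1,{search_values}"
# into "status=-1,1,2".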


def check_create_database(database_file, sql_create_db):
    """Create the SQLite database file if it does not exist yet."""
    if not os.path.isfile(database_file):
        con = None
        cur = None
        try:
            con = db.connect(database_file)
            cur = con.cursor()
            try:
                cur.execute(sql_create_db)
                con.commit()
                os.chmod(database_file, 0o644)
            except:
                con.rollback()
                raise
        finally:
            if cur:
                cur.close()
            if con:
                con.close()


def issues_to_quantities(issue_csv, columns, orders_csv):
    """Count how many issues fall into each priority, in the tracker's sort order."""
    quantities = [0] * len(columns)
    order_dict = {}

    # Convert the csv.DictReader into a real dict mapping priority id -> order.
    for row in orders_csv:
        # int(float()) because the order values are exported as "1.0", "2.0", etc.
        order_dict[row["id"]] = int(float(row["order"]))

    for issue in issue_csv:
        priority = issue["priority"]

        if priority.isdigit():
            quantities[order_dict[priority] - 1] += 1

    return quantities
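
# Worked example (hypothetical data): with columns ("critical", "urgent"),
# an order export yielding {"1": 1, "2": 2} and issues with priorities
# ["1", "2", "1"], quantities comes out as [2, 1].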


def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Insert one row of quantities into the database, creating it if needed."""
    check_create_database(database_file, sql_create_db)

    cur = None
    con = None

    try:
        con = db.connect(database_file)
        cur = con.cursor()
        try:
            cur.execute(sql_insert_in_db, quantities)
            con.commit()
        except:
            con.rollback()
            raise
    finally:
        if cur:
            cur.close()
        if con:
            con.close()
|
134 |
|
135 |
|
136 def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, searchurl=False): |
|
137 try: |
|
138 |
|
139 opener = connect_to_server(login_parmeters, baseurl) |
|
140 |
|
141 search_operators_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES) |
|
142 order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER) |
|
143 |
|
144 if searchurl == False: |
|
145 formated_search_url = set_search_paramters_on_URL(SEARCH_ROUNDUP, search_operators_csv) |
|
146 else: |
|
147 formated_search_url = searchurl |
|
148 |
|
149 current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url) |
|
150 |
|
151 opener.close() |
|
152 |
|
153 quantities = issues_to_quantities(current_issues_csv, columns, order_csv) |
|
154 |
|
155 save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db) |
|
156 |
|
157 except urllib.error.URLError as e: |
|
158 print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e)) |