#!/usr/bin/env python3

"""Fetch issues from a roundup-tracker and save them in a database.

author: Sascha L. Teichmann
author: Bernhard Reiter
author: Sean Engelhardt

(c) 2010,2015 by Intevation GmbH

This is Free Software under the terms of the
GNU GENERAL PUBLIC LICENSE Version 3 or later.
See http://www.gnu.org/licenses/gpl-3.0.txt for details


##USAGE EXAMPLE: ##

BASE_URL_DEMO = "http://localhost:8917/demo/"
SEARCH_URL_DEMO = "issue?@action=export_csv&@columns=title,priority&@filter=status&@pagesize=50&@startwith=0&status=-1,1,2,3,4,5,6,7"

LOGIN_PARAMETERS_DEMO = (
    ("__login_name", "demo"),
    ("__login_password", "demo"),
    ("@action", "Login"),
)

save_stats_in_db(LOGIN_PARAMETERS_DEMO, BASE_URL_DEMO, rcd.DATABASE_DEMO, rcd.COLUMNS, rcd.CREATE_DB, rcd.INSERT_NEW, SEARCH_URL_DEMO)
"""

import http.cookiejar
import urllib.parse
import urllib.request
import urllib.error
import csv
import io
import sqlite3 as db
import os

import roundup_cc.roundup_content_data as rcd


# Sub-URLs (appended to the tracker's base URL) for the CSV exports used below.
CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1"
SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}"


def connect_to_server(params, baseurl):
    """Log in to the roundup tracker and return a cookie-carrying opener.

    params: sequence of (name, value) pairs with the login form data
        (see LOGIN_PARAMETERS_DEMO in the module docstring).
    baseurl: base URL of the tracker, e.g. "http://localhost:8917/demo/".

    Returns a urllib OpenerDirector whose CookieJar holds the session
    cookie, so subsequent requests made through it are authenticated.
    Raises urllib.error.URLError if the server is unreachable.
    """
    enc_data = urllib.parse.urlencode(params).encode()
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    req = urllib.request.Request(url=baseurl, data=enc_data)
    # Only the session cookie matters here; close the login response
    # immediately instead of leaking the open connection.
    with opener.open(req):
        pass
    return opener
def get_csv_from_server(opener, roundup_url, sub_url):
    """Request a CSV export from the tracker and return a csv.DictReader.

    opener: authenticated opener from connect_to_server().
    roundup_url: tracker base URL; sub_url: export query appended to it.

    NOTE: the returned reader is lazy — the HTTP response stays open
    until the reader has been fully consumed.
    """
    csv_req = urllib.request.Request(url=roundup_url + sub_url)
    response = opener.open(csv_req)
    return csv.DictReader(io.TextIOWrapper(response))


def set_search_paramters_on_URL(url, search_param_csv):
    """Fill the {search_values} placeholder in *url*.

    search_param_csv: iterable of rows (dicts) with an "id" column;
    the ids are joined with commas into the placeholder.
    """
    id_list = [row["id"] for row in search_param_csv]
    return url.format(search_values=",".join(id_list))


def check_create_database(database_file, sql_create_db):
    """Create *database_file* by running *sql_create_db*, unless it exists."""
    if os.path.isfile(database_file):
        return

    con = None
    cur = None
    try:
        con = db.connect(database_file)
        cur = con.cursor()
        try:
            cur.execute(sql_create_db)
            con.commit()
            # The freshly created file should be world-readable.
            os.chmod(database_file, 0o644)
        except Exception:
            con.rollback()
            raise
    finally:
        if cur:
            cur.close()
        if con:
            con.close()


def issues_to_quantities(issue_csv, columns, orders_csv):
    """Count issues per priority.

    issue_csv: rows with a "priority" column (priority id as string).
    columns: priority column names; only the length is used — it fixes
        the number of priority slots.
    orders_csv: rows mapping a priority "id" to its 1-based "order".

    Returns a list of counts where index 0 holds the priority whose
    order is 1.  Issues whose priority is not a digit string (e.g. the
    empty "no priority" value) are ignored.
    """
    quantities = [0] * len(columns)

    # int(float()) because the order values arrive as "1.0", "2.0", ...
    order_of = {row["id"]: int(float(row["order"])) for row in orders_csv}

    for issue in issue_csv:
        priority = issue["priority"]
        if priority.isdigit():
            quantities[order_of[priority] - 1] += 1

    return quantities


def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Insert one row of priority counts into the stats database,
    creating the database first if necessary.
    """
    check_create_database(database_file, sql_create_db)

    con = None
    cur = None
    try:
        con = db.connect(database_file)
        cur = con.cursor()
        try:
            cur.execute(sql_insert_in_db, quantities)
            con.commit()
        except Exception:
            con.rollback()
            raise
    finally:
        if cur:
            cur.close()
        if con:
            con.close()


def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, searchurl=False):
    """Fetch the current issue counts from the tracker and store them.

    login_parmeters: login form data for connect_to_server().
    baseurl: tracker base URL.
    db_file / sql_create_db / sql_insert_in_db: target SQLite file and
        the statements to create it and insert one row of counts.
    columns: priority column names (length = number of priorities).
    searchurl: optional ready-made export query; if falsy (default) the
        query is built from the tracker's open status values.

    Connection problems are reported on stdout instead of raising.
    """
    try:
        opener = connect_to_server(login_parmeters, baseurl)

        search_operators_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
        order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER)

        # "not searchurl" also treats None/"" as "build the default
        # query", which the old "searchurl == False" test did not.
        if not searchurl:
            formated_search_url = set_search_paramters_on_URL(SEARCH_ROUNDUP, search_operators_csv)
        else:
            formated_search_url = searchurl

        current_issues_csv = get_csv_from_server(opener, baseurl, formated_search_url)

        opener.close()

        quantities = issues_to_quantities(current_issues_csv, columns, order_csv)

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))