diff collect_issues.py @ 1:2df45f6ecd81

new appearance (solid and dotted lines), responsive layout, new legend, new structure, cronjob-friendly dynamic generation of search-strings, dynamic recognition of error-values, ignores non-numeric priority-IDs
author sean
date Tue, 14 Apr 2015 13:32:12 +0200
parents
children 3e9f4a6803d1
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/collect_issues.py	Tue Apr 14 13:32:12 2015 +0200
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+
+""" Fetch issues from a roundup-tracker and save them in a database.
+
+author: Sascha L. Teichmann <sascha.teichmann@intevation.de>
+author: Bernhard Reiter <bernhard@intevation.de>
+author: Sean Engelhardt <sean.engelhardt@intevation.de>
+
+(c) 2010,2015 by Intevation GmbH
+
+This is Free Software under the terms of the
+GNU GENERAL PUBLIC LICENSE Version 3 or later.
+See http://www.gnu.org/licenses/gpl-3.0.txt for details
+
+
+##USAGE EXAMPLE: ##
+
+BASE_URL_DEMO = "http://localhost:8917/demo/"
+SEARCH_URL_DEMO = "issue?@action=export_csv&@columns=title,priority&@filter=status&@pagesize=50&@startwith=0&status=-1,1,2,3,4,5,6,7"
+
+LOGIN_PARAMETERS_DEMO = (
+    ("__login_name", "demo"),
+    ("__login_password", "demo"),
+    ("@action", "Login"),
+    )
+
+save_stats_in_db(LOGIN_PARAMETERS_DEMO, BASE_URL_DEMO, rcd.DATABASE_DEMO, rcd.COLUMNS, rcd.CREATE_DB, rcd.INSERT_NEW, SEARCH_URL_DEMO)
+
+"""
+
+import http.cookiejar
+import urllib.parse
+import urllib.request
+import csv
+import io
+import sqlite3 as db
+import os
+import roundup_content_data as rcd
+
+
+CHECK_ROUNDUP_ORDER = "priority?@action=export_csv&@columns=id,order"
+CHECK_ROUNDUP_SEARCH_VALUES = "status?@action=export_csv&@columns=id&@filter=open&open=1"
+SEARCH_ROUNDUP = "issue?@action=export_csv&@columns=priority&@filter=status&@pagesize=500&@startwith=0&status=-1,{search_values}"
+
+
+
+
+
+
def connect_to_server(params, baseurl):
    """Log in to the roundup tracker and return a cookie-aware URL opener.

    params  -- sequence of (name, value) login form fields
    baseurl -- base URL of the tracker instance
    """
    jar = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(jar))
    login_data = urllib.parse.urlencode(params).encode()
    # POSTing the login form stores the session cookie in the jar,
    # so later requests through this opener are authenticated.
    opener.open(urllib.request.Request(url=baseurl, data=login_data))
    return opener
+
+
def get_csv_from_server(opener, roundup_url, sub_url):
    """Request a CSV export from the tracker and return a DictReader over it.

    opener      -- authenticated opener from connect_to_server()
    roundup_url -- base URL of the tracker
    sub_url     -- export query appended to the base URL
    """
    response = opener.open(urllib.request.Request(url=roundup_url + sub_url))
    # The server answers with bytes; TextIOWrapper decodes them for csv.
    return csv.DictReader(io.TextIOWrapper(response))
+
+
def set_search_paramters_on_URL(url, search_param_csv):
    """Fill the ``{search_values}`` placeholder in *url*.

    url              -- format string containing ``{search_values}``
    search_param_csv -- iterable of rows with an "id" field (e.g. a
                        csv.DictReader over the status export)

    Returns the URL with all ids joined by commas.
    """
    ids = (row["id"] for row in search_param_csv)
    return url.format(search_values=",".join(ids))
+
+
+def check_create_database(database_file, sql_create_db):
+    if not os.path.isfile(database_file):
+        con = None
+        cur = None
+        try:
+            con = db.connect(database_file)
+            cur = con.cursor()
+            try:
+                cur.execute(sql_create_db)
+                con.commit()
+                os.chmod(database_file, 0o644)
+            except:
+                con.rollback()
+                raise
+        finally:
+            if cur:
+                cur.close()
+            if con:
+                con.close()
+
+
def represents_int(s):
    """Return True if int(s) succeeds, else False.

    Also catches TypeError, so non-string values such as None report
    False instead of crashing (the original only caught ValueError).
    """
    try:
        int(s)
        return True
    except (ValueError, TypeError):
        return False
+
+
def issues_to_quantities(issue_csv, columns, orders_csv):
    """Count how many issues fall into each priority bucket.

    issue_csv  -- iterable of rows with a "priority" field
    columns    -- sequence naming the buckets; only its length is used
    orders_csv -- iterable of rows mapping priority "id" to "order"

    Returns a list of counts indexed by (order - 1).

    Issues whose priority id is not listed in *orders_csv* (non-numeric
    ids such as "" or "None", but also unknown numeric ids) are ignored.
    The original raised KeyError for a numeric id missing from the
    order table; skipping is the safe behaviour for a cronjob.
    """
    quantities = [0] * len(columns)

    # int(float(...)) because roundup exports the order as "1.0", "2.0", ...
    order_by_id = {row["id"]: int(float(row["order"])) for row in orders_csv}

    for issue in issue_csv:
        order = order_by_id.get(issue["priority"])
        if order is not None:
            quantities[order - 1] += 1

    return quantities
+
+
def save_issues_to_db(quantities, database_file, sql_create_db, sql_insert_in_db):
    """Append one row of quantities to the sqlite database.

    Creates the database from *sql_create_db* first if it does not
    exist yet.  On any insert error the transaction is rolled back and
    the exception re-raised; cursor and connection are always closed.
    """
    check_create_database(database_file, sql_create_db)

    connection = None
    cursor = None

    try:
        connection = db.connect(database_file)
        cursor = connection.cursor()
        try:
            cursor.execute(sql_insert_in_db, quantities)
            connection.commit()
        except:
            connection.rollback()
            raise
    finally:
        if cursor:
            cursor.close()
        if connection:
            connection.close()
+
+
def save_stats_in_db(login_parmeters, baseurl, db_file, columns, sql_create_db, sql_insert_in_db, searchurl=False):
    """Fetch current issue counts from a roundup tracker and store them.

    login_parmeters -- (name, value) pairs for the tracker login form
    baseurl         -- base URL of the tracker instance
    db_file         -- path of the sqlite database to write to
    columns         -- priority bucket names (defines the row width)
    searchurl       -- optional ready-made search query; when False the
                       query is built from the tracker's status list

    Connection problems are printed instead of raised, so a cronjob
    keeps running even while the tracker is unreachable.
    """
    try:
        opener = connect_to_server(login_parmeters, baseurl)

        # status ids and priority order come live from the tracker,
        # so the script adapts to tracker configuration changes
        search_operators_csv = get_csv_from_server(
            opener, baseurl, CHECK_ROUNDUP_SEARCH_VALUES)
        order_csv = get_csv_from_server(opener, baseurl, CHECK_ROUNDUP_ORDER)

        if searchurl == False:
            formated_search_url = set_search_paramters_on_URL(
                SEARCH_ROUNDUP, search_operators_csv)
        else:
            formated_search_url = searchurl

        current_issues_csv = get_csv_from_server(
            opener, baseurl, formated_search_url)

        opener.close()

        quantities = issues_to_quantities(current_issues_csv, columns, order_csv)

        save_issues_to_db(quantities, db_file, sql_create_db, sql_insert_in_db)

    except urllib.error.URLError as e:
        print("No Valid Connection to server : " + baseurl + "\nerror: " + str(e))
+
+
+
+
+
+
This site is hosted by Intevation GmbH (Datenschutzerklärung und Impressum | Privacy Policy and Imprint)