diff --git a/lib/constants.py b/lib/constants.py
index b200e0a..7587199 100644
--- a/lib/constants.py
+++ b/lib/constants.py
@@ -1,13 +1,16 @@
 PROJECTS = [
     'AA',
     'AAH',
-    'ANSTRAT',
-    'AAPRFE',
     'AAP',
-    'ACA',
     'AAPBUILD',
+    'AAPRFE',
+    'ACA',
+    'ANSTRAT',
+    'HATSTRAT',
+    'OCPSTRAT',
     'PARTNERENG',
-    'PLMCORE',
+    #'PLMCORE',
+    'TELCOSTRAT',
 ]

 ISSUE_COLUMN_NAMES = [
diff --git a/lib/database.py b/lib/database.py
index 8209c68..e7c7576 100644
--- a/lib/database.py
+++ b/lib/database.py
@@ -33,6 +33,7 @@
     priority VARCHAR(50),
     data JSONB,
     history JSONB,
+    CONSTRAINT unique_issueid UNIQUE (id),
     CONSTRAINT unique_project_number UNIQUE (project, number)
 );
 '''
diff --git a/lib/flaskapp.py b/lib/flaskapp.py
index 3ca9708..0f1215f 100644
--- a/lib/flaskapp.py
+++ b/lib/flaskapp.py
@@ -26,19 +26,6 @@
 atexit.register(conn.close)


-CLOSED = ['done', 'obsolete']
-
-
-'''
-datafiles = glob.glob('.data/AAH-*.json')
-all_jiras = []
-for df in datafiles:
-    logger.info(f'loading {df}')
-    with open(df, 'r') as f:
-        all_jiras.append(json.loads(f.read()))
-jiras = [x for x in all_jiras if x['fields']['status']['name'].lower() != 'closed']
-'''
-

 app = Flask(__name__)
@@ -58,6 +45,17 @@ def ui():
     return render_template('main.html')


+@app.route('/ui/issues')
+@app.route('/ui/issues/')
+def ui_issues():
+    return render_template('issues.html')
+
+
+@app.route('/ui/projects')
+def ui_projects():
+    return render_template('projects.html')
+
+
 @app.route('/ui/tree')
 def ui_tree():
     return render_template('tree.html')
@@ -68,13 +66,31 @@ def ui_burndown():
     return render_template('burndown.html')


+@app.route('/api/projects')
+def projects():
+
+    projects = []
+    with conn.cursor() as cur:
+        cur.execute(f"SELECT DISTINCT(project) FROM jira_issues ORDER BY project")
+        results = cur.fetchall()
+        for row in results:
+            projects.append(row[0])
+
+    return jsonify(projects)
+
+
 @app.route('/api/tickets')
+@app.route('/api/tickets/')
 def tickets():
-    #filtered = [x for x in jiras if x['fields']['status']['name'].lower() not in CLOSED]
-    cols = ['key', 'created', 'updated', 'created_by', 'assigned_to', 'type', 'priority', 'state', 'summary']
+    projects = request.args.getlist("project")
+    if projects:
+        project = projects[0]
+    else:
+        project = 'AAH'

-    WHERE = "WHERE project = 'AAH' AND state != 'Closed'"
+    cols = ['key', 'created', 'updated', 'created_by', 'assigned_to', 'type', 'priority', 'state', 'summary']
+    WHERE = f"WHERE project = '{project}' AND state != 'Closed'"

     filtered = []
     with conn.cursor() as cur:
@@ -152,24 +168,6 @@ def tickets_tree():

     imap = {}

-    '''
-    for ik,idata in issue_keys.items():
-        if ik is None:
-            continue
-        if not ik.startswith('AAH-'):
-            continue
-        if ik not in imap:
-            imap[ik] = {
-                'key': ik,
-                'type': idata['type'],
-                'status': idata['state'],
-                'summary': idata['summary'],
-                'parent_key': None,
-            }
-        elif imap[ik]['summary'] is None:
-            imap[ik]['summary'] = idata['summary']
-    '''
-
     for node in nodes:
         #if node['child'] and not node['child'].startswith('AAH-'):
         #    continue
@@ -204,9 +202,15 @@
 @app.route('/api/tickets_burndown')
 @app.route('/api/tickets_burndown/')
 def tickets_burndown():
+
+    projects = request.args.getlist("project")
+    if not projects:
+        return redirect('/api/tickets_burndown/?project=AAH')
+
     sw = StatsWrapper()
+    data = sw.burndown(projects, frequency='monthly')
     #data = sw.burndown('AAH', frequency='monthly')
-    data = sw.burndown('AAH', frequency='weekly')
+    #data = sw.burndown('AAH', frequency='weekly')
     data = json.loads(data)
     keys = list(data.keys())
     keymap = [(x, x.split('T')[0]) for x in keys]
diff --git a/lib/jira_wrapper.py b/lib/jira_wrapper.py
index 9ee3d33..a9f61e6 100644
--- a/lib/jira_wrapper.py
+++ b/lib/jira_wrapper.py
@@ -16,6 +16,7 @@
 import json
 import logging
 import os
+import subprocess
 import time
 from datetime import timezone
 import jira
@@ -47,6 +48,37 @@ def __init__(self, message="Failed to fetch history data."):
         super().__init__(self.message)


+class DiskCacheWrapper:
+
+    def __init__(self, cachedir):
+        self.cachedir = cachedir
+
+    def write_issue(self, data):
+        fn = os.path.join(self.cachedir, 'by_id', data['id'] + '.json')
+        dn = os.path.dirname(fn)
+        if not os.path.exists(dn):
+            os.makedirs(dn)
+        with open(fn, 'w') as f:
+            f.write(json.dumps(data, indent=2, sort_keys=True))
+
+        # make a by key symlink
+        dn = os.path.join(self.cachedir, 'by_key')
+        if not os.path.exists(dn):
+            os.makedirs(dn)
+        src = '../by_id/' + os.path.basename(fn)
+        dst = f'{data["key"]}.json'
+        subprocess.run(f'rm -f {dst}; ln -s {src} {dst}', cwd=dn, shell=True)
+
+        return fn
+
+    def get_fn_for_issue_by_key(self, key):
+        path = os.path.join(self.cachedir, 'by_key', f'{key}.json')
+        path = os.path.abspath(path)
+        if not os.path.exists(path):
+            return None
+        return os.path.realpath(path)
+
+
 class DataWrapper:
     def __init__(self, fn):
         self.datafile = fn
@@ -144,6 +176,7 @@ class JiraWrapper:

     processed = None
     project = None
+    number = None
     #errata = None
     #bugzillas = None
     #jira_issues = None
@@ -152,6 +185,8 @@

     def __init__(self):

+        self.dcw = DiskCacheWrapper(self.cachedir)
+
         self.project = None
         self.processed = {}

@@ -179,6 +214,10 @@ def scrape(self, project=None, number=None):
         self.scrape_jira_issues()
         self.process_relationships()

+    def map_relationships(self, project):
+        self.project = project
+        self.process_relationships()
+
     def store_issue_column(self, project, number, colname, value):
         with self.conn.cursor() as cur:
             sql = f''' UPDATE jira_issues SET {colname} = %s WHERE project = %s AND number = %s '''
@@ -235,7 +274,7 @@ def get_invalid_numbers(self, project):
             rows = cur.fetchall()
             return [x[0] for x in rows]

-    def get_issue_with_history(self, issue_key):
+    def get_issue_with_history(self, issue_key, fallback=False):

         count = 1
         while True:
@@ -244,10 +283,11 @@

             try:
                 return self.jira_client.issue(issue_key, expand='changelog')
             except requests.exceptions.JSONDecodeError as e:
-                #logger.error(e)
+                logger.error(e)
                 #import epdb; epdb.st()
-                #time.sleep(.5)
-                return self.jira_client.issue(issue_key)
+                time.sleep(.5)
+                #return self.jira_client.issue(issue_key)
+                #return self.get_issue_with_history(issue_key, fallback=True)
             except requests.exceptions.ChunkedEncodingError as e:
                 logger.error(e)
@@ -403,15 +443,21 @@
             # get history
             logger.info(f'get history for {project}-{number}')
             history = self.get_issue_history(project, number, issue)
+            if history is None:
+                processed.append(number)
+                continue
             logger.info(f'found {len(history)} events for {project}-{number}')

             # write to json file
             ds = issue.raw
             ds['history'] = history
+            '''
             fn = os.path.join(self.cachedir, ds['key'] + '.json')
             logger.info(f'write {fn}')
             with open(fn, 'w') as f:
                 f.write(json.dumps(issue.raw))
+            '''
+            fn = self.dcw.write_issue(ds)

             # write to DB
             self.store_issue_to_database_by_filename(fn)
@@ -484,9 +530,12 @@
             ds = issue.raw
             history = self.get_issue_history(self.project, mn, issue)
             ds['history'] = history
+            '''
             fn = os.path.join(self.cachedir, ds['key'] + '.json')
             with open(fn, 'w') as f:
                 f.write(json.dumps(issue.raw))
+            '''
+            fn = self.dcw.write_issue(ds)

             # write to DB
             self.store_issue_to_database_by_filename(fn)
@@ -515,16 +564,16 @@ def process_relationships(self):

         logger.info(f'processing relationships for {self.project}')

-        if self.number:
+        if self.number is not None:
             keys = [self.project + '-' + str(self.number)]
         else:
             known = sorted(self.get_known_numbers(self.project))
             keys = [self.project + '-' + str(x) for x in known]

-        logger.info(f'processing relationships for {len(keys)} issue in {self.project}')
+        logger.info(f'processing relationships for {len(keys)} issue(s) in {self.project}')

         for key in keys:
-            fn = os.path.join(self.cachedir, key + '.json')
-            if not os.path.exists(fn):
+            fn = self.dcw.get_fn_for_issue_by_key(key)
+            if fn is None or not os.path.exists(fn):
                 continue
             self.store_issue_relationships_to_database_by_filename(fn)
@@ -623,24 +672,35 @@ def main():

     parser = argparse.ArgumentParser()
     parser.add_argument('--serial', action='store_true', help='do not use threading')
-    parser.add_argument('--project', help='which project to scrape')
+    parser.add_argument('--project', help='which project to scrape', action='append', dest='projects')
     parser.add_argument('--number', help='which number scrape', type=int, default=None)
+    parser.add_argument('--relationships-only', action='store_true')
     args = parser.parse_args()

     projects = PROJECTS[:]
-    if args.project:
-        projects = [args.project]
+    if args.projects:
+        projects = [x for x in projects if x in args.projects]

-    if args.serial:
+    if args.serial or len(projects) == 1:
         # do one at a time ...
         for project in projects:
             if args.number:
                 jw = JiraWrapper()
-                jw.scrape(project=project, number=args.number)
+                if args.relationships_only:
+                    jw.map_relationships(project=project)
+                else:
+                    jw.scrape(project=project, number=args.number)
             else:
                 jw = JiraWrapper()
-                jw.scrape(project=project)
+                if args.relationships_only:
+                    jw.map_relationships(project=project)
+                else:
+                    jw.scrape(project=project)
     else:
+
+        if args.relationships_only:
+            raise Exception('can not map relationships in parallel mode yet')
+
         # do 4 at a time ...
         total = 4
         args_list = projects[:]
diff --git a/lib/static/js/burndown.js b/lib/static/js/burndown.js
index c7bed42..7806773 100644
--- a/lib/static/js/burndown.js
+++ b/lib/static/js/burndown.js
@@ -52,16 +52,33 @@ class CustomDateAdapter extends Chart._adapters._date {
 function onLoad() {

+    queryString = window.location.search;
+    const urlParams = new URLSearchParams(queryString);
+    let projects = urlParams.getAll("project");
+
+    if (projects.length === 0) {
+        const url = new URL(window.location.href);
+        url.searchParams.set("project", "AAH");
+        history.replaceState(null, '', url.toString());
+    }
+
     document.getElementById('burndown-chart').innerHTML = '…';

-    fetch("/api/tickets_burndown", {
+    const apiParams = new URLSearchParams();
+    projects.forEach(value => {
+        apiParams.append("project", value);
+    });
+    const apiString = apiParams.toString();
+    const apiUrl = `/api/tickets_burndown/?${apiString}`;
+
+    fetch(apiUrl, {
         method: "GET",
         headers: {'Content-Type': 'application/json'},
     }).then(res => res.json()).then(res => {
         console.log("Request complete! response:", res, typeof res);

         let newHTML = "";
[the remaining lines of this hunk were mangled in extraction; they build the issue-table header markup for the columns ID, created, updated, reporter, assignee, type, priority, status, summary]
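For reference, a minimal sketch of how the new DiskCacheWrapper cache layout behaves on a POSIX system (the symlink is created with rm/ln via a shell). It assumes the repo is importable as the lib package and uses a made-up issue payload; the scraper itself passes issue.raw plus the fetched history:

    import tempfile
    from lib.jira_wrapper import DiskCacheWrapper   # assumes lib/ is on the import path

    cachedir = tempfile.mkdtemp()
    dcw = DiskCacheWrapper(cachedir)

    # hypothetical payload; 'id' and 'key' are the only fields write_issue relies on
    fn = dcw.write_issue({'id': '12345', 'key': 'AAH-1', 'fields': {}})
    print(fn)                                    # <cachedir>/by_id/12345.json
    print(dcw.get_fn_for_issue_by_key('AAH-1'))  # resolves the by_key symlink back to by_id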