Skip to content

Commit

Permalink
checkin
Browse files Browse the repository at this point in the history
  • Loading branch information
jctanner committed Jul 13, 2023
1 parent 6f5e8b7 commit ad9d46e
Show file tree
Hide file tree
Showing 12 changed files with 358 additions and 97 deletions.
11 changes: 7 additions & 4 deletions lib/constants.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
# JIRA project keys the scraper walks by default (see main() in jira_wrapper.py).
# NOTE(review): 'AAPRFE', 'ACA', and 'ANSTRAT' each appear twice below — this
# looks like a diff/merge artifact; duplicates would make the scraper visit
# those projects twice. Confirm the intended list.
PROJECTS = [
    'AA',
    'AAH',
    'ANSTRAT',
    'AAPRFE',
    'AAP',
    'ACA',
    'AAPBUILD',
    'AAPRFE',
    'ACA',
    'ANSTRAT',
    'HATSTRAT',
    'OCPSTRAT',
    'PARTNERENG',
    'PLMCORE',
    #'PLMCORE',
    'TELCOSTRAT',
]

ISSUE_COLUMN_NAMES = [
Expand Down
1 change: 1 addition & 0 deletions lib/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
priority VARCHAR(50),
data JSONB,
history JSONB,
CONSTRAINT unique_issueid UNIQUE (id),
CONSTRAINT unique_project_number UNIQUE (project, number)
);
'''
Expand Down
74 changes: 39 additions & 35 deletions lib/flaskapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,6 @@
atexit.register(conn.close)


CLOSED = ['done', 'obsolete']


'''
datafiles = glob.glob('.data/AAH-*.json')
all_jiras = []
for df in datafiles:
logger.info(f'loading {df}')
with open(df, 'r') as f:
all_jiras.append(json.loads(f.read()))
jiras = [x for x in all_jiras if x['fields']['status']['name'].lower() != 'closed']
'''


app = Flask(__name__)

Expand All @@ -58,6 +45,17 @@ def ui():
return render_template('main.html')


@app.route('/ui/issues')
@app.route('/ui/issues/')
def ui_issues():
    """Render the issues listing page (with or without trailing slash)."""
    page = render_template('issues.html')
    return page


@app.route('/ui/projects')
def ui_projects():
    """Render the projects overview page."""
    page = render_template('projects.html')
    return page


@app.route('/ui/tree')
def ui_tree():
    """Render the issue-hierarchy tree page."""
    page = render_template('tree.html')
    return page
Expand All @@ -68,13 +66,31 @@ def ui_burndown():
return render_template('burndown.html')


@app.route('/api/projects')
def projects():
    """Return the distinct project keys present in jira_issues, sorted, as JSON.

    Fixes: dropped the pointless f-string prefix on the constant SQL text,
    replaced the manual append loop with a comprehension, and renamed the
    local list so it no longer shadows the view function itself.
    """
    with conn.cursor() as cur:
        # DISTINCT + ORDER BY pushes dedup and sorting into the database.
        cur.execute("SELECT DISTINCT(project) FROM jira_issues ORDER BY project")
        names = [row[0] for row in cur.fetchall()]
    return jsonify(names)


@app.route('/api/tickets')
@app.route('/api/tickets/')
def tickets():
#filtered = [x for x in jiras if x['fields']['status']['name'].lower() not in CLOSED]

cols = ['key', 'created', 'updated', 'created_by', 'assigned_to', 'type', 'priority', 'state', 'summary']
projects = request.args.getlist("project")
if projects:
project = projects[0]
else:
project = 'AAH'

WHERE = "WHERE project = 'AAH' AND state != 'Closed'"
cols = ['key', 'created', 'updated', 'created_by', 'assigned_to', 'type', 'priority', 'state', 'summary']
WHERE = f"WHERE project = '{project}' AND state != 'Closed'"

filtered = []
with conn.cursor() as cur:
Expand Down Expand Up @@ -152,24 +168,6 @@ def tickets_tree():

imap = {}

'''
for ik,idata in issue_keys.items():
if ik is None:
continue
if not ik.startswith('AAH-'):
continue
if ik not in imap:
imap[ik] = {
'key': ik,
'type': idata['type'],
'status': idata['state'],
'summary': idata['summary'],
'parent_key': None,
}
elif imap[ik]['summary'] is None:
imap[ik]['summary'] = idata['summary']
'''

for node in nodes:
#if node['child'] and not node['child'].startswith('AAH-'):
# continue
Expand Down Expand Up @@ -204,9 +202,15 @@ def tickets_tree():
@app.route('/api/tickets_burndown')
@app.route('/api/tickets_burndown/')
def tickets_burndown():

projects = request.args.getlist("project")
if not projects:
return redirect('/api/tickets_burndown/?project=AAH')

sw = StatsWrapper()
data = sw.burndown(projects, frequency='monthly')
#data = sw.burndown('AAH', frequency='monthly')
data = sw.burndown('AAH', frequency='weekly')
#data = sw.burndown('AAH', frequency='weekly')
data = json.loads(data)
keys = list(data.keys())
keymap = [(x, x.split('T')[0]) for x in keys]
Expand Down
88 changes: 74 additions & 14 deletions lib/jira_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import json
import logging
import os
import subprocess
import time
from datetime import timezone
import jira
Expand Down Expand Up @@ -47,6 +48,37 @@ def __init__(self, message="Failed to fetch history data."):
super().__init__(self.message)


class DiskCacheWrapper:
    """Filesystem cache of raw issue payloads.

    Layout:
        <cachedir>/by_id/<issue id>.json   -- the actual data files
        <cachedir>/by_key/<issue key>.json -- relative symlinks into by_id/
    """

    def __init__(self, cachedir):
        # Root directory holding the by_id/ and by_key/ subtrees.
        self.cachedir = cachedir

    def write_issue(self, data):
        """Write one issue dict to by_id/ and (re)point its by_key/ symlink.

        Args:
            data: issue payload; must contain string 'id' and 'key' entries.
        Returns:
            Path of the by_id JSON file that was written.
        """
        fn = os.path.join(self.cachedir, 'by_id', data['id'] + '.json')
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(os.path.dirname(fn), exist_ok=True)
        with open(fn, 'w') as f:
            f.write(json.dumps(data, indent=2, sort_keys=True))

        # Make a by-key symlink; relative target keeps the cache relocatable.
        dn = os.path.join(self.cachedir, 'by_key')
        os.makedirs(dn, exist_ok=True)
        src = '../by_id/' + os.path.basename(fn)
        dst = os.path.join(dn, f'{data["key"]}.json')
        # os.symlink instead of shelling out to "rm -f ...; ln -s ...":
        # no shell-injection risk from the key and no subprocess overhead.
        # lexists (not exists) also catches a dangling link from a prior run.
        if os.path.lexists(dst):
            os.remove(dst)
        os.symlink(src, dst)

        return fn

    def get_fn_for_issue_by_key(self, key):
        """Resolve an issue key to its real by_id path, or None if not cached."""
        path = os.path.join(self.cachedir, 'by_key', f'{key}.json')
        path = os.path.abspath(path)
        if not os.path.exists(path):
            return None
        return os.path.realpath(path)


class DataWrapper:
def __init__(self, fn):
self.datafile = fn
Expand Down Expand Up @@ -144,6 +176,7 @@ class JiraWrapper:

processed = None
project = None
number = None
#errata = None
#bugzillas = None
#jira_issues = None
Expand All @@ -152,6 +185,8 @@ class JiraWrapper:

def __init__(self):

self.dcw = DiskCacheWrapper(self.cachedir)

self.project = None
self.processed = {}

Expand Down Expand Up @@ -179,6 +214,10 @@ def scrape(self, project=None, number=None):
self.scrape_jira_issues()
self.process_relationships()

def map_relationships(self, project):
    # Rebuild relationship rows for one project without re-scraping issues:
    # point the wrapper at the project first, then delegate to
    # process_relationships(), which reads self.project when it runs.
    self.project = project
    self.process_relationships()

def store_issue_column(self, project, number, colname, value):
with self.conn.cursor() as cur:
sql = f''' UPDATE jira_issues SET {colname} = %s WHERE project = %s AND number = %s '''
Expand Down Expand Up @@ -235,7 +274,7 @@ def get_invalid_numbers(self, project):
rows = cur.fetchall()
return [x[0] for x in rows]

def get_issue_with_history(self, issue_key):
def get_issue_with_history(self, issue_key, fallback=False):

count = 1
while True:
Expand All @@ -244,10 +283,11 @@ def get_issue_with_history(self, issue_key):
try:
return self.jira_client.issue(issue_key, expand='changelog')
except requests.exceptions.JSONDecodeError as e:
#logger.error(e)
logger.error(e)
#import epdb; epdb.st()
#time.sleep(.5)
return self.jira_client.issue(issue_key)
time.sleep(.5)
#return self.jira_client.issue(issue_key)
#return self.get_issue_with_history(issue_key, fallback=True)

except requests.exceptions.ChunkedEncodingError as e:
logger.error(e)
Expand Down Expand Up @@ -403,15 +443,21 @@ def scrape_jira_issues(self, github_issue_to_find=None):
# get history
logger.info(f'get history for {project}-{number}')
history = self.get_issue_history(project, number, issue)
if history is None:
processed.append(number)
continue
logger.info(f'found {len(history)} events for {project}-{number}')

# write to json file
ds = issue.raw
ds['history'] = history
'''
fn = os.path.join(self.cachedir, ds['key'] + '.json')
logger.info(f'write {fn}')
with open(fn, 'w') as f:
f.write(json.dumps(issue.raw))
'''
fn = self.dcw.write_issue(ds)

# write to DB
self.store_issue_to_database_by_filename(fn)
Expand Down Expand Up @@ -484,9 +530,12 @@ def scrape_jira_issues(self, github_issue_to_find=None):
ds = issue.raw
history = self.get_issue_history(self.project, mn, issue)
ds['history'] = history
'''
fn = os.path.join(self.cachedir, ds['key'] + '.json')
with open(fn, 'w') as f:
f.write(json.dumps(issue.raw))
'''
fn = self.dcw.write_issue(ds)

# write to DB
self.store_issue_to_database_by_filename(fn)
Expand Down Expand Up @@ -515,16 +564,16 @@ def process_relationships(self):

logger.info(f'processing relationships for {self.project}')

if self.number:
if self.number is not None:
keys = [self.project + '-' + str(self.number)]
else:
known = sorted(self.get_known_numbers(self.project))
keys = [self.project + '-' + str(x) for x in known]

logger.info(f'processing relationships for {len(keys)} issue in {self.project}')
logger.info(f'processing relationships for {len(keys)} issue(s) in {self.project}')
for key in keys:
fn = os.path.join(self.cachedir, key + '.json')
if not os.path.exists(fn):
fn = self.dcw.get_fn_for_issue_by_key(key)
if fn is None or not os.path.exists(fn):
continue
self.store_issue_relationships_to_database_by_filename(fn)

Expand Down Expand Up @@ -623,24 +672,35 @@ def main():

parser = argparse.ArgumentParser()
parser.add_argument('--serial', action='store_true', help='do not use threading')
parser.add_argument('--project', help='which project to scrape')
parser.add_argument('--project', help='which project to scrape', action='append', dest='projects')
parser.add_argument('--number', help='which number scrape', type=int, default=None)
parser.add_argument('--relationships-only', action='store_true')
args = parser.parse_args()

projects = PROJECTS[:]
if args.project:
projects = [args.project]
if args.projects:
projects = [x for x in projects if x in args.projects]

if args.serial:
if args.serial or len(projects) == 1:
# do one at a time ...
for project in projects:
if args.number:
jw = JiraWrapper()
jw.scrape(project=project, number=args.number)
if args.relationships_only:
jw.map_relationships(project=project)
else:
jw.scrape(project=project, number=args.number)
else:
jw = JiraWrapper()
jw.scrape(project=project)
if args.relationships_only:
jw.map_relationships(project=project)
else:
jw.scrape(project=project)
else:

if args.relationships_only:
raise Exception('can not map relationships in parallel mode yet')

# do 4 at a time ...
total = 4
args_list = projects[:]
Expand Down
Loading

0 comments on commit ad9d46e

Please sign in to comment.