Skip to content

Commit

Permalink
checkin
Browse files Browse the repository at this point in the history
  • Loading branch information
jctanner committed Aug 2, 2023
1 parent ad9d46e commit d5bd29f
Show file tree
Hide file tree
Showing 13 changed files with 783 additions and 144 deletions.
4 changes: 4 additions & 0 deletions RUNAPI.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash

# Activate the project virtualenv, then launch the Flask API server.
source .venv/bin/activate
python lib/flaskapp.py
5 changes: 5 additions & 0 deletions RUNFETCH.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash

# Activate the project virtualenv, load environment config (credentials etc.
# presumably live in config.sh -- not visible here), then run the fetcher.
source .venv/bin/activate
source config.sh
python lib/jira_wrapper.py
136 changes: 136 additions & 0 deletions lib/data_wrapper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
#!/usr/bin/env python

"""
jira_tickets.py - idempotently copy the issue data from github_tickets.py to issues.redhat.com
The jira instance on issues.redhat.com does have an api, but it's shielded by sso and regular users
can not generate tokens nor do advanced data import. This script works around all of that by using
selenium to navigate through the pages and to input the data.
"""

import atexit
import argparse
import datetime
import copy
import glob
import json
import logging
import os
import subprocess
import time
from datetime import timezone
import jira

import requests

import concurrent.futures

from pprint import pprint
from logzero import logger

from constants import PROJECTS, ISSUE_COLUMN_NAMES
from database import JiraDatabaseWrapper
from database import ISSUE_INSERT_QUERY
from utils import (
sortable_key_from_ikey,
history_items_to_dict,
history_to_dict,
)


# Enable verbose urllib3 logging so HTTP traffic to jira is visible.
rlog = logging.getLogger('urllib3')
rlog.setLevel(logging.DEBUG)


class DataWrapper:
    """Read-only accessor for a single cached jira issue JSON file.

    Splits the issue key into project/number, records when the file was
    fetched, separates the change history from the payload, and exposes
    the commonly used jira fields as properties.
    """

    def __init__(self, fn):
        """Load and pre-process the issue payload stored at path *fn*.

        Raises:
            FileNotFoundError: if *fn* does not exist. (Subclasses the
                generic Exception raised previously, so existing callers
                catching Exception keep working.)
        """
        self.datafile = fn
        if not os.path.exists(self.datafile):
            raise FileNotFoundError(f'{self.datafile} does not exist')

        with open(self.datafile, 'r') as f:
            self._data = json.loads(f.read())

        # a jira key looks like "AAH-1234": <project>-<issue number>
        self.project = self._data['key'].split('-')[0]
        self.number = int(self._data['key'].split('-')[-1])

        # the file's creation time approximates when the issue was fetched
        ts = os.path.getctime(self.datafile)
        self.fetched = datetime.datetime.fromtimestamp(ts)

        # split history out of the payload so .data stays compact
        self._history = copy.deepcopy(self._data['history'])
        self._data.pop('history', None)

        self.assigned_to = None
        if self._data['fields']['assignee']:
            self.assigned_to = self._data['fields']['assignee']['name']

    @property
    def id(self):
        return self._data['id']

    @property
    def raw_data(self):
        # the payload (minus history) as a dict
        return self._data

    @property
    def data(self):
        # payload serialized to a JSON string (e.g. for DB insertion)
        return json.dumps(self._data)

    @property
    def raw_history(self):
        return self._history

    @property
    def history(self):
        return json.dumps(self._history)

    @property
    def key(self):
        return self._data['key']

    @property
    def url(self):
        # jira calls the issue's API url "self"
        return self._data['self']

    @property
    def created_by(self):
        return self._data['fields']['creator']['name']

    @property
    def type(self):
        return self._data['fields']['issuetype']['name']

    @property
    def summary(self):
        return self._data['fields']['summary']

    @property
    def description(self):
        # jira stores empty descriptions as null; normalize to ''
        return self._data['fields']['description'] or ''

    @property
    def created(self):
        return self._data['fields']['created']

    @property
    def updated(self):
        return self._data['fields']['updated']

    @property
    def closed(self):
        # only closed issues have a meaningful resolution date
        if self.state != 'Closed':
            return None
        return self._data['fields']['resolutiondate']

    @property
    def state(self):
        return self._data['fields']['status']['name']

    @property
    def priority(self):
        # priority may be null on the issue
        if self._data['fields']['priority'] is None:
            return None
        return self._data['fields']['priority']['name']
24 changes: 24 additions & 0 deletions lib/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,19 @@
);
'''

ISSUE_EVENT_SCHEMA = '''
CREATE TABLE jira_issue_events (
id VARCHAR(50),
author VARCHAR(50),
project VARCHAR(50),
number INTEGER,
key VARCHAR(50),
created TIMESTAMP,
data JSONB,
CONSTRAINT unique_eventid UNIQUE (id)
);
'''

ISSUE_INSERT_QUERY = """
INSERT INTO jira_issues (
datafile,
Expand Down Expand Up @@ -140,13 +153,24 @@ def get_connection(self):
connstring = f'host={self.IP} dbname={self.DB} user={self.USER} password={self.PASS}'
return psycopg.connect(connstring)

def check_table_and_create(self, tablename):
    """Create *tablename* if it does not already exist.

    Only knows how to create the jira_issue_events table; other names are
    checked but silently ignored.
    """
    conn = self.get_connection()
    try:
        with conn.cursor() as cur:
            # parameterized lookup against information_schema
            cur.execute("select * from information_schema.tables where table_name=%s", (tablename,))
            exists = bool(cur.rowcount)
            if not exists:
                if tablename == 'jira_issue_events':
                    cur.execute(ISSUE_EVENT_SCHEMA)
        conn.commit()
    finally:
        # the original leaked the connection; always release it
        conn.close()

def load_database(self):
    """Create the jira issue tables from their DDL constants.

    NOTE(review): plain CREATE TABLE will raise if the tables already
    exist -- confirm this is only run against a fresh database.
    """
    conn = self.get_connection()

    # create schema ...
    with conn.cursor() as cur:
        cur.execute(ISSUE_SCHEMA)
        cur.execute(ISSUE_RELATIONSHIP_SCHEMA)
        cur.execute(ISSUE_EVENT_SCHEMA)
        conn.commit()


Expand Down
82 changes: 82 additions & 0 deletions lib/diskcache_wrapper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
#!/usr/bin/env python

"""
jira_tickets.py - idempotently copy the issue data from github_tickets.py to issues.redhat.com
The jira instance on issues.redhat.com does have an api, but it's shielded by sso and regular users
can not generate tokens nor do advanced data import. This script works around all of that by using
selenium to navigate through the pages and to input the data.
"""

import atexit
import argparse
import datetime
import copy
import glob
import json
import logging
import os
import subprocess
import time
from datetime import timezone
import jira

import requests

import concurrent.futures

from pprint import pprint
from logzero import logger

from constants import PROJECTS, ISSUE_COLUMN_NAMES
from database import JiraDatabaseWrapper
from database import ISSUE_INSERT_QUERY
from utils import (
sortable_key_from_ikey,
history_items_to_dict,
history_to_dict,
)

from data_wrapper import DataWrapper


# Enable verbose urllib3 logging so HTTP traffic to jira is visible.
rlog = logging.getLogger('urllib3')
rlog.setLevel(logging.DEBUG)


class DiskCacheWrapper:
    """On-disk JSON cache of jira issues.

    Files are written to <cachedir>/by_id/<id>.json, with a
    <cachedir>/by_key/<key>.json symlink for lookup by jira key.
    """

    def __init__(self, cachedir):
        # root directory; by_id/ and by_key/ are created lazily on write
        self.cachedir = cachedir

    def write_issue(self, data):
        """Serialize *data* to by_id/<id>.json and (re)point the
        by_key/<key>.json symlink at it. Returns the by_id filename."""
        fn = os.path.join(self.cachedir, 'by_id', data['id'] + '.json')
        dn = os.path.dirname(fn)
        if not os.path.exists(dn):
            os.makedirs(dn)
        with open(fn, 'w') as f:
            f.write(json.dumps(data, indent=2, sort_keys=True))

        # make a by key symlink
        dn = os.path.join(self.cachedir, 'by_key')
        if not os.path.exists(dn):
            os.makedirs(dn)
        src = '../by_id/' + os.path.basename(fn)
        dst = os.path.join(dn, f'{data["key"]}.json')
        # Previously this shelled out to "rm -f ...; ln -s ..." with the key
        # interpolated into the command string, which was shell-injectable.
        # Do the same remove-then-link directly with os calls instead.
        try:
            os.remove(dst)
        except FileNotFoundError:
            pass
        os.symlink(src, dst)

        return fn

    def get_fn_for_issue_by_key(self, key):
        """Resolve the by_key symlink for *key* to the underlying by_id
        filename, or return None when the issue is not cached."""
        path = os.path.join(self.cachedir, 'by_key', f'{key}.json')
        path = os.path.abspath(path)
        if not os.path.exists(path):
            return None
        return os.path.realpath(path)

    @property
    def issue_files(self):
        """Yield the path of every cached by_id JSON file."""
        for root, dirs, files in os.walk(os.path.join(self.cachedir, 'by_id')):
            for fn in files:
                if fn.endswith('.json'):
                    yield os.path.join(root, fn)
5 changes: 5 additions & 0 deletions lib/exceptions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
class HistoryFetchFailedException(Exception):
    """Raised when the history data for an issue could not be retrieved."""

    def __init__(self, message="Failed to fetch history data."):
        # keep the message on the instance for callers that inspect it
        self.message = message
        super().__init__(message)

38 changes: 38 additions & 0 deletions lib/flaskapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,11 @@ def ui_burndown():
return render_template('burndown.html')


@app.route('/ui/churn')
def ui_churn():
    # serve the churn visualization page
    return render_template('churn.html')


@app.route('/api/projects')
def projects():

Expand Down Expand Up @@ -212,14 +217,47 @@ def tickets_burndown():
#data = sw.burndown('AAH', frequency='monthly')
#data = sw.burndown('AAH', frequency='weekly')
data = json.loads(data)

'''
keys = list(data.keys())
keymap = [(x, x.split('T')[0]) for x in keys]
print(f'keys: {keys}')
print(f'keymap: {keymap}')
for km in keymap:
data[km[1]] = data[km[0]]
data.pop(km[0], None)
'''

return jsonify(data)


@app.route('/api/tickets_churn')
@app.route('/api/tickets_churn/')
def tickets_churn():
    """Return monthly churn stats for the requested projects as JSON.

    Query params (both repeatable):
        project: jira project key(s); defaults to AAH via redirect when absent.
        field:   field name(s) passed through to StatsWrapper.churn.
    """
    projects = request.args.getlist("project")
    if not projects:
        # no project chosen yet: default to AAH
        return redirect('/api/tickets_churn/?project=AAH')
    # the frontend can send the literal string "null"; drop it
    projects = [x for x in projects if x != 'null']
    fields = request.args.getlist("field")

    sw = StatsWrapper()
    data = sw.churn(projects, frequency='monthly', fields=fields)
    # churn() returns a JSON string; decode so jsonify emits real JSON
    data = json.loads(data)
    return jsonify(data)


if __name__ == '__main__':
    # development server only; binds all interfaces on port 5000
    app.run(debug=True, host='0.0.0.0', port=5000)
Loading

0 comments on commit d5bd29f

Please sign in to comment.