querystars.py
#!/usr/bin/env python3
import json
import re
from pprint import pprint

import requests
from bs4 import BeautifulSoup
from requests.auth import HTTPBasicAuth

from credentials import credentials as c

credentials = c.require(['user', 'token'])

re_gitlab = re.compile(
    r"^https://gitlab\.com/([^/]+)/([^/]+)(?:/[^#&]*?)*?([^/#&]+)?/?(?:#.*|&.*)*$"
)
re_github = re.compile(
    r"^https://github\.com/([^/]+)/([^/]+)(?:/[^#&]*?)*?([^/#&]+)?/?(?:#.*|&.*)*$"
)
re_gist = re.compile(r"^https://gist\.github\.com/([^/]+)/(\w+)/?(?:#.*|&.*)*$")


def getGithubStars(owner, repo, _):
    """Return the stargazer count of a GitHub repo, or None if the lookup fails."""
    auth = HTTPBasicAuth(credentials.user, credentials.token)
    api_url = f"https://api.github.com/repos/{owner}/{repo}"
    r = requests.get(api_url, auth=auth)
    if r.status_code != 200:
        return None
    data = r.json()
    if "stargazers_count" not in data:
        print("Something went wrong:", data)
        return None
    return data["stargazers_count"]
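

# Optional diagnostic (a sketch, not called anywhere above): GitHub's
# /rate_limit endpoint reports how many authenticated API calls remain,
# which can help explain 403 responses when querying many repos in one run.
def remainingRateLimit():
    auth = HTTPBasicAuth(credentials.user, credentials.token)
    r = requests.get("https://api.github.com/rate_limit", auth=auth)
    return r.json()["resources"]["core"]["remaining"]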


def updatestars(allscripts):
    for script in allscripts.values():
        stars = None
        shared = False
        url = script["url"] or ""
        match = re_github.fullmatch(url)
        if match:
            stars = getGithubStars(*match.groups())
            if stars is None:
                print("dead url", url)
                script["url"] = None
                continue
            # A non-empty third group means the URL points below the repo
            # root, i.e. the script shares a repository with other files.
            shared = match.groups()[2] is not None
        elif re_gist.match(url):
            # The GitHub API offers no way to query the star count of a
            # gist, so scrape it from the gist page instead.
            page = requests.get(url)
            if page.status_code == 404:
                print("dead url", url)
                script["url"] = None
                continue
            soup = BeautifulSoup(page.content, "html.parser")
            stars = int(soup.select_one(".social-count").text.strip())
            shared = False
        elif match := re_gitlab.match(url):
            # TODO use gitlab api instead – if possible
            # (one possible approach is sketched in getGitlabStars below)
            page = requests.get(url)
            if page.status_code == 404:
                print("dead url", url)
                script["url"] = None
                continue
            soup = BeautifulSoup(page.content, "html.parser")
            stars = int(soup.select_one(".star-count").text.strip())
            shared = match.groups()[2] is not None
        # Compare against None so that repos with zero stars are still updated.
        if stars is not None:
            print("got stars:", stars, shared)
            script["stars"] = stars
            script["sharedrepo"] = shared
    return allscripts
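

# A sketch for the TODO above: GitLab's v4 REST API exposes a project's star
# count via GET /projects/:id, where :id may be the URL-encoded
# "namespace/project" path. This helper is an assumption about how the lookup
# could look and is not wired into updatestars yet.
def getGitlabStars(owner, repo, _):
    from urllib.parse import quote
    project_id = quote(f"{owner}/{repo}", safe="")
    api_url = f"https://gitlab.com/api/v4/projects/{project_id}"
    r = requests.get(api_url)
    if r.status_code != 200:
        return None
    return r.json().get("star_count")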


if __name__ == "__main__":
    with open("mpv_script_directory.json") as f:
        allscripts = json.load(f)
    allscripts = updatestars(allscripts)
    pprint(allscripts)
    with open("mpv_script_directory.json", "w") as f:
        json.dump(allscripts, f, indent=4)