mirror of
https://github.com/driftywinds/driftywinds.git
synced 2026-01-30 22:43:26 +00:00
Update today.py
This commit is contained in:
266
today.py
266
today.py
@@ -5,45 +5,21 @@ from lxml import etree
|
||||
import time
|
||||
import hashlib
|
||||
|
||||
# Fine-grained personal access token with All Repositories access:
|
||||
# Fine-grained personal access token
|
||||
HEADERS = {'authorization': 'token '+ os.environ['ACCESS_TOKEN']}
|
||||
USER_NAME = os.environ['USER_NAME']
|
||||
QUERY_COUNT = {'user_getter': 0, 'follower_getter': 0, 'graph_repos_stars': 0, 'recursive_loc': 0, 'graph_commits': 0, 'loc_query': 0}
|
||||
|
||||
def simple_request(func_name, query, variables):
    """POST a GraphQL query to GitHub and return the response.

    func_name -- name of the calling function, used in the error report
    query     -- GraphQL query string
    variables -- dict of GraphQL variables

    Responses with status 200 (success) or 502 (GitHub timeout, dealt with
    by the caller) are returned as-is; any other status raises an Exception
    carrying the caller name, status code, body, and the query counters.
    """
    response = requests.post('https://api.github.com/graphql', json={'query': query, 'variables': variables}, headers=HEADERS)
    # 502s are returned too so callers can retry/skip instead of crashing
    if response.status_code in (200, 502):
        return response
    raise Exception(func_name, ' has failed with a', response.status_code, response.text, QUERY_COUNT)
|
||||
|
||||
|
||||
def graph_commits(start_date, end_date):
    """Return my total contribution count between two dates.

    Uses GitHub's GraphQL v4 API; start_date/end_date are DateTime strings.
    """
    query_count('graph_commits')
    query = '''
    query($start_date: DateTime!, $end_date: DateTime!, $login: String!) {
        user(login: $login) {
            contributionsCollection(from: $start_date, to: $end_date) {
                contributionCalendar {
                    totalContributions
                }
            }
        }
    }'''
    variables = {'start_date': start_date, 'end_date': end_date, 'login': USER_NAME}
    response = simple_request(graph_commits.__name__, query, variables)
    calendar = response.json()['data']['user']['contributionsCollection']['contributionCalendar']
    return int(calendar['totalContributions'])
|
||||
|
||||
|
||||
def graph_repos_stars(count_type, owner_affiliation, cursor=None, add_loc=0, del_loc=0):
|
||||
"""
|
||||
Uses GitHub's GraphQL v4 API to return my total repository, star, or lines of code count.
|
||||
"""
|
||||
def graph_repos_stars(count_type, owner_affiliation, cursor=None):
|
||||
query_count('graph_repos_stars')
|
||||
query = '''
|
||||
query ($owner_affiliation: [RepositoryAffiliation], $login: String!, $cursor: String) {
|
||||
@@ -69,17 +45,12 @@ def graph_repos_stars(count_type, owner_affiliation, cursor=None, add_loc=0, del
|
||||
}'''
|
||||
variables = {'owner_affiliation': owner_affiliation, 'login': USER_NAME, 'cursor': cursor}
|
||||
request = simple_request(graph_repos_stars.__name__, query, variables)
|
||||
if request.status_code == 200:
|
||||
if count_type == 'repos':
|
||||
return request.json()['data']['user']['repositories']['totalCount']
|
||||
elif count_type == 'stars':
|
||||
return stars_counter(request.json()['data']['user']['repositories']['edges'])
|
||||
|
||||
if count_type == 'repos':
|
||||
return request.json()['data']['user']['repositories']['totalCount']
|
||||
elif count_type == 'stars':
|
||||
return stars_counter(request.json()['data']['user']['repositories']['edges'])
|
||||
|
||||
def recursive_loc(owner, repo_name, data, cache_comment, addition_total=0, deletion_total=0, my_commits=0, cursor=None):
|
||||
"""
|
||||
Uses GitHub's GraphQL v4 API and cursor pagination to fetch 100 commits from a repository at a time
|
||||
"""
|
||||
query_count('recursive_loc')
|
||||
query = '''
|
||||
query ($repo_name: String!, $owner: String!, $cursor: String) {
|
||||
@@ -91,9 +62,6 @@ def recursive_loc(owner, repo_name, data, cache_comment, addition_total=0, delet
|
||||
totalCount
|
||||
edges {
|
||||
node {
|
||||
... on Commit {
|
||||
committedDate
|
||||
}
|
||||
author {
|
||||
user {
|
||||
id
|
||||
@@ -115,21 +83,19 @@ def recursive_loc(owner, repo_name, data, cache_comment, addition_total=0, delet
|
||||
}'''
|
||||
variables = {'repo_name': repo_name, 'owner': owner, 'cursor': cursor}
|
||||
request = requests.post('https://api.github.com/graphql', json={'query': query, 'variables':variables}, headers=HEADERS)
|
||||
|
||||
if request.status_code == 502:
|
||||
print(f" ⚠️ Skipping {owner}/{repo_name} due to GitHub API Timeout (502)")
|
||||
return 0, 0, 0 # Return zeros so the script continues
|
||||
|
||||
if request.status_code == 200:
|
||||
if request.json()['data']['repository']['defaultBranchRef'] != None: # Only count commits if repo isn't empty
|
||||
if request.json()['data']['repository']['defaultBranchRef'] != None:
|
||||
return loc_counter_one_repo(owner, repo_name, data, cache_comment, request.json()['data']['repository']['defaultBranchRef']['target']['history'], addition_total, deletion_total, my_commits)
|
||||
else: return 0
|
||||
force_close_file(data, cache_comment) # saves what is currently in the file before this program crashes
|
||||
if request.status_code == 403:
|
||||
raise Exception('Too many requests in a short amount of time!\nYou\'ve hit the non-documented anti-abuse limit!')
|
||||
raise Exception('recursive_loc() has failed with a', request.status_code, request.text, QUERY_COUNT)
|
||||
|
||||
else: return 0, 0, 0
|
||||
|
||||
return 0, 0, 0
|
||||
|
||||
def loc_counter_one_repo(owner, repo_name, data, cache_comment, history, addition_total, deletion_total, my_commits):
|
||||
"""
|
||||
Recursively call recursive_loc (since GraphQL can only search 100 commits at a time)
|
||||
only adds the LOC value of commits authored by me
|
||||
"""
|
||||
for node in history['edges']:
|
||||
if node['node']['author']['user'] == OWNER_ID:
|
||||
my_commits += 1
|
||||
@@ -138,13 +104,10 @@ def loc_counter_one_repo(owner, repo_name, data, cache_comment, history, additio
|
||||
|
||||
if history['edges'] == [] or not history['pageInfo']['hasNextPage']:
|
||||
return addition_total, deletion_total, my_commits
|
||||
else: return recursive_loc(owner, repo_name, data, cache_comment, addition_total, deletion_total, my_commits, history['pageInfo']['endCursor'])
|
||||
|
||||
else:
|
||||
return recursive_loc(owner, repo_name, data, cache_comment, addition_total, deletion_total, my_commits, history['pageInfo']['endCursor'])
|
||||
|
||||
def loc_query(owner_affiliation, comment_size=0, force_cache=False, cursor=None, edges=[]):
|
||||
"""
|
||||
Uses GitHub's GraphQL v4 API to query all the repositories I have access to (with respect to owner_affiliation)
|
||||
"""
|
||||
query_count('loc_query')
|
||||
query = '''
|
||||
query ($owner_affiliation: [RepositoryAffiliation], $login: String!, $cursor: String) {
|
||||
@@ -175,98 +138,71 @@ def loc_query(owner_affiliation, comment_size=0, force_cache=False, cursor=None,
|
||||
}'''
|
||||
variables = {'owner_affiliation': owner_affiliation, 'login': USER_NAME, 'cursor': cursor}
|
||||
request = simple_request(loc_query.__name__, query, variables)
|
||||
if request.json()['data']['user']['repositories']['pageInfo']['hasNextPage']:
|
||||
edges += request.json()['data']['user']['repositories']['edges']
|
||||
return loc_query(owner_affiliation, comment_size, force_cache, request.json()['data']['user']['repositories']['pageInfo']['endCursor'], edges)
|
||||
else:
|
||||
return cache_builder(edges + request.json()['data']['user']['repositories']['edges'], comment_size, force_cache)
|
||||
|
||||
# Handle possible empty data on 502
|
||||
res_json = request.json()
|
||||
if 'data' not in res_json or res_json['data']['user'] is None:
|
||||
return [0, 0, 0, False]
|
||||
|
||||
if res_json['data']['user']['repositories']['pageInfo']['hasNextPage']:
|
||||
edges += res_json['data']['user']['repositories']['edges']
|
||||
return loc_query(owner_affiliation, comment_size, force_cache, res_json['data']['user']['repositories']['pageInfo']['endCursor'], edges)
|
||||
else:
|
||||
return cache_builder(edges + res_json['data']['user']['repositories']['edges'], comment_size, force_cache)
|
||||
|
||||
def cache_builder(edges, comment_size, force_cache, loc_add=0, loc_del=0):
    """Check each repository in edges against the cache and recount LOC where stale.

    edges        -- list of GraphQL repository edges from loc_query
    comment_size -- number of leading comment lines reserved in the cache file
    force_cache  -- when True, wipe and rebuild the cache unconditionally

    Returns [additions, deletions, net LOC, cached] where cached is False when
    the cache had to be rebuilt.

    NOTE(review): the source was diff residue with old and new statements merged
    (duplicate reads, a dangling `except TypeError` referencing undefined names,
    and two conflicting returns); this is the deduplicated updated version, which
    guards against repositories with no default branch.
    """
    cached = True
    filename = 'cache/' + hashlib.sha256(USER_NAME.encode('utf-8')).hexdigest() + '.txt'
    if not os.path.exists('cache'):
        os.makedirs('cache')
    try:
        with open(filename, 'r') as f:
            data = f.readlines()
    except FileNotFoundError:
        data = []
        if comment_size > 0:
            # seed the new file with placeholder comment lines
            data = ['\n'] * comment_size
            with open(filename, 'w') as f:
                f.writelines(data)

    if len(data) - comment_size != len(edges) or force_cache:
        # repo count changed (or rebuild forced): wipe and rebuild the cache
        cached = False
        flush_cache(edges, filename, comment_size)
        with open(filename, 'r') as f:
            data = f.readlines()

    cache_comment = data[:comment_size]
    data = data[comment_size:]
    for index in range(len(edges)):
        # each cache line: <repo hash> <commit count> <my commits> <additions> <deletions>
        repo_hash, commit_count, *__ = data[index].split()
        if repo_hash == hashlib.sha256(edges[index]['node']['nameWithOwner'].encode('utf-8')).hexdigest():
            if edges[index]['node']['defaultBranchRef'] is not None:
                new_commit_count = edges[index]['node']['defaultBranchRef']['target']['history']['totalCount']
                if int(commit_count) != new_commit_count:
                    # commit count changed since last run: recount this repo's LOC
                    owner, repo_name = edges[index]['node']['nameWithOwner'].split('/')
                    loc = recursive_loc(owner, repo_name, data, cache_comment)
                    data[index] = repo_hash + f' {new_commit_count} {loc[2]} {loc[0]} {loc[1]}\n'
            else:
                # empty repository (no default branch): record zeros
                data[index] = repo_hash + ' 0 0 0 0\n'

    with open(filename, 'w') as f:
        f.writelines(cache_comment)
        f.writelines(data)

    # columns 3 and 4 hold per-repo additions and deletions
    for line in data:
        loc = line.split()
        loc_add += int(loc[3])
        loc_del += int(loc[4])
    return [loc_add, loc_del, loc_add - loc_del, cached]
|
||||
|
||||
def flush_cache(edges, filename, comment_size):
    """Wipe the cache file, keeping only the leading comment block.

    Every repository in edges is re-written as a zeroed cache line
    (`<sha256 of nameWithOwner> 0 0 0 0`).

    Fix: the source contained both the old conditional read and the new
    unconditional one (diff residue); a single unconditional slice is kept —
    `[:0]` is already `[]` when comment_size is 0.
    """
    with open(filename, 'r') as f:
        data = f.readlines()[:comment_size]  # preserve only the comment block
    with open(filename, 'w') as f:
        f.writelines(data)
        for node in edges:
            f.write(hashlib.sha256(node['node']['nameWithOwner'].encode('utf-8')).hexdigest() + ' 0 0 0 0\n')
|
||||
|
||||
|
||||
def force_close_file(data, cache_comment):
    """Persist whatever cache data exists before the program crashes.

    data          -- cache body lines to save
    cache_comment -- leading comment lines to save first
    """
    filename = 'cache/' + hashlib.sha256(USER_NAME.encode('utf-8')).hexdigest() + '.txt'
    with open(filename, 'w') as cache_file:
        cache_file.writelines(cache_comment)
        cache_file.writelines(data)
    print('Error writing to cache file. Partial data saved to', filename)
|
||||
|
||||
|
||||
def stars_counter(data):
    """Count total stars across the given repository edges.

    data -- list of GraphQL repository edges, each shaped like
            {'node': {'stargazers': {'totalCount': int}}}

    Fix: the source carried both the old accumulator loop and the new
    generator-expression version with an unreachable second return (diff
    residue); only the single-expression version is kept.
    """
    return sum(node['node']['stargazers']['totalCount'] for node in data)
|
||||
|
||||
def svg_overwrite(filename, commit_data, star_data, repo_data, contrib_data, follower_data, loc_data):
|
||||
"""
|
||||
Parse SVG files and update elements with commits, stars, repositories, and lines written
|
||||
"""
|
||||
tree = etree.parse(filename)
|
||||
root = tree.getroot()
|
||||
# Age data calculation removed
|
||||
justify_format(root, 'commit_data', commit_data, 23)
|
||||
justify_format(root, 'star_data', star_data, 14)
|
||||
justify_format(root, 'repo_data', repo_data, 7)
|
||||
@@ -277,124 +213,54 @@ def svg_overwrite(filename, commit_data, star_data, repo_data, contrib_data, fol
|
||||
justify_format(root, 'loc_del', loc_data[1], 7)
|
||||
tree.write(filename, encoding='utf-8', xml_declaration=True)
|
||||
|
||||
|
||||
def justify_format(root, element_id, new_text, length=0):
    """Update an SVG element's text and size the dotted leader before it.

    root       -- parsed SVG element tree root
    element_id -- id of the element whose text is replaced
    new_text   -- value to show; ints get thousands separators
    length     -- target column width used to size the `<id>_dots` leader

    Fix: the source contained both the old and new implementations merged
    (double find_and_replace calls, two dot_string computations — diff
    residue); only the updated version is kept.
    """
    if isinstance(new_text, int):
        new_text = f"{new_text:,}"  # thousands separators for counts
    find_and_replace(root, element_id, str(new_text))
    just_len = max(0, length - len(str(new_text)))
    # short gaps use fixed fillers; longer gaps get a padded dot run
    dot_string = ' ' + ('.' * just_len) + ' ' if just_len > 2 else ('. ' if just_len == 2 else ' ')
    find_and_replace(root, f"{element_id}_dots", dot_string)
|
||||
|
||||
|
||||
def find_and_replace(root, element_id, new_text):
    """Find the element with the given id in the SVG tree and set its text.

    Silently does nothing when no matching element exists.

    Fix: the source repeated the `element is not None` guard twice (merged
    old/new diff lines); deduplicated to a single guard.
    """
    element = root.find(f".//*[@id='{element_id}']")
    if element is not None:
        element.text = new_text
|
||||
|
||||
def commit_counter(comment_size):
    """Sum my total commits from the cache file created by cache_builder.

    comment_size -- number of leading comment lines to skip in the cache file.

    Fix: the source contained both the old accumulator loop and the new
    readlines/sum version (diff residue); only the concise version is kept.
    """
    filename = 'cache/' + hashlib.sha256(USER_NAME.encode('utf-8')).hexdigest() + '.txt'
    with open(filename, 'r') as f:
        data = f.readlines()[comment_size:]
    # column 2 of each cache line holds that repo's commit count
    return sum(int(line.split()[2]) for line in data)
|
||||
|
||||
def user_getter(username):
    """Return ({'id': account_id}, created_at) for the given GitHub user.

    Fix: the source kept both the old multi-line query and the new one-line
    query plus two simple_request calls (diff residue), which would issue the
    API call twice; deduplicated to the updated single-call version.
    """
    query_count('user_getter')
    query = 'query($login: String!){ user(login: $login) { id createdAt } }'
    request = simple_request(user_getter.__name__, query, {'login': username})
    user = request.json()['data']['user']
    return {'id': user['id']}, user['createdAt']
|
||||
|
||||
def follower_getter(username):
    """Return the number of followers of the given GitHub user as an int.

    Fix: the source kept both the old multi-line query and the new one-line
    query (diff residue); deduplicated to the updated version.
    """
    query_count('follower_getter')
    query = 'query($login: String!){ user(login: $login) { followers { totalCount } } }'
    request = simple_request(follower_getter.__name__, query, {'login': username})
    return int(request.json()['data']['user']['followers']['totalCount'])
|
||||
|
||||
|
||||
def query_count(funct_id):
    """Record one API call against funct_id in the module-level QUERY_COUNT."""
    # mutating a dict entry needs no `global` declaration
    QUERY_COUNT[funct_id] = QUERY_COUNT[funct_id] + 1
|
||||
|
||||
|
||||
def perf_counter(funct, *args):
    """Run funct(*args) and return (result, elapsed_seconds)."""
    begin = time.perf_counter()
    result = funct(*args)
    elapsed = time.perf_counter() - begin
    return result, elapsed
|
||||
|
||||
|
||||
def formatter(query_type, difference, funct_return=False, whitespace=0):
    """Print a query's timing and optionally return its formatted result.

    query_type   -- label printed before the timing
    difference   -- elapsed time in seconds
    funct_return -- value to format/return (False when only timing is wanted)
    whitespace   -- when > 0, left-justify the comma-formatted value to this width

    Fixes: removed an unreachable stray `return funct(*args)...` line left over
    from a diff merge (it referenced names not in this scope), and replaced the
    expression-statement print ternary with a plain if/else.
    """
    print('{:<23}'.format(' ' + query_type + ':'), sep='', end='')
    # times above one second print in seconds, otherwise in milliseconds
    if difference > 1:
        print('{:>12}'.format('%.4f' % difference + ' s '))
    else:
        print('{:>12}'.format('%.4f' % (difference * 1000) + ' ms'))
    if whitespace:
        return f"{'{:,}'.format(funct_return): <{whitespace}}"
    return funct_return
|
||||
|
||||
if __name__ == '__main__':
    print('Calculation times:')
    # user_getter returns ({'id': ...}, createdAt); OWNER_ID is compared against
    # commit authors in loc_counter_one_repo
    user_data, user_time = perf_counter(user_getter, USER_NAME)
    OWNER_ID, acc_date = user_data
    formatter('account data', user_time)

    # Age calculation removed
    total_loc, loc_time = perf_counter(loc_query, ['OWNER', 'COLLABORATOR', 'ORGANIZATION_MEMBER'], 7)
    formatter('LOC (cached)', loc_time) if total_loc[-1] else formatter('LOC (no cache)', loc_time)

    # NOTE(review): the source interleaved an old version (named *_time vars,
    # an ANSI-escape total-time print using them, and a second svg_overwrite
    # call) with the updated version; the old lines referenced variables the
    # new code no longer defines, so only the updated flow is kept.
    commit_data, _ = perf_counter(commit_counter, 7)
    star_data, _ = perf_counter(graph_repos_stars, 'stars', ['OWNER'])
    repo_data, _ = perf_counter(graph_repos_stars, 'repos', ['OWNER'])
    contrib_data, _ = perf_counter(graph_repos_stars, 'repos', ['OWNER', 'COLLABORATOR', 'ORGANIZATION_MEMBER'])
    follower_data, _ = perf_counter(follower_getter, USER_NAME)

    # format integer LOC values with thousands separators; the trailing element
    # of total_loc is the cached flag, not a count
    formatted_loc = [f"{x:,}" if isinstance(x, int) else x for x in total_loc[:-1]]
    # Only writing to dark_mode.svg, age argument removed
    svg_overwrite('dark_mode.svg', commit_data, star_data, repo_data, contrib_data, follower_data, formatted_loc)
    print("SVG updated successfully.")

    print('Total GitHub GraphQL API calls:', '{:>3}'.format(sum(QUERY_COUNT.values())))
    for funct_name, count in QUERY_COUNT.items():
        print('{:<28}'.format(' ' + funct_name + ':'), '{:>6}'.format(count))
|
||||
Reference in New Issue
Block a user