diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
new file mode 100644
index 0000000..128475f
--- /dev/null
+++ b/.github/workflows/build.yaml
@@ -0,0 +1,39 @@
+name: README build
+permissions:
+ contents: write
+on:
+ push:
+ branches:
+ - main
+ schedule:
+ - cron: "0 4 * * *"
+ workflow_dispatch:
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ - name: Get Python 3.11
+      uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ cache: 'pip'
+ cache-dependency-path: cache/requirements.txt
+ - name: Install dependencies
+ run: python -m pip install -r cache/requirements.txt
+ - name: Update README file
+ env:
+ ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }}
+ USER_NAME: ${{ secrets.USER_NAME }}
+ run: python today.py
+ - name: Commit
+ run: |-
+ git add .
+        git diff --cached
+ git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ git commit -m "[BOT] auto update README - $(date +'%Y-%m-%d')" -a || echo "No changes to commit"
+ git push
\ No newline at end of file
diff --git a/README.md b/README.md
index 9c06fc4..7798cf3 100644
--- a/README.md
+++ b/README.md
@@ -1,54 +1,3 @@
-### Hullo
-
-I am drifty, I create modified YouTube builds and other stuff.
-
-Everything I upload can also be found in [this Telegram Group](https://003274.xyz/megalomania) as well as all other links mentioned below. This group is a one stop shop for everything drifty (android/ios both).
-
-for Android
-- [ReVanced or RVX](https://003274.xyz/yt)
-- [YT Music](https://003274.xyz/ytm)
-- [Spotify Revanced](https://003274.xyz/spotify)
-- [MiXplorer Releases](https://003274.xyz/mix)
-
-for iOS
-- [My AltStore repo](https://003274.xyz/altstore) (this has everything listed below)
-- [YTLite](https://view.drifty.win/view/app/?source=https://raw.githubusercontent.com/driftywinds/driftywinds.github.io/master/AltStore/apps.json&id=com.google.ios.youtube) ([repo](https://github.com/driftywinds/YTLite))
-- [YTMusicUltimate](https://view.drifty.win/view/app/?source=https://driftywinds.github.io/AltStore/apps.json&id=com.google.ios.youtubemusic)
-- [BHX Twitter](https://github.com/driftywinds/BHX/releases)
-- ~~[YTLitePlus](https://ios.drifty.win/view/app/?source=https://driftywinds.github.io/AltStore/apps.json&id=com.google.ios.youtubeytlp) ([repo](https://github.com/driftywinds/YTLitePlus/))~~ (Not using YTLP anymore)
-- ~~[uYouEnhanced](https://ios.drifty.win/view/app/?source=https://driftywinds.github.io/AltStore/apps.json&id=com.google.ios.youtube) ([repo](https://github.com/driftywinds/uYouEnhanced))~~ (Not using UYE anymore)
-
-Cool projects I have made and use daily: -
-- [Twitchrise Bot](https://t.me/twitchrise_bot) ([repo](https://github.com/driftywinds/twitchrise-bot))
-- [Twitchrise](https://github.com/driftywinds/twitchrise)
-- [RSSrise](https://github.com/driftywinds/rssrise)
-- [Cyan Builder for iOS sideloading](https://github.com/driftywinds/cyan-builder)
-- [Docker Image Builder (using GitHub actions) for multiple platforms](https://github.com/driftywinds/image-builder) (this needs docs, I'll get to them someday)
-
-I am also a law graduate who works on sysadmin stuff on the side.
-
-Every service/website I host on the internet: - [docs.drifty.win](https://003274.xyz/services)
-
-I also work on - [The 3274 Project](https://www.003274.xyz/) (not very fleshed out yet)
-
-
-
-
-
-If you use my builds and want to support me you can check these out : - (even as low as 0.5 USD or 40 INR would help)
-
-[](https://ko-fi.com/driftywinds)
-
-or
-
-
-
-https://upi.drifty.win/ (for Indians)
-
-
-
-
-
-
-
-
+
+
+
\ No newline at end of file
diff --git a/README_old.md b/README_old.md
new file mode 100644
index 0000000..9c06fc4
--- /dev/null
+++ b/README_old.md
@@ -0,0 +1,54 @@
+### Hullo
+
+I am drifty, I create modified YouTube builds and other stuff.
+
+Everything I upload can also be found in [this Telegram Group](https://003274.xyz/megalomania) as well as all other links mentioned below. This group is a one stop shop for everything drifty (android/ios both).
+
+for Android
+- [ReVanced or RVX](https://003274.xyz/yt)
+- [YT Music](https://003274.xyz/ytm)
+- [Spotify Revanced](https://003274.xyz/spotify)
+- [MiXplorer Releases](https://003274.xyz/mix)
+
+for iOS
+- [My AltStore repo](https://003274.xyz/altstore) (this has everything listed below)
+- [YTLite](https://view.drifty.win/view/app/?source=https://raw.githubusercontent.com/driftywinds/driftywinds.github.io/master/AltStore/apps.json&id=com.google.ios.youtube) ([repo](https://github.com/driftywinds/YTLite))
+- [YTMusicUltimate](https://view.drifty.win/view/app/?source=https://driftywinds.github.io/AltStore/apps.json&id=com.google.ios.youtubemusic)
+- [BHX Twitter](https://github.com/driftywinds/BHX/releases)
+- ~~[YTLitePlus](https://ios.drifty.win/view/app/?source=https://driftywinds.github.io/AltStore/apps.json&id=com.google.ios.youtubeytlp) ([repo](https://github.com/driftywinds/YTLitePlus/))~~ (Not using YTLP anymore)
+- ~~[uYouEnhanced](https://ios.drifty.win/view/app/?source=https://driftywinds.github.io/AltStore/apps.json&id=com.google.ios.youtube) ([repo](https://github.com/driftywinds/uYouEnhanced))~~ (Not using UYE anymore)
+
+Cool projects I have made and use daily: -
+- [Twitchrise Bot](https://t.me/twitchrise_bot) ([repo](https://github.com/driftywinds/twitchrise-bot))
+- [Twitchrise](https://github.com/driftywinds/twitchrise)
+- [RSSrise](https://github.com/driftywinds/rssrise)
+- [Cyan Builder for iOS sideloading](https://github.com/driftywinds/cyan-builder)
+- [Docker Image Builder (using GitHub actions) for multiple platforms](https://github.com/driftywinds/image-builder) (this needs docs, I'll get to them someday)
+
+I am also a law graduate who works on sysadmin stuff on the side.
+
+Every service/website I host on the internet: - [docs.drifty.win](https://003274.xyz/services)
+
+I also work on - [The 3274 Project](https://www.003274.xyz/) (not very fleshed out yet)
+
+
+
+
+
+If you use my builds and want to support me you can check these out : - (even as low as 0.5 USD or 40 INR would help)
+
+[](https://ko-fi.com/driftywinds)
+
+or
+
+
+
+https://upi.drifty.win/ (for Indians)
+
+
+
+
+
+
+
+
diff --git a/cache/requirements.txt b/cache/requirements.txt
new file mode 100644
index 0000000..08c0a3c
--- /dev/null
+++ b/cache/requirements.txt
@@ -0,0 +1,3 @@
+python-dateutil
+requests
+lxml
\ No newline at end of file
diff --git a/dark_mode.svg b/dark_mode.svg
new file mode 100644
index 0000000..1646bfa
--- /dev/null
+++ b/dark_mode.svg
@@ -0,0 +1,133 @@
+
+
\ No newline at end of file
diff --git a/today.py b/today.py
new file mode 100644
index 0000000..e0f13ca
--- /dev/null
+++ b/today.py
@@ -0,0 +1,400 @@
+import datetime
+import requests
+import os
+from lxml import etree
+import time
+import hashlib
+
+# Fine-grained personal access token with All Repositories access:
+HEADERS = {'authorization': 'token '+ os.environ['ACCESS_TOKEN']}
+USER_NAME = os.environ['USER_NAME']
+QUERY_COUNT = {'user_getter': 0, 'follower_getter': 0, 'graph_repos_stars': 0, 'recursive_loc': 0, 'graph_commits': 0, 'loc_query': 0}
+
+def simple_request(func_name, query, variables):
+ """
+ Returns a request, or raises an Exception if the response does not succeed.
+ """
+ request = requests.post('https://api.github.com/graphql', json={'query': query, 'variables':variables}, headers=HEADERS)
+ if request.status_code == 200:
+ return request
+ raise Exception(func_name, ' has failed with a', request.status_code, request.text, QUERY_COUNT)
+
+
+def graph_commits(start_date, end_date):
+ """
+ Uses GitHub's GraphQL v4 API to return my total commit count
+ """
+ query_count('graph_commits')
+ query = '''
+ query($start_date: DateTime!, $end_date: DateTime!, $login: String!) {
+ user(login: $login) {
+ contributionsCollection(from: $start_date, to: $end_date) {
+ contributionCalendar {
+ totalContributions
+ }
+ }
+ }
+ }'''
+ variables = {'start_date': start_date,'end_date': end_date, 'login': USER_NAME}
+ request = simple_request(graph_commits.__name__, query, variables)
+ return int(request.json()['data']['user']['contributionsCollection']['contributionCalendar']['totalContributions'])
+
+
+def graph_repos_stars(count_type, owner_affiliation, cursor=None, add_loc=0, del_loc=0):
+ """
+ Uses GitHub's GraphQL v4 API to return my total repository, star, or lines of code count.
+ """
+ query_count('graph_repos_stars')
+ query = '''
+ query ($owner_affiliation: [RepositoryAffiliation], $login: String!, $cursor: String) {
+ user(login: $login) {
+ repositories(first: 100, after: $cursor, ownerAffiliations: $owner_affiliation) {
+ totalCount
+ edges {
+ node {
+ ... on Repository {
+ nameWithOwner
+ stargazers {
+ totalCount
+ }
+ }
+ }
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
+ }
+ }
+ }'''
+ variables = {'owner_affiliation': owner_affiliation, 'login': USER_NAME, 'cursor': cursor}
+ request = simple_request(graph_repos_stars.__name__, query, variables)
+ if request.status_code == 200:
+ if count_type == 'repos':
+ return request.json()['data']['user']['repositories']['totalCount']
+ elif count_type == 'stars':
+ return stars_counter(request.json()['data']['user']['repositories']['edges'])
+
+
+def recursive_loc(owner, repo_name, data, cache_comment, addition_total=0, deletion_total=0, my_commits=0, cursor=None):
+ """
+ Uses GitHub's GraphQL v4 API and cursor pagination to fetch 100 commits from a repository at a time
+ """
+ query_count('recursive_loc')
+ query = '''
+ query ($repo_name: String!, $owner: String!, $cursor: String) {
+ repository(name: $repo_name, owner: $owner) {
+ defaultBranchRef {
+ target {
+ ... on Commit {
+ history(first: 100, after: $cursor) {
+ totalCount
+ edges {
+ node {
+ ... on Commit {
+ committedDate
+ }
+ author {
+ user {
+ id
+ }
+ }
+ deletions
+ additions
+ }
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
+ }
+ }
+ }
+ }
+ }
+ }'''
+ variables = {'repo_name': repo_name, 'owner': owner, 'cursor': cursor}
+ request = requests.post('https://api.github.com/graphql', json={'query': query, 'variables':variables}, headers=HEADERS)
+ if request.status_code == 200:
+        if request.json()['data']['repository']['defaultBranchRef'] is not None: # Only count commits if repo isn't empty
+ return loc_counter_one_repo(owner, repo_name, data, cache_comment, request.json()['data']['repository']['defaultBranchRef']['target']['history'], addition_total, deletion_total, my_commits)
+ else: return 0
+ force_close_file(data, cache_comment) # saves what is currently in the file before this program crashes
+ if request.status_code == 403:
+ raise Exception('Too many requests in a short amount of time!\nYou\'ve hit the non-documented anti-abuse limit!')
+ raise Exception('recursive_loc() has failed with a', request.status_code, request.text, QUERY_COUNT)
+
+
+def loc_counter_one_repo(owner, repo_name, data, cache_comment, history, addition_total, deletion_total, my_commits):
+ """
+ Recursively call recursive_loc (since GraphQL can only search 100 commits at a time)
+ only adds the LOC value of commits authored by me
+ """
+ for node in history['edges']:
+ if node['node']['author']['user'] == OWNER_ID:
+ my_commits += 1
+ addition_total += node['node']['additions']
+ deletion_total += node['node']['deletions']
+
+ if history['edges'] == [] or not history['pageInfo']['hasNextPage']:
+ return addition_total, deletion_total, my_commits
+ else: return recursive_loc(owner, repo_name, data, cache_comment, addition_total, deletion_total, my_commits, history['pageInfo']['endCursor'])
+
+
+def loc_query(owner_affiliation, comment_size=0, force_cache=False, cursor=None, edges=[]):
+ """
+ Uses GitHub's GraphQL v4 API to query all the repositories I have access to (with respect to owner_affiliation)
+ """
+ query_count('loc_query')
+ query = '''
+ query ($owner_affiliation: [RepositoryAffiliation], $login: String!, $cursor: String) {
+ user(login: $login) {
+ repositories(first: 60, after: $cursor, ownerAffiliations: $owner_affiliation) {
+ edges {
+ node {
+ ... on Repository {
+ nameWithOwner
+ defaultBranchRef {
+ target {
+ ... on Commit {
+ history {
+ totalCount
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ }
+ }
+ }
+ }'''
+ variables = {'owner_affiliation': owner_affiliation, 'login': USER_NAME, 'cursor': cursor}
+ request = simple_request(loc_query.__name__, query, variables)
+ if request.json()['data']['user']['repositories']['pageInfo']['hasNextPage']:
+        edges = edges + request.json()['data']['user']['repositories']['edges']
+ return loc_query(owner_affiliation, comment_size, force_cache, request.json()['data']['user']['repositories']['pageInfo']['endCursor'], edges)
+ else:
+ return cache_builder(edges + request.json()['data']['user']['repositories']['edges'], comment_size, force_cache)
+
+
+def cache_builder(edges, comment_size, force_cache, loc_add=0, loc_del=0):
+ """
+ Checks each repository in edges to see if it has been updated since the last time it was cached
+ """
+ cached = True
+ filename = 'cache/'+hashlib.sha256(USER_NAME.encode('utf-8')).hexdigest()+'.txt'
+ try:
+ with open(filename, 'r') as f:
+ data = f.readlines()
+ except FileNotFoundError:
+ data = []
+ if comment_size > 0:
+ for _ in range(comment_size): data.append('This line is a comment block. Write whatever you want here.\n')
+ with open(filename, 'w') as f:
+ f.writelines(data)
+
+ if len(data)-comment_size != len(edges) or force_cache:
+ cached = False
+ flush_cache(edges, filename, comment_size)
+ with open(filename, 'r') as f:
+ data = f.readlines()
+
+ cache_comment = data[:comment_size]
+ data = data[comment_size:]
+ for index in range(len(edges)):
+ repo_hash, commit_count, *__ = data[index].split()
+ if repo_hash == hashlib.sha256(edges[index]['node']['nameWithOwner'].encode('utf-8')).hexdigest():
+ try:
+ if int(commit_count) != edges[index]['node']['defaultBranchRef']['target']['history']['totalCount']:
+ owner, repo_name = edges[index]['node']['nameWithOwner'].split('/')
+ loc = recursive_loc(owner, repo_name, data, cache_comment)
+ data[index] = repo_hash + ' ' + str(edges[index]['node']['defaultBranchRef']['target']['history']['totalCount']) + ' ' + str(loc[2]) + ' ' + str(loc[0]) + ' ' + str(loc[1]) + '\n'
+ except TypeError:
+ data[index] = repo_hash + ' 0 0 0 0\n'
+ with open(filename, 'w') as f:
+ f.writelines(cache_comment)
+ f.writelines(data)
+ for line in data:
+ loc = line.split()
+ loc_add += int(loc[3])
+ loc_del += int(loc[4])
+ return [loc_add, loc_del, loc_add - loc_del, cached]
+
+
+def flush_cache(edges, filename, comment_size):
+ """
+ Wipes the cache file
+ """
+ with open(filename, 'r') as f:
+ data = []
+ if comment_size > 0:
+ data = f.readlines()[:comment_size]
+ with open(filename, 'w') as f:
+ f.writelines(data)
+ for node in edges:
+ f.write(hashlib.sha256(node['node']['nameWithOwner'].encode('utf-8')).hexdigest() + ' 0 0 0 0\n')
+
+
+def force_close_file(data, cache_comment):
+ """
+ Forces the file to close, preserving whatever data was written to it
+ """
+ filename = 'cache/'+hashlib.sha256(USER_NAME.encode('utf-8')).hexdigest()+'.txt'
+ with open(filename, 'w') as f:
+ f.writelines(cache_comment)
+ f.writelines(data)
+ print('Error writing to cache file. Partial data saved to', filename)
+
+
+def stars_counter(data):
+ """
+ Count total stars in repositories owned by me
+ """
+ total_stars = 0
+ for node in data: total_stars += node['node']['stargazers']['totalCount']
+ return total_stars
+
+
+def svg_overwrite(filename, commit_data, star_data, repo_data, contrib_data, follower_data, loc_data):
+ """
+ Parse SVG files and update elements with commits, stars, repositories, and lines written
+ """
+ tree = etree.parse(filename)
+ root = tree.getroot()
+ # Age data calculation removed
+ justify_format(root, 'commit_data', commit_data, 23)
+ justify_format(root, 'star_data', star_data, 14)
+ justify_format(root, 'repo_data', repo_data, 7)
+ justify_format(root, 'contrib_data', contrib_data)
+ justify_format(root, 'follower_data', follower_data, 10)
+ justify_format(root, 'loc_data', loc_data[2], 13)
+ justify_format(root, 'loc_add', loc_data[0])
+ justify_format(root, 'loc_del', loc_data[1], 7)
+ tree.write(filename, encoding='utf-8', xml_declaration=True)
+
+
+def justify_format(root, element_id, new_text, length=0):
+ """
+ Updates and formats the text of the element, and modifies the amount of dots in the previous element
+ """
+ if isinstance(new_text, int):
+ new_text = f"{'{:,}'.format(new_text)}"
+ new_text = str(new_text)
+ find_and_replace(root, element_id, new_text)
+ just_len = max(0, length - len(new_text))
+ if just_len <= 2:
+ dot_map = {0: '', 1: ' ', 2: '. '}
+ dot_string = dot_map[just_len]
+ else:
+ dot_string = ' ' + ('.' * just_len) + ' '
+ find_and_replace(root, f"{element_id}_dots", dot_string)
+
+
+def find_and_replace(root, element_id, new_text):
+ """
+ Finds the element in the SVG file and replaces its text with a new value
+ """
+ element = root.find(f".//*[@id='{element_id}']")
+ if element is not None:
+ element.text = new_text
+
+
+def commit_counter(comment_size):
+ """
+ Counts up my total commits, using the cache file created by cache_builder.
+ """
+ total_commits = 0
+ filename = 'cache/'+hashlib.sha256(USER_NAME.encode('utf-8')).hexdigest()+'.txt'
+ with open(filename, 'r') as f:
+ data = f.readlines()
+ cache_comment = data[:comment_size]
+ data = data[comment_size:]
+ for line in data:
+ total_commits += int(line.split()[2])
+ return total_commits
+
+
+def user_getter(username):
+ """
+ Returns the account ID and creation time of the user
+ """
+ query_count('user_getter')
+ query = '''
+ query($login: String!){
+ user(login: $login) {
+ id
+ createdAt
+ }
+ }'''
+ variables = {'login': username}
+ request = simple_request(user_getter.__name__, query, variables)
+ return {'id': request.json()['data']['user']['id']}, request.json()['data']['user']['createdAt']
+
+def follower_getter(username):
+ """
+ Returns the number of followers of the user
+ """
+ query_count('follower_getter')
+ query = '''
+ query($login: String!){
+ user(login: $login) {
+ followers {
+ totalCount
+ }
+ }
+ }'''
+ request = simple_request(follower_getter.__name__, query, {'login': username})
+ return int(request.json()['data']['user']['followers']['totalCount'])
+
+
+def query_count(funct_id):
+ global QUERY_COUNT
+ QUERY_COUNT[funct_id] += 1
+
+
+def perf_counter(funct, *args):
+ start = time.perf_counter()
+ funct_return = funct(*args)
+ return funct_return, time.perf_counter() - start
+
+
+def formatter(query_type, difference, funct_return=False, whitespace=0):
+ print('{:<23}'.format(' ' + query_type + ':'), sep='', end='')
+ print('{:>12}'.format('%.4f' % difference + ' s ')) if difference > 1 else print('{:>12}'.format('%.4f' % (difference * 1000) + ' ms'))
+ if whitespace:
+ return f"{'{:,}'.format(funct_return): <{whitespace}}"
+ return funct_return
+
+
+if __name__ == '__main__':
+ print('Calculation times:')
+ user_data, user_time = perf_counter(user_getter, USER_NAME)
+ OWNER_ID, acc_date = user_data
+ formatter('account data', user_time)
+
+ # Age calculation removed
+
+ total_loc, loc_time = perf_counter(loc_query, ['OWNER', 'COLLABORATOR', 'ORGANIZATION_MEMBER'], 7)
+ formatter('LOC (cached)', loc_time) if total_loc[-1] else formatter('LOC (no cache)', loc_time)
+ commit_data, commit_time = perf_counter(commit_counter, 7)
+ star_data, star_time = perf_counter(graph_repos_stars, 'stars', ['OWNER'])
+ repo_data, repo_time = perf_counter(graph_repos_stars, 'repos', ['OWNER'])
+ contrib_data, contrib_time = perf_counter(graph_repos_stars, 'repos', ['OWNER', 'COLLABORATOR', 'ORGANIZATION_MEMBER'])
+ follower_data, follower_time = perf_counter(follower_getter, USER_NAME)
+
+ for index in range(len(total_loc)-1): total_loc[index] = '{:,}'.format(total_loc[index])
+
+ # Only writing to dark_mode.svg, age argument removed
+ svg_overwrite('dark_mode.svg', commit_data, star_data, repo_data, contrib_data, follower_data, total_loc[:-1])
+
+ print('\033[F\033[F\033[F\033[F\033[F\033[F\033[F\033[F',
+ '{:<21}'.format('Total function time:'), '{:>11}'.format('%.4f' % (user_time + loc_time + commit_time + star_time + repo_time + contrib_time)),
+ ' s \033[E\033[E\033[E\033[E\033[E\033[E\033[E\033[E', sep='')
+
+ print('Total GitHub GraphQL API calls:', '{:>3}'.format(sum(QUERY_COUNT.values())))
+ for funct_name, count in QUERY_COUNT.items(): print('{:<28}'.format(' ' + funct_name + ':'), '{:>6}'.format(count))
\ No newline at end of file