Skip to content

Commit 17ce955

Browse files
committed
- update timeline.py to preserve existing GptVersion values
- add support for including latest repository commit
- update commit markers in timeline_data.json
- add new tools to sitemap: `legal_finder.html` and `reaction_time_trainer.html`
1 parent 9800f5c commit 17ce955

File tree

3 files changed

+73
-9
lines changed

3 files changed

+73
-9
lines changed

sitemap.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -500,6 +500,11 @@
500500
<lastmod>2024-05-21T22:00:00+00:00</lastmod>
501501
<priority>0.80</priority>
502502
</url>
503+
<url>
504+
<loc>https://www.gptgames.dev/tools/legal_finder.html</loc>
505+
<lastmod>2025-03-16T23:00:00+00:00</lastmod>
506+
<priority>0.80</priority>
507+
</url>
503508
<url>
504509
<loc>https://www.gptgames.dev/tools/letter_frequency_counter.html</loc>
505510
<lastmod>2024-07-10T22:00:00+00:00</lastmod>
@@ -680,6 +685,11 @@
680685
<lastmod>2024-08-24T22:00:00+00:00</lastmod>
681686
<priority>0.80</priority>
682687
</url>
688+
<url>
689+
<loc>https://www.gptgames.dev/tools/reaction_time_trainer.html</loc>
690+
<lastmod>2025-03-17T23:00:00+00:00</lastmod>
691+
<priority>0.80</priority>
692+
</url>
683693
<url>
684694
<loc>https://www.gptgames.dev/tools/reading_time_estimator.html</loc>
685695
<lastmod>2025-02-23T23:00:00+00:00</lastmod>

timeline_data.json

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,13 @@
11
[
2+
{
3+
"hash": "3124e0320713ec6dbbd320af2d6f4e1fa6677da7",
4+
"timestamp": "2025-03-20T18:23:53+00:00",
5+
"GptVersion": "CURRENT"
6+
},
27
{
38
"hash": "a2d2d96291ad3bb2172225e304f275ac2236b355",
49
"timestamp": "2025-03-07T15:23:34+00:00",
5-
"GptVersion": "CURRENT"
10+
"GptVersion": null
611
},
712
{
813
"hash": "9fb190b6536324e5bb90c3553f3e25f2028ad84e",

util/timeline.py

Lines changed: 57 additions & 8 deletions
Original file line numberDiff line numberDiff line change
import json
from github import Github
from dotenv import load_dotenv

# Load environment variables (GITHUB_ACCESS_TOKEN) from a local .env file.
load_dotenv()
GITHUB_ACCESS_TOKEN = os.getenv('GITHUB_ACCESS_TOKEN')
# NOTE(review): if the token is unset, Github() falls back to anonymous
# access with a much lower rate limit -- confirm that is acceptable.

# Set up the GitHub API client. per_page=100 only raises the pagination
# page size (fewer HTTP round trips); it does not affect caching.
g = Github(GITHUB_ACCESS_TOKEN, per_page=100)

# Repository and the file whose commit history drives the timeline.
repo = g.get_repo("TobiasMue91/tobiasmue91.github.io")
file_path = "index.html"

# Fetch the commit history for index.html on main (materialized to a list
# so it can be indexed from both ends below). Commits come newest-first.
index_commits = list(repo.get_commits(path=file_path, sha="main"))

# Also fetch the latest commit of the repository, regardless of file.
latest_commit = repo.get_commits()[0]  # first element is the most recent

# Print the date ranges so a human can sanity-check the run.
if index_commits:
    print(f"Oldest index.html commit: {index_commits[-1].commit.author.date}")
    print(f"Latest index.html commit: {index_commits[0].commit.author.date}")
    print(f"Total index.html commits found: {len(index_commits)}")

print(f"Latest repository commit: {latest_commit.commit.author.date} (hash: {latest_commit.sha})")

# Load the existing file so manually curated GptVersion values survive a rerun.
existing_data = {}
try:
    with open("timeline_data.json", "r") as f:
        existing_entries = json.load(f)
    # Index by commit hash for O(1) lookup while rebuilding.
    for entry in existing_entries:
        existing_data[entry["hash"]] = entry
except FileNotFoundError:
    pass  # first run: no existing file, that's OK
except json.JSONDecodeError:
    # A corrupt file previously crashed the script; rebuild from scratch
    # instead (GptVersion values from the broken file are lost).
    print("Warning: timeline_data.json is not valid JSON; rebuilding from scratch")

# Process the commits: keep only the first (newest) commit of each day.
timeline_data = []
last_date = None

for commit in index_commits:
    commit_date = commit.commit.author.date.date()

    if commit_date != last_date:
        # Create the new entry with a default GptVersion.
        new_entry = {
            "hash": commit.sha,
            "timestamp": commit.commit.author.date.isoformat(),
            "GptVersion": None  # default; may be restored below
        }

        # Preserve a previously assigned (truthy) GptVersion for this hash.
        if commit.sha in existing_data and existing_data[commit.sha].get("GptVersion"):
            new_entry["GptVersion"] = existing_data[commit.sha]["GptVersion"]

        timeline_data.append(new_entry)
        last_date = commit_date

# Commit timestamps carry a UTC offset (e.g. "+00:00"), so compare against
# "today" in UTC as well -- a naive local datetime.now() could be off by a
# day near midnight.
from datetime import timezone  # harmless even if datetime is imported at the top

today = datetime.now(timezone.utc).date()
has_today_entry = any(datetime.fromisoformat(entry["timestamp"]).date() == today for entry in timeline_data)

# If today is not represented yet, prepend the repository's latest commit --
# unless that exact commit already made it into the timeline above (it may
# be an index.html commit from an earlier day, which would otherwise be
# duplicated).
known_hashes = {entry["hash"] for entry in timeline_data}
if not has_today_entry and latest_commit.sha not in known_hashes:
    latest_entry = {
        "hash": latest_commit.sha,
        "timestamp": latest_commit.commit.author.date.isoformat(),
        "GptVersion": None  # You can set this manually later
    }

    # Preserve a previously assigned (truthy) GptVersion for this hash too.
    if latest_commit.sha in existing_data and existing_data[latest_commit.sha].get("GptVersion"):
        latest_entry["GptVersion"] = existing_data[latest_commit.sha]["GptVersion"]

    print(f"Adding latest repository commit for today")
    timeline_data.insert(0, latest_entry)  # Insert at the beginning (most recent)

# Save the rebuilt timeline back to disk.
with open("timeline_data.json", "w") as f:
    json.dump(timeline_data, f, indent=2)

0 commit comments

Comments
 (0)