Commit e98bb0f

Merge pull request #63 from raulgomis/fix/changeset-file-race-condition
Fix changeset file race condition
2 parents: ace7a8d + 9757e68 · commit e98bb0f

File tree

3 files changed: +51 additions, -7 deletions
Lines changed: 4 additions & 0 deletions

@@ -0,0 +1,4 @@
+{
+  "type": "patch",
+  "description": "Fix file creation race condition"
+}
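This is the commit's own changeset entry: semversioner records each pending change as a small JSON file containing a bump type and a description. As a rough illustration, such an entry can be produced programmatically with the same API surface the tests below use (the project path here is a hypothetical placeholder):

# Illustrative only: writes a changeset like the one above into a
# semversioner-managed project. The directory path is a made-up example.
from semversioner import Semversioner

releaser = Semversioner("/path/to/project")  # hypothetical project path
releaser.add_change("patch", "Fix file creation race condition")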

semversioner/storage.py

Lines changed: 12 additions & 7 deletions

@@ -157,17 +157,22 @@ def create_changeset(self, change: Changeset) -> str:
         Absolute path of the file generated.
         """

-        filename = None
-        while (filename is None or os.path.isfile(os.path.join(self.next_release_path, filename))):
+        # Retry loop with atomic file creation to prevent race conditions
+        while True:
             filename = '{type_name}-{datetime}.json'.format(
                 type_name=change.type,
                 datetime="{:%Y%m%d%H%M%S%f}".format(datetime.now(timezone.utc))
             )
-
-        with open(os.path.join(self.next_release_path, filename), 'w') as f:
-            f.write(json.dumps(change, cls=EnhancedJSONEncoder, indent=2) + "\n")
-
-        return os.path.join(self.next_release_path, filename)
+            full_path = os.path.join(self.next_release_path, filename)
+
+            try:
+                # Use 'x' mode for exclusive creation - fails if file already exists
+                with open(full_path, 'x') as f:
+                    f.write(json.dumps(change, cls=EnhancedJSONEncoder, indent=2) + "\n")
+                return full_path
+            except FileExistsError:
+                # File already exists, retry with a new timestamp
+                continue

     def remove_all_changesets(self) -> None:
         click.echo("Removing changeset files in '" + self.next_release_path + "' directory.")
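The heart of the fix is open()'s exclusive-creation mode: 'x' creates the file and raises FileExistsError if the path already exists, so the existence check and the creation happen as a single atomic operation instead of the old check-then-write sequence, which left a window for another thread to claim the same filename. A minimal, self-contained sketch of the same retry pattern follows; the helper and filename scheme are simplified stand-ins, not the project's code:

import os
import tempfile
from datetime import datetime, timezone

def create_unique_file(directory: str, contents: str) -> str:
    """Atomically create a timestamp-named file, retrying on name collisions."""
    while True:
        # Microsecond-resolution UTC timestamp, mirroring the patched code's scheme
        name = "patch-{:%Y%m%d%H%M%S%f}.json".format(datetime.now(timezone.utc))
        path = os.path.join(directory, name)
        try:
            # 'x' mode fails with FileExistsError instead of overwriting
            with open(path, 'x') as f:
                f.write(contents)
            return path
        except FileExistsError:
            # Another writer created this name first; retry with a fresh timestamp
            continue

# Two back-to-back calls yield two distinct files
tmp_dir = tempfile.mkdtemp()
print(create_unique_file(tmp_dir, "{}\n"))
print(create_unique_file(tmp_dir, "{}\n"))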

tests/core_test.py

Lines changed: 35 additions & 0 deletions

@@ -2,6 +2,7 @@
 import shutil
 import os
 import tempfile
+import threading

 from semversioner import Semversioner
 from semversioner import ReleaseStatus

@@ -88,6 +89,40 @@ def test_release_stress(self) -> None:
         releaser.release()
         self.assertEqual(releaser.get_status(), ReleaseStatus(version='1.0.0', next_version=None, unreleased_changes=[]))

+    def test_concurrent_changeset_creation_race_condition(self) -> None:
+        """
+        Test that concurrent changeset creation does not result in file creation race conditions.
+        """
+        releaser = Semversioner(self.directory_name)
+        num_threads = 20
+        threads = []
+        descriptions = [f"desc {i}" for i in range(num_threads)]
+
+        def add_change(desc: str) -> None:
+            # Each thread tries to add a changeset with a unique description
+            releaser.add_change("patch", desc)
+
+        for desc in descriptions:
+            t = threading.Thread(target=add_change, args=(desc,))
+            threads.append(t)
+            t.start()
+
+        for t in threads:
+            t.join()
+
+        # Check that all changeset files were created and are unique
+        files = [f for f in os.listdir(self.next_release_dirname) if f.endswith('.json')]
+        self.assertEqual(len(files), num_threads)
+        # Optionally, check that all descriptions are present
+        found_descriptions = set()
+        for f in files:
+            with open(os.path.join(self.next_release_dirname, f)) as fh:
+                data = fh.read()
+            for desc in descriptions:
+                if desc in data:
+                    found_descriptions.add(desc)
+        self.assertEqual(set(descriptions), found_descriptions)
+
     def test_is_deprecated(self) -> None:
         releaser = Semversioner(self.directory_name)
         self.assertFalse(releaser.is_deprecated())
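The new test drives the race with raw threading.Thread objects. An equivalent check could also be written with concurrent.futures, which has the added benefit of re-raising any exception thrown inside a worker. This is an illustrative variant, not part of the commit; the releaser.add_change("patch", description) call is assumed to behave as in the test above:

from concurrent.futures import ThreadPoolExecutor

def add_changes_concurrently(releaser, descriptions):
    # Submit one add_change call per description and wait for all of them;
    # .result() propagates any exception raised in a worker thread.
    with ThreadPoolExecutor(max_workers=len(descriptions)) as pool:
        futures = [pool.submit(releaser.add_change, "patch", d) for d in descriptions]
        return [f.result() for f in futures]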
