Skip to content

Commit 573f00a

Browse files
committed
fix merge conflicts
2 parents c81d83d + 1250c82 commit 573f00a

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

61 files changed

+10067
-1135
lines changed

.env.example

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
## Here are the environment variables you can set for the Synapse Python client to configure OpenTelemetry tracing and metrics:
2+
# You can copy this file to `.env` and fill in the values as needed.
3+
# OTEL_SERVICE_NAME=my-service-using-synapse-python-client
4+
# OTEL_EXPORTER_OTLP_ENDPOINT=http://fill-me-in
5+
# OTEL_SERVICE_INSTANCE_ID=local_development_testing
6+
# OTEL_EXPORTER_OTLP_HEADERS=# Authorization

.github/scripts/delete_evaluations.py

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
import asyncio
from typing import Set

from synapseclient import Evaluation, Synapse

# Module-level client; login() relies on cached credentials / environment config.
syn = Synapse()
syn.login()

# Maximum number of concurrent deletion operations
MAX_CONCURRENT_DELETIONS = 5


async def delete_evaluation(eval_obj: Evaluation) -> str:
    """Delete one evaluation and return a human-readable status message.

    The synchronous ``syn.delete`` call is dispatched to the default thread
    pool executor so it does not block the event loop. Failures are reported
    in the returned message instead of being raised, so a single bad
    evaluation cannot abort the whole cleanup run.
    """
    try:
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, lambda: syn.delete(eval_obj))
        return f"Deleted evaluation {eval_obj.id}"
    except Exception as e:
        return f"Failed to delete evaluation {eval_obj.id}: {str(e)}"


async def main():
    """Stream all evaluations the user may delete and remove them concurrently."""
    # Bound how many deletions run at the same time.
    semaphore = asyncio.Semaphore(MAX_CONCURRENT_DELETIONS)

    # Tasks scheduled but not yet finished.
    pending_tasks: Set[asyncio.Task] = set()

    # Track whether the paginated listing returned anything at all.
    processed_any = False

    async def delete_with_semaphore(eval_obj: Evaluation):
        """Run one deletion under the concurrency semaphore and log its result."""
        async with semaphore:
            result = await delete_evaluation(eval_obj)
            print(result)
            return result

    # Process evaluations as they come in from the paginated REST iterator.
    for result in syn._GET_paginated(
        "/evaluation?accessType=DELETE", limit=200, offset=0
    ):
        processed_any = True
        eval_obj = Evaluation(**result)

        task = asyncio.create_task(delete_with_semaphore(eval_obj))
        pending_tasks.add(task)
        # Finished tasks remove themselves from the tracking set.
        task.add_done_callback(pending_tasks.discard)

        # Backpressure: once the window is full, wait for at least one task
        # to complete before scheduling more. The (done, pending) result is
        # intentionally discarded — the done-callback above already prunes
        # the set, so binding it (as `done, _ = ...`) was dead code.
        if len(pending_tasks) >= MAX_CONCURRENT_DELETIONS:
            await asyncio.wait(pending_tasks, return_when=asyncio.FIRST_COMPLETED)

    # Drain whatever is still running.
    if pending_tasks:
        await asyncio.gather(*pending_tasks)

    if not processed_any:
        print("No evaluations found to delete")


# Run the async main function
if __name__ == "__main__":
    asyncio.run(main())

.github/scripts/delete_projects.py

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
import asyncio
from typing import Dict, Set

from synapseclient import Synapse

# Module-level client; login() relies on cached credentials / environment config.
syn = Synapse()
syn.login()

# Maximum number of concurrent deletion operations
MAX_CONCURRENT_DELETIONS = 5


async def delete_project(project_id: str) -> str:
    """Delete one project by id and return a human-readable status message.

    The synchronous ``syn.delete`` call is dispatched to the default thread
    pool executor so it does not block the event loop. Failures are reported
    in the returned message instead of being raised, so a single bad project
    cannot abort the whole cleanup run.
    """
    try:
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, lambda: syn.delete(project_id))
        return f"Deleted {project_id}"
    except Exception as e:
        return f"Failed to delete {project_id}: {str(e)}"


async def main():
    """Stream the user's projects and delete them concurrently."""
    # Bound how many deletions run at the same time.
    semaphore = asyncio.Semaphore(MAX_CONCURRENT_DELETIONS)

    # Tasks scheduled but not yet finished.
    pending_tasks: Set[asyncio.Task] = set()

    # Track whether the listing returned anything at all.
    processed_any = False

    async def delete_with_semaphore(project: Dict):
        """Run one deletion under the concurrency semaphore and log its result."""
        async with semaphore:
            result = await delete_project(project["id"])
            print(result)
            return result

    # Process projects as they come in from the iterator.
    for project in syn.getChildren(parent=None, includeTypes=["project"]):
        processed_any = True

        task = asyncio.create_task(delete_with_semaphore(project))
        pending_tasks.add(task)
        # Finished tasks remove themselves from the tracking set.
        task.add_done_callback(pending_tasks.discard)

        # Backpressure: once the window is full, wait for at least one task
        # to complete before scheduling more. The (done, pending) result is
        # intentionally discarded — the done-callback above already prunes
        # the set, so binding it (as `done, _ = ...`) was dead code.
        if len(pending_tasks) >= MAX_CONCURRENT_DELETIONS:
            await asyncio.wait(pending_tasks, return_when=asyncio.FIRST_COMPLETED)

    # Drain whatever is still running.
    if pending_tasks:
        await asyncio.gather(*pending_tasks)

    if not processed_any:
        print("No projects found to delete")


# Run the async main function
if __name__ == "__main__":
    asyncio.run(main())

.github/scripts/delete_teams.py

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
import asyncio
from typing import Dict, Set

from synapseclient import Synapse

# Module-level client; login() relies on cached credentials / environment config.
syn = Synapse()
syn.login()

# Maximum number of concurrent team deletions
MAX_CONCURRENT_DELETIONS = 5


async def delete_team(team_id: str) -> str:
    """Delete one team by id and return a human-readable status message.

    The synchronous ``syn.delete_team`` call is dispatched to the default
    thread pool executor so it does not block the event loop. Failures are
    reported in the returned message instead of being raised, so a single
    bad team cannot abort the whole cleanup run.
    """
    try:
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, lambda: syn.delete_team(team_id))
        return f"Deleted team {team_id}"
    except Exception as e:
        return f"Failed to delete team {team_id}: {str(e)}"


async def main():
    """Find all teams for the current user and delete them concurrently."""
    # All teams the logged-in principal belongs to.
    teams = syn._find_teams_for_principal(principal_id=syn.credentials.owner_id)

    # Bound how many deletions run at the same time.
    semaphore = asyncio.Semaphore(MAX_CONCURRENT_DELETIONS)

    # Tasks scheduled but not yet finished.
    pending_tasks: Set[asyncio.Task] = set()

    # Track whether the listing returned anything at all.
    processed_any = False

    async def delete_with_semaphore(team: Dict):
        """Run one deletion under the concurrency semaphore and log its result."""
        async with semaphore:
            result = await delete_team(team["id"])
            print(result)
            return result

    # Process teams as they come in from the iterator.
    for team in teams:
        processed_any = True

        task = asyncio.create_task(delete_with_semaphore(team))
        pending_tasks.add(task)
        # Finished tasks remove themselves from the tracking set.
        task.add_done_callback(pending_tasks.discard)

        # Backpressure: once the window is full, wait for at least one task
        # to complete before scheduling more. The (done, pending) result is
        # intentionally discarded — the done-callback above already prunes
        # the set, so binding it (as `done, _ = ...`) was dead code.
        if len(pending_tasks) >= MAX_CONCURRENT_DELETIONS:
            await asyncio.wait(pending_tasks, return_when=asyncio.FIRST_COMPLETED)

    # Drain whatever is still running.
    if pending_tasks:
        await asyncio.gather(*pending_tasks)

    if not processed_any:
        print("No teams found to delete")


# Run the async main function
if __name__ == "__main__":
    asyncio.run(main())

.github/scripts/empty_trash.py

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
import asyncio
from typing import Set

from synapseclient import Synapse

# Module-level client; login() relies on cached credentials / environment config.
syn = Synapse()
syn.login()

# Maximum number of concurrent deletion operations
MAX_CONCURRENT_DELETIONS = 5


async def purge_entity(entity_id: str) -> str:
    """Permanently purge one entity from the trash can and return a status message.

    The synchronous ``syn.restPUT`` call is dispatched to the default thread
    pool executor so it does not block the event loop. Failures are reported
    in the returned message instead of being raised, so a single bad entity
    cannot abort the whole purge run.
    """
    try:
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(
            None, lambda: syn.restPUT(uri=f"/trashcan/purge/{entity_id}")
        )
        return f"Purged entity {entity_id} from trash"
    except Exception as e:
        return f"Failed to purge entity {entity_id}: {str(e)}"


async def main():
    """Stream the trash-can contents and purge each entity concurrently."""
    # Bound how many purges run at the same time.
    semaphore = asyncio.Semaphore(MAX_CONCURRENT_DELETIONS)

    # Tasks scheduled but not yet finished.
    pending_tasks: Set[asyncio.Task] = set()

    # Track whether the paginated listing returned anything at all.
    processed_any = False

    async def purge_with_semaphore(entity_id: str):
        """Run one purge under the concurrency semaphore and log its result."""
        async with semaphore:
            result = await purge_entity(entity_id)
            print(result)
            return result

    # Process entities as they come in from the paginated REST iterator.
    for result in syn._GET_paginated("/trashcan/view", limit=200, offset=0):
        processed_any = True
        entity_id = result["entityId"]

        task = asyncio.create_task(purge_with_semaphore(entity_id))
        pending_tasks.add(task)
        # Finished tasks remove themselves from the tracking set.
        task.add_done_callback(pending_tasks.discard)

        # Backpressure: once the window is full, wait for at least one task
        # to complete before scheduling more. The (done, pending) result is
        # intentionally discarded — the done-callback above already prunes
        # the set, so binding it (as `done, _ = ...`) was dead code.
        if len(pending_tasks) >= MAX_CONCURRENT_DELETIONS:
            await asyncio.wait(pending_tasks, return_when=asyncio.FIRST_COMPLETED)

    # Drain whatever is still running.
    if pending_tasks:
        await asyncio.gather(*pending_tasks)

    if not processed_any:
        print("No entities found in trash to purge")


# Run the async main function
if __name__ == "__main__":
    asyncio.run(main())

.github/workflows/build.yml

Lines changed: 7 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ jobs:
8383
path: |
8484
${{ steps.get-dependencies.outputs.site_packages_loc }}
8585
${{ steps.get-dependencies.outputs.site_bin_dir }}
86-
key: ${{ runner.os }}-${{ matrix.python }}-build-${{ env.cache-name }}-${{ hashFiles('setup.py') }}-v23
86+
key: ${{ runner.os }}-${{ matrix.python }}-build-${{ env.cache-name }}-${{ hashFiles('setup.py') }}-v24
8787

8888
- name: Install py-dependencies
8989
if: steps.cache-dependencies.outputs.cache-hit != 'true'
@@ -123,40 +123,6 @@ jobs:
123123
else
124124
echo "synapse_pat_available=true" >> $GITHUB_OUTPUT;
125125
fi
126-
- name: OpenTelemetry pre-check
127-
id: otel-check
128-
if: ${{ steps.secret-check.outputs.secrets_available == 'true' && steps.secret-check.outputs.synapse_pat_available == 'true' }}
129-
shell: bash
130-
run: |
131-
# Leave disabled during normal integration test runs - Enable when we want to
132-
# collect the data.
133-
# echo "run_opentelemetry=true" >> $GITHUB_OUTPUT;
134-
echo "run_opentelemetry=false" >> $GITHUB_OUTPUT;
135-
136-
# AWS CLI is pre-installed on github hosted runners - Commented out for GH runs
137-
# curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
138-
# unzip awscliv2.zip
139-
# sudo ./aws/install
140-
# curl "https://s3.amazonaws.com/session-manager-downloads/plugin/latest/ubuntu_64bit/session-manager-plugin.deb" -o "session-manager-plugin.deb"
141-
# sudo dpkg -i session-manager-plugin.deb
142-
# - name: Create AWS Config
143-
# if: ${{ steps.otel-check.outputs.run_opentelemetry == 'true' }}
144-
# shell: bash
145-
# run: |
146-
# touch test.awsConfig
147-
# printf "[default]\nregion = us-east-1\ncredential_process = \"tests/integration/synapse_creds.sh\" \"https://sc.sageit.org\" \"${{ secrets.synapse_personal_access_token }}\"\n" >> test.awsConfig
148-
# chmod +x tests/integration/synapse_creds.sh
149-
# If you are exporting data using `otlp` you can start a port forwarding session
150-
# - name: SSM Port Forward Start
151-
# if: ${{ steps.otel-check.outputs.run_opentelemetry == 'true' }}
152-
# shell: bash
153-
# env:
154-
# AWS_CONFIG_FILE: "test.awsConfig"
155-
# run: |
156-
# # Start a port-forwarding session in a non-interactive way. AWS will clean-up
157-
# # stale sessions after 20 minutes of inactivity
158-
# aws ssm start-session --target i-0ffcdecd1edf375ee --document-name AWS-StartPortForwardingSession --parameters "portNumber"=["4318"],"localPortNumber"=["4318"] & disown
159-
# sleep 15
160126
161127
# run integration tests iff the decryption keys for the test configuration are available.
162128
# they will not be available in pull requests from forks.
@@ -194,10 +160,12 @@ jobs:
194160
export EXTERNAL_S3_BUCKET_NAME="${{secrets.EXTERNAL_S3_BUCKET_NAME}}"
195161
export EXTERNAL_S3_BUCKET_AWS_ACCESS_KEY_ID="${{secrets.EXTERNAL_S3_BUCKET_AWS_ACCESS_KEY_ID}}"
196162
export EXTERNAL_S3_BUCKET_AWS_SECRET_ACCESS_KEY="${{secrets.EXTERNAL_S3_BUCKET_AWS_SECRET_ACCESS_KEY}}"
197-
if [ ${{ steps.otel-check.outputs.run_opentelemetry }} == "true" ]; then
198-
# Set to 'file' to enable OpenTelemetry export to file
199-
export SYNAPSE_OTEL_INTEGRATION_TEST_EXPORTER="file"
200-
fi
163+
164+
# Set env vars for OTEL
165+
export OTEL_EXPORTER_OTLP_ENDPOINT="${{ vars.OTEL_EXPORTER_OTLP_ENDPOINT }}"
166+
export OTEL_SERVICE_INSTANCE_ID="${{ vars.OTEL_SERVICE_INSTANCE_ID }}"
167+
export SYNAPSE_INTEGRATION_TEST_OTEL_ENABLED="${{ vars.SYNAPSE_INTEGRATION_TEST_OTEL_ENABLED }}"
168+
export OTEL_EXPORTER_OTLP_HEADERS="${{ secrets.OTEL_EXPORTER_OTLP_HEADERS }}"
201169
202170
# Setup ignore patterns based on Python version
203171
IGNORE_FLAGS="--ignore=tests/integration/synapseclient/test_command_line_client.py"
@@ -217,13 +185,6 @@ jobs:
217185
218186
# Execute the CLI tests in a non-dist way because they were causing some test instability when being run concurrently
219187
pytest -sv --reruns 3 --cov-append --cov=. --cov-report xml tests/integration/synapseclient/test_command_line_client.py
220-
- name: Upload otel spans
221-
uses: actions/upload-artifact@v4
222-
if: always()
223-
with:
224-
name: otel_spans_integration_testing_${{ matrix.os }}
225-
path: tests/integration/otel_spans_integration_testing_*.ndjson
226-
if-no-files-found: ignore
227188
- name: Upload coverage report
228189
id: upload_coverage_report
229190
uses: actions/upload-artifact@v4

0 commit comments

Comments
 (0)