Skip to content

Commit 2185a20

Browse files
Fixes ruff
Signed-off-by: Elena Kolevska <[email protected]>
1 parent 8a52a89 commit 2185a20

File tree

4 files changed

+71
-76
lines changed

4 files changed

+71
-76
lines changed

dapr/clients/grpc/client.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1797,6 +1797,9 @@ def schedule_job_alpha1(self, job: Job) -> DaprResponse:
17971797
)
17981798
validateNotBlankString(job_name=job.name)
17991799

1800+
if not job.schedule and not job.due_time:
1801+
raise ValueError('Job must have either schedule or due_time specified')
1802+
18001803
# Convert job to proto using the Job class private method
18011804
job_proto = job._get_proto()
18021805
request = api_v1.ScheduleJobRequest(job=job_proto)

examples/jobs/simple_job.py

Lines changed: 34 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -8,90 +8,85 @@
88
def create_job_data(message: str):
99
"""Helper function to create job payload data."""
1010
data = GrpcAny()
11-
data.value = json.dumps({"message": message}).encode('utf-8')
11+
data.value = json.dumps({'message': message}).encode('utf-8')
1212
return data
1313

1414

1515
def main():
1616
with DaprClient() as client:
1717
# Example 0: Simple job without data (works without protobuf)
18-
print("0. Scheduling a simple job without data...", flush=True)
19-
simple_job = Job(
20-
name="simple-job",
21-
schedule="@every 30s",
22-
overwrite=True
23-
)
18+
print('0. Scheduling a simple job without data...', flush=True)
19+
simple_job = Job(name='simple-job', schedule='@every 30s', overwrite=True)
2420

2521
try:
2622
client.schedule_job_alpha1(simple_job)
27-
print(f"✓ Simple job scheduled successfully", flush=True)
23+
print('✓ Simple job scheduled successfully', flush=True)
2824
except Exception as e:
29-
print(f"✗ Failed to schedule simple job: {e}", flush=True)
25+
print(f'✗ Failed to schedule simple job: {e}', flush=True)
3026
return
3127

3228
# Example 1: Schedule a recurring job with cron schedule
33-
print("1. Scheduling a recurring job with cron schedule...", flush=True)
34-
job_data = create_job_data("Hello from recurring job!")
29+
print('1. Scheduling a recurring job with cron schedule...', flush=True)
30+
job_data = create_job_data('Hello from recurring job!')
3531
recurring_job = Job(
36-
name="recurring-hello-job",
37-
schedule="@every 30s",
32+
name='recurring-hello-job',
33+
schedule='@every 30s',
3834
data=job_data,
39-
ttl="5m",
40-
overwrite=True
35+
ttl='5m',
36+
overwrite=True,
4137
)
4238

4339
try:
4440
client.schedule_job_alpha1(recurring_job)
45-
print(f"✓ Recurring job scheduled successfully", flush=True)
41+
print('✓ Recurring job scheduled successfully', flush=True)
4642
except Exception as e:
47-
print(f"✗ Failed to schedule recurring job: {e}", flush=True)
43+
print(f'✗ Failed to schedule recurring job: {e}', flush=True)
4844
return
4945

5046
# Example 2: Schedule a one-time job with due_time
51-
print("\n2. Scheduling a one-time job with due_time...", flush=True)
52-
due_time = (datetime.now() + timedelta(seconds=10)).isoformat() + "Z"
47+
print('\n2. Scheduling a one-time job with due_time...', flush=True)
48+
due_time = (datetime.now() + timedelta(seconds=10)).isoformat() + 'Z'
5349
one_time_job = Job(
54-
name="one-time-hello-job",
50+
name='one-time-hello-job',
5551
due_time=due_time,
56-
data=create_job_data("Hello from one-time job!")
52+
data=create_job_data('Hello from one-time job!'),
5753
)
5854

5955
try:
6056
client.schedule_job_alpha1(one_time_job)
61-
print(f"✓ One-time job scheduled successfully", flush=True)
57+
print('✓ One-time job scheduled successfully', flush=True)
6258
except Exception as e:
63-
print(f"✗ Failed to schedule one-time job: {e}", flush=True)
59+
print(f'✗ Failed to schedule one-time job: {e}', flush=True)
6460
return
6561

6662
# Example 3: Get job details
67-
print("\n3. Getting job details...", flush=True)
63+
print('\n3. Getting job details...', flush=True)
6864
try:
69-
job = client.get_job_alpha1("recurring-hello-job")
70-
print(f"✓ Retrieved job details:", flush=True)
71-
print(f" - Name: {job.name}", flush=True)
72-
print(f" - Schedule: {job.schedule}", flush=True)
73-
print(f" - TTL: {job.ttl}", flush=True)
65+
job = client.get_job_alpha1('recurring-hello-job')
66+
print('✓ Retrieved job details:', flush=True)
67+
print(f' - Name: {job.name}', flush=True)
68+
print(f' - Schedule: {job.schedule}', flush=True)
69+
print(f' - TTL: {job.ttl}', flush=True)
7470
if job.data:
7571
try:
7672
payload = json.loads(job.data.value.decode('utf-8'))
77-
print(f" - Data: {payload}", flush=True)
73+
print(f' - Data: {payload}', flush=True)
7874
except Exception:
79-
print(f" - Data: <binary data, {len(job.data.value)} bytes>", flush=True)
75+
print(f' - Data: <binary data, {len(job.data.value)} bytes>', flush=True)
8076
else:
81-
print(f" - Data: None", flush=True)
77+
print(' - Data: None', flush=True)
8278
except Exception as e:
83-
print(f"✗ Failed to get job details: {e}", flush=True)
79+
print(f'✗ Failed to get job details: {e}', flush=True)
8480

8581
# Example 4: Delete jobs
86-
print("\n4. Cleaning up - deleting jobs...", flush=True)
87-
for job_name in ["simple-job", "recurring-hello-job", "one-time-hello-job"]:
82+
print('\n4. Cleaning up - deleting jobs...', flush=True)
83+
for job_name in ['simple-job', 'recurring-hello-job', 'one-time-hello-job']:
8884
try:
8985
client.delete_job_alpha1(job_name)
90-
print(f"✓ Deleted job: {job_name}", flush=True)
86+
print(f'✓ Deleted job: {job_name}', flush=True)
9187
except Exception as e:
92-
print(f"✗ Failed to delete job {job_name}: {e}", flush=True)
93-
88+
print(f'✗ Failed to delete job {job_name}: {e}', flush=True)
9489

9590

96-
if __name__ == "__main__":
91+
if __name__ == '__main__':
9792
main()

tests/clients/fake_dapr_server.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def __init__(self, grpc_port: int = 50001, http_port: int = 8080):
5454
self.workflow_status = {}
5555
self.workflow_options: Dict[str, str] = {}
5656
self.metadata: Dict[str, str] = {}
57-
self.jobs: Dict[str, api_v1.Job] = {}
57+
self.jobs: Dict[str, api_v1.Job] = {}
5858
self._next_exception = None
5959

6060
def start(self):
@@ -544,6 +544,10 @@ def ScheduleJobAlpha1(self, request, context):
544544
if not request.job.name:
545545
raise ValueError('Job name is required')
546546

547+
# Validate that the job has a trigger (either a schedule or a due_time)
548+
if not request.job.schedule and not request.job.due_time:
549+
raise ValueError('Job must have either schedule or due_time specified')
550+
547551
# Store the job
548552
self.jobs[request.job.name] = request.job
549553

tests/clients/test_dapr_grpc_client.py

Lines changed: 29 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1242,7 +1242,7 @@ def test_converse_alpha1_error_handling(self):
12421242
def test_schedule_job_alpha1_success(self):
12431243
"""Test successful job scheduling."""
12441244
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
1245-
job = Job(name="test-job", schedule="@every 1m")
1245+
job = Job(name='test-job', schedule='@every 1m')
12461246

12471247
# Schedule the job
12481248
response = dapr.schedule_job_alpha1(job)
@@ -1251,10 +1251,10 @@ def test_schedule_job_alpha1_success(self):
12511251
self.assertIsInstance(response, DaprResponse)
12521252

12531253
# Verify job was stored in fake server
1254-
self.assertIn("test-job", self._fake_dapr_server.jobs)
1255-
stored_job = self._fake_dapr_server.jobs["test-job"]
1256-
self.assertEqual(stored_job.name, "test-job")
1257-
self.assertEqual(stored_job.schedule, "@every 1m")
1254+
self.assertIn('test-job', self._fake_dapr_server.jobs)
1255+
stored_job = self._fake_dapr_server.jobs['test-job']
1256+
self.assertEqual(stored_job.name, 'test-job')
1257+
self.assertEqual(stored_job.schedule, '@every 1m')
12581258
self.assertEqual(stored_job.overwrite, False)
12591259
# Verify data field is always set (even if empty)
12601260
self.assertTrue(stored_job.HasField('data'))
@@ -1269,13 +1269,7 @@ def test_schedule_job_alpha1_success_with_data(self):
12691269
data = GrpcAny()
12701270
data.value = b'{"message": "Hello from job!", "priority": "high"}'
12711271

1272-
job = Job(
1273-
name="test-job-with-data",
1274-
schedule="@every 2m",
1275-
data=data,
1276-
repeats=3,
1277-
ttl="10m"
1278-
)
1272+
job = Job(name='test-job-with-data', schedule='@every 2m', data=data, repeats=3, ttl='10m')
12791273

12801274
# Schedule the job
12811275
response = dapr.schedule_job_alpha1(job)
@@ -1284,86 +1278,88 @@ def test_schedule_job_alpha1_success_with_data(self):
12841278
self.assertIsInstance(response, DaprResponse)
12851279

12861280
# Verify job was stored in fake server with all data
1287-
self.assertIn("test-job-with-data", self._fake_dapr_server.jobs)
1288-
stored_job = self._fake_dapr_server.jobs["test-job-with-data"]
1289-
self.assertEqual(stored_job.name, "test-job-with-data")
1290-
self.assertEqual(stored_job.schedule, "@every 2m")
1281+
self.assertIn('test-job-with-data', self._fake_dapr_server.jobs)
1282+
stored_job = self._fake_dapr_server.jobs['test-job-with-data']
1283+
self.assertEqual(stored_job.name, 'test-job-with-data')
1284+
self.assertEqual(stored_job.schedule, '@every 2m')
12911285
self.assertEqual(stored_job.repeats, 3)
1292-
self.assertEqual(stored_job.ttl, "10m")
1286+
self.assertEqual(stored_job.ttl, '10m')
12931287
self.assertEqual(stored_job.overwrite, False)
12941288

12951289
# Verify data field contains the payload
12961290
self.assertTrue(stored_job.HasField('data'))
1297-
self.assertEqual(stored_job.data.value, b'{"message": "Hello from job!", "priority": "high"}')
1291+
self.assertEqual(
1292+
stored_job.data.value, b'{"message": "Hello from job!", "priority": "high"}'
1293+
)
12981294

12991295
def test_schedule_job_alpha1_validation_error(self):
13001296
"""Test validation error in job scheduling."""
13011297
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13021298

13031299
# Test empty job name - this should be caught by client validation
13041300
with self.assertRaises(ValueError):
1305-
job = Job(name="", schedule="@every 1m")
1301+
job = Job(name='', schedule='@every 1m')
13061302
dapr.schedule_job_alpha1(job)
13071303

13081304
def test_get_job_alpha1_success(self):
13091305
"""Test successful job retrieval."""
13101306
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13111307

13121308
# First schedule a job
1313-
original_job = Job(name="test-job", schedule="@every 1m", repeats=5, ttl="1h")
1309+
original_job = Job(name='test-job', schedule='@every 1m', repeats=5, ttl='1h')
13141310
dapr.schedule_job_alpha1(original_job)
13151311

13161312
# Now retrieve it
1317-
retrieved_job = dapr.get_job_alpha1("test-job")
1313+
retrieved_job = dapr.get_job_alpha1('test-job')
13181314

13191315
# Verify response
13201316
self.assertIsInstance(retrieved_job, Job)
1321-
self.assertEqual(retrieved_job.name, "test-job")
1322-
self.assertEqual(retrieved_job.schedule, "@every 1m")
1317+
self.assertEqual(retrieved_job.name, 'test-job')
1318+
self.assertEqual(retrieved_job.schedule, '@every 1m')
13231319
self.assertEqual(retrieved_job.repeats, 5)
1324-
self.assertEqual(retrieved_job.ttl, "1h")
1320+
self.assertEqual(retrieved_job.ttl, '1h')
13251321
self.assertEqual(retrieved_job.overwrite, False)
13261322

13271323
def test_get_job_alpha1_validation_error(self):
13281324
"""Test validation error in job retrieval."""
13291325
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13301326

13311327
with self.assertRaises(ValueError):
1332-
dapr.get_job_alpha1("")
1328+
dapr.get_job_alpha1('')
13331329

13341330
def test_get_job_alpha1_not_found(self):
13351331
"""Test getting a job that doesn't exist."""
13361332
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13371333

13381334
with self.assertRaises(DaprGrpcError):
1339-
dapr.get_job_alpha1("non-existent-job")
1335+
dapr.get_job_alpha1('non-existent-job')
13401336

13411337
def test_delete_job_alpha1_success(self):
13421338
"""Test successful job deletion."""
13431339
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13441340

13451341
# First schedule a job
1346-
job = Job(name="test-job", schedule="@every 1m")
1342+
job = Job(name='test-job', schedule='@every 1m')
13471343
dapr.schedule_job_alpha1(job)
13481344

13491345
# Verify job exists
1350-
self.assertIn("test-job", self._fake_dapr_server.jobs)
1346+
self.assertIn('test-job', self._fake_dapr_server.jobs)
13511347

13521348
# Delete the job
1353-
response = dapr.delete_job_alpha1("test-job")
1349+
response = dapr.delete_job_alpha1('test-job')
13541350

13551351
# Verify response
13561352
self.assertIsInstance(response, DaprResponse)
13571353

13581354
# Verify job was removed from fake server
1359-
self.assertNotIn("test-job", self._fake_dapr_server.jobs)
1355+
self.assertNotIn('test-job', self._fake_dapr_server.jobs)
13601356

13611357
def test_delete_job_alpha1_validation_error(self):
13621358
"""Test validation error in job deletion."""
13631359
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13641360

13651361
with self.assertRaises(ValueError):
1366-
dapr.delete_job_alpha1("")
1362+
dapr.delete_job_alpha1('')
13671363

13681364
def test_jobs_error_handling(self):
13691365
"""Test error handling for Jobs API using fake server's exception mechanism."""
@@ -1372,14 +1368,11 @@ def test_jobs_error_handling(self):
13721368
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
13731369

13741370
# Set up fake server to raise an exception on next call
1375-
error_status = status_pb2.Status(
1376-
code=code_pb2.INTERNAL,
1377-
message="Simulated server error"
1378-
)
1371+
error_status = status_pb2.Status(code=code_pb2.INTERNAL, message='Simulated server error')
13791372
self._fake_dapr_server.raise_exception_on_next_call(error_status)
13801373

13811374
# Try to schedule a job - should raise DaprGrpcError
1382-
job = Job(name="error-test", schedule="@every 1m")
1375+
job = Job(name='error-test', schedule='@every 1m')
13831376
with self.assertRaises(DaprGrpcError):
13841377
dapr.schedule_job_alpha1(job)
13851378

0 commit comments

Comments
 (0)