Commit 44ef9b6

Submit job 1 node

1 parent cedbf0b commit 44ef9b6
File tree: 1 file changed (+101, -39 lines)
@@ -18,7 +18,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 12,
 "id": "839fa332-70a6-4818-a190-18c9ca109c28",
 "metadata": {},
 "outputs": [
@@ -79,7 +79,7 @@
 "Building wheels for collected packages: dapi\n",
 " Building editable for dapi (pyproject.toml) ... \u001b[?25ldone\n",
 "\u001b[?25h Created wheel for dapi: filename=dapi-1.0.0-py3-none-any.whl size=3825 sha256=f9fd4761dd2e940e7cd0f96795280478e27aacfbc14275ffb57c0ee44f9b129c\n",
-" Stored in directory: /private/var/folders/w8/xz590jyd7r36zmxcspgzj3z40000gn/T/pip-ephem-wheel-cache-q5wx61zs/wheels/98/df/91/ed70fe2dca11c3c6e5b6e8e6eef18c373a119d095037f892a3\n",
+" Stored in directory: /private/var/folders/w8/xz590jyd7r36zmxcspgzj3z40000gn/T/pip-ephem-wheel-cache-mqlekm07/wheels/98/df/91/ed70fe2dca11c3c6e5b6e8e6eef18c373a119d095037f892a3\n",
 "Successfully built dapi\n",
 "Installing collected packages: dapi\n",
 "Successfully installed dapi-1.0.0\n",
@@ -96,7 +96,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 2,
+"execution_count": 13,
 "id": "35fca324-ee48-41c8-84a1-78ad7b03aae8",
 "metadata": {},
 "outputs": [],
@@ -116,7 +116,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 3,
+"execution_count": 14,
 "id": "58b62f77-23b6-4355-91f1-b680ae6d6cdf",
 "metadata": {},
 "outputs": [
@@ -138,7 +138,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 4,
+"execution_count": 15,
 "id": "feee3ce0",
 "metadata": {},
 "outputs": [],
@@ -153,7 +153,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 16,
 "id": "3f0ee687",
 "metadata": {},
 "outputs": [
@@ -177,7 +177,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": 17,
 "id": "6257d31a",
 "metadata": {},
 "outputs": [
@@ -195,7 +195,7 @@
 "\n",
 "--- Generated Job Request Dictionary ---\n",
 "{\n",
-" \"name\": \"mpm-20250427_095544\",\n",
+" \"name\": \"mpm-20250427_101205\",\n",
 " \"appId\": \"mpm\",\n",
 " \"appVersion\": \"1.1.0\",\n",
 " \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n",
@@ -270,24 +270,70 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 18,
 "id": "5a17eee7",
 "metadata": {},
-"outputs": [],
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"Modifying job request dictionary...\n",
+"{\n",
+" \"name\": \"mpm-20250427_101205\",\n",
+" \"appId\": \"mpm\",\n",
+" \"appVersion\": \"1.1.0\",\n",
+" \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n",
+" \"execSystemId\": \"frontera\",\n",
+" \"archiveSystemId\": \"frontera\",\n",
+" \"archiveOnAppError\": true,\n",
+" \"execSystemLogicalQueue\": \"development\",\n",
+" \"nodeCount\": 1,\n",
+" \"coresPerNode\": 1,\n",
+" \"maxMinutes\": 10,\n",
+" \"memoryMB\": 192000,\n",
+" \"isMpi\": false,\n",
+" \"tags\": [],\n",
+" \"fileInputs\": [\n",
+" {\n",
+" \"name\": \"Input Directory\",\n",
+" \"sourceUrl\": \"tapis://designsafe.storage.default/kks32/mpm-benchmarks/2d/uniaxial_stress/\",\n",
+" \"autoMountLocal\": true,\n",
+" \"targetPath\": \"inputDirectory\"\n",
+" }\n",
+" ],\n",
+" \"parameterSet\": {\n",
+" \"appArgs\": [\n",
+" {\n",
+" \"name\": \"Input Script\",\n",
+" \"arg\": \"mpm.json\"\n",
+" }\n",
+" ],\n",
+" \"schedulerOptions\": [\n",
+" {\n",
+" \"name\": \"TACC Allocation\",\n",
+" \"arg\": \"-A BCS20003\"\n",
+" }\n",
+" ]\n",
+" }\n",
+"}\n"
+]
+}
+],
 "source": [
 "# At this point, the user can inspect and modify job_req_dict if needed.\n",
 "# For example:\n",
-"# print(\"Modifying job request dictionary...\")\n",
-"# job_req_dict['description'] = \"My modified description\"\n",
-"# job_req_dict['parameterSet']['envVariables'].append({'key': 'MY_EXTRA_VAR', 'value': 'extra_value'})\n",
-"# print(\"\\n--- Modified Job Request Dictionary ---\")\n",
-"# print(json.dumps(job_req_dict, indent=2, default=str))\n",
-"# print(\"--------------------------------------\")"
+"print(\"Modifying job request dictionary...\")\n",
+"job_req_dict[\"nodeCount\"] = 1\n",
+"job_req_dict[\"coresPerNode\"] = 1\n",
+"job_req_dict[\"execSystemLogicalQueue\"] = \"development\"\n",
+"\n",
+"print(json.dumps(job_req_dict, indent=2, default=str))"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": 19,
 "id": "8e04a5ef",
 "metadata": {},
 "outputs": [
@@ -300,16 +346,16 @@
 "\n",
 "--- Submitting Tapis Job Request ---\n",
 "{\n",
-" \"name\": \"mpm-20250427_095544\",\n",
+" \"name\": \"mpm-20250427_101205\",\n",
 " \"appId\": \"mpm\",\n",
 " \"appVersion\": \"1.1.0\",\n",
 " \"description\": \"Material Point Method (MPM) is a particle based method that represents the material as a collection of material points, and their deformations are determined by Newton\\u2019s laws of motion.\",\n",
 " \"execSystemId\": \"frontera\",\n",
 " \"archiveSystemId\": \"frontera\",\n",
 " \"archiveOnAppError\": true,\n",
-" \"execSystemLogicalQueue\": \"normal\",\n",
-" \"nodeCount\": 3,\n",
-" \"coresPerNode\": 56,\n",
+" \"execSystemLogicalQueue\": \"development\",\n",
+" \"nodeCount\": 1,\n",
+" \"coresPerNode\": 1,\n",
 " \"maxMinutes\": 10,\n",
 " \"memoryMB\": 192000,\n",
 " \"isMpi\": false,\n",
@@ -338,9 +384,9 @@
 " }\n",
 "}\n",
 "------------------------------------\n",
-"Job submitted successfully. UUID: b196bf63-795e-4533-9544-b9ef16b1a04c-007\n",
+"Job submitted successfully. UUID: 6efde421-083b-48c3-b980-1f5b05494846-007\n",
 "Job Submitted Successfully!\n",
-"Job UUID: b196bf63-795e-4533-9544-b9ef16b1a04c-007\n"
+"Job UUID: 6efde421-083b-48c3-b980-1f5b05494846-007\n"
 ]
 }
 ],
@@ -370,7 +416,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 9,
+"execution_count": 20,
 "id": "cd46a8a6",
 "metadata": {},
 "outputs": [
@@ -379,16 +425,17 @@
 "output_type": "stream",
 "text": [
 "\n",
-"Monitoring job b196bf63-795e-4533-9544-b9ef16b1a04c-007...\n",
-"Monitoring job b196bf63-795e-4533-9544-b9ef16b1a04c-007 (Initial Status: STAGING_JOB, Timeout: 10 mins, Interval: 30s)\n",
-"\tJob b196bf63-795e-4533-9544-b9ef16b1a04c-007 Status: QUEUED (2025-04-27T09:59:31.487189)\n",
-"\tJob b196bf63-795e-4533-9544-b9ef16b1a04c-007 Status: RUNNING (2025-04-27T10:00:01.917496)\n",
-"\tJob b196bf63-795e-4533-9544-b9ef16b1a04c-007 Status: ARCHIVING (2025-04-27T10:03:05.393456)\n",
-"\n",
-"Error during monitoring for job b196bf63-795e-4533-9544-b9ef16b1a04c-007: Monitoring timeout after 10 minutes for job b196bf63-795e-4533-9544-b9ef16b1a04c-007. Last status: ARCHIVING\n",
+"Monitoring job 6efde421-083b-48c3-b980-1f5b05494846-007...\n",
+"Monitoring job 6efde421-083b-48c3-b980-1f5b05494846-007 (Initial Status: STAGING_INPUTS, Timeout: 10 mins, Interval: 30s)\n",
+"\tJob 6efde421-083b-48c3-b980-1f5b05494846-007 Status: STAGING_JOB (2025-04-27T10:16:15.015844)\n",
+"\tJob 6efde421-083b-48c3-b980-1f5b05494846-007 Status: QUEUED (2025-04-27T10:17:15.786104)\n",
+"\tJob 6efde421-083b-48c3-b980-1f5b05494846-007 Status: RUNNING (2025-04-27T10:17:46.194435)\n",
+"\tJob 6efde421-083b-48c3-b980-1f5b05494846-007 Status: ARCHIVING (2025-04-27T10:18:47.159331)\n",
+"\tJob 6efde421-083b-48c3-b980-1f5b05494846-007 Status: FINISHED (2025-04-27T10:19:17.708791)\n",
+"Job 6efde421-083b-48c3-b980-1f5b05494846-007 reached terminal state: FINISHED\n",
 "\n",
-"Job monitoring failed or timed out: Error monitoring job b196bf63-795e-4533-9544-b9ef16b1a04c-007: Monitoring timeout after 10 minutes for job b196bf63-795e-4533-9544-b9ef16b1a04c-007. Last status: ARCHIVING\n",
-"Last known status: ARCHIVING\n"
+"Job 6efde421-083b-48c3-b980-1f5b05494846-007 finished monitoring.\n",
+"Final Status: FINISHED\n"
 ]
 }
 ],
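
With one development-queue node, the monitor output above now reaches FINISHED inside the 10-minute window instead of timing out during ARCHIVING. A minimal sketch of that polling pattern, assuming a hypothetical get_status(job_uuid) callable that returns the current Tapis status string (the actual dapi call is not shown in this diff):

# Poll the job status every `interval` seconds until a terminal state or timeout.
# `get_status` is a hypothetical stand-in, not a dapi API from this commit.
import time

TERMINAL_STATES = {"FINISHED", "FAILED", "CANCELLED"}  # illustrative set

def monitor(job_uuid, get_status, timeout_minutes=10, interval=30):
    deadline = time.time() + timeout_minutes * 60
    status = get_status(job_uuid)
    print(f"Monitoring job {job_uuid} (Initial Status: {status})")
    while status not in TERMINAL_STATES:
        if time.time() > deadline:
            raise TimeoutError(f"Monitoring timeout; last status: {status}")
        time.sleep(interval)
        status = get_status(job_uuid)
        print(f"\tJob {job_uuid} Status: {status}")
    return status
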
@@ -421,7 +468,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 10,
+"execution_count": 24,
 "id": "4a6daeec",
 "metadata": {},
 "outputs": [
@@ -430,7 +477,12 @@
 "output_type": "stream",
 "text": [
 "\n",
-"Skipping runtime summary because job b196bf63-795e-4533-9544-b9ef16b1a04c-007 did not finish normally (Status: ARCHIVING).\n"
+"Attempting to display runtime summary for job 6efde421-083b-48c3-b980-1f5b05494846-007...\n",
+"\n",
+"Runtime Summary\n",
+"---------------\n",
+"TOTAL time: 00:04:39\n",
+"---------------\n"
 ]
 }
 ],
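
Because the job now finishes normally, the runtime summary above reports TOTAL time: 00:04:39. Purely for illustration, this is the usual HH:MM:SS formatting of an elapsed wall-clock duration in seconds:

# Illustrative only: format an elapsed duration the way the summary reports it.
def format_elapsed(seconds: int) -> str:
    hours, rem = divmod(int(seconds), 3600)
    minutes, secs = divmod(rem, 60)
    return f"{hours:02d}:{minutes:02d}:{secs:02d}"

assert format_elapsed(279) == "00:04:39"  # 4 minutes 39 seconds
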
@@ -453,7 +505,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 11,
+"execution_count": 25,
 "id": "2fe8ac5f",
 "metadata": {},
 "outputs": [
@@ -462,7 +514,17 @@
 "output_type": "stream",
 "text": [
 "\n",
-"Skipping archive listing as job did not complete or submission/monitoring failed.\n"
+"Attempting to access archive information for job 6efde421-083b-48c3-b980-1f5b05494846-007...\n",
+"Job Archive Tapis URI: tapis://frontera/work2/05873/kks32/frontera/tapis-jobs-archive/2025-04-27Z/mpm-20250427_101205-6efde421-083b-48c3-b980-1f5b05494846-007\n",
+"\n",
+"Listing archive contents (root):\n",
+"Listing files in system 'frontera' at path 'work2/05873/kks32/frontera/tapis-jobs-archive/2025-04-27Z/mpm-20250427_101205-6efde421-083b-48c3-b980-1f5b05494846-007'...\n",
+"Found 5 items.\n",
+"- inputDirectory (Type: dir, Size: 4096 bytes, Modified: 2025-04-27T15:18:54Z)\n",
+"- tapisjob.env (Type: file, Size: 1519 bytes, Modified: 2025-04-27T15:18:53Z)\n",
+"- tapisjob.out (Type: file, Size: 5590 bytes, Modified: 2025-04-27T15:18:48Z)\n",
+"- tapisjob.sh (Type: file, Size: 1208 bytes, Modified: 2025-04-27T15:18:54Z)\n",
+"- tapisjob_app.sh (Type: file, Size: 189 bytes, Modified: 2025-04-27T15:18:48Z)\n"
 ]
 }
 ],
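
The archive listing above succeeds because the job completed: the Tapis URI names the archive system ('frontera') and the path that the listing step reports. A minimal, illustrative sketch (plain string handling, not a dapi API) of splitting such a URI into those two parts:

# Illustrative only: split a tapis:// URI into the system id and the path.
from urllib.parse import urlparse

def split_tapis_uri(uri: str) -> tuple[str, str]:
    parsed = urlparse(uri)  # scheme="tapis", netloc=system id, path=/...
    return parsed.netloc, parsed.path.lstrip("/")

system_id, path = split_tapis_uri(
    "tapis://frontera/work2/05873/kks32/frontera/tapis-jobs-archive/"
    "2025-04-27Z/mpm-20250427_101205-6efde421-083b-48c3-b980-1f5b05494846-007"
)
# system_id == "frontera"; path is the archive directory reported above.
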
@@ -500,7 +562,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 26,
 "id": "335379df-6e64-475e-8c14-5c8c748e818e",
 "metadata": {},
 "outputs": [
@@ -520,7 +582,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 27,
 "id": "f5574dcd-2c32-4822-be12-fe558747ebde",
 "metadata": {},
 "outputs": [
