
Commit 558c5d6

change initialization
Signed-off-by: Nitish Bharambe <[email protected]>
1 parent 5395152 commit 558c5d6


docs/examples/arrow_example.ipynb

Lines changed: 87 additions & 35 deletions
@@ -183,7 +183,8 @@
 "\n",
 "The [power-grid-model documentation on Components](https://power-grid-model.readthedocs.io/en/stable/user_manual/components.html) provides documentation on which components are required and which ones are optional.\n",
 "\n",
-"Construct the Arrow data as a table with the correct headers and data types."
+"Construct the Arrow data as a table with the correct headers and data types.\n",
+"The creation of arrays and combining them into a RecordBatch, as well as the method of initializing that RecordBatch, is up to the user."
 ]
 },
 {
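The two initialization styles that the new text refers to (typed arrays combined with explicit column names, versus plain Python lists paired with an explicit schema) can be sketched with pyarrow alone. This is an illustration, not the notebook's exact code: the pa.int32()/pa.float64() types below are stand-ins for whatever the notebook's pgm_schema helper returns.

import pyarrow as pa

# Style 1: build typed arrays first, then name the columns explicitly.
id_arr = pa.array([1, 2, 3], type=pa.int32())
u_rated_arr = pa.array([10500.0, 10500.0, 10500.0], type=pa.float64())
nodes_from_arrays = pa.record_batch([id_arr, u_rated_arr], names=["id", "u_rated"])

# Style 2: pass plain Python lists and let an explicit schema drive the types.
nodes_schema = pa.schema([("id", pa.int32()), ("u_rated", pa.float64())])
nodes_from_dict = pa.record_batch(
    {"id": [1, 2, 3], "u_rated": [10500.0, 10500.0, 10500.0]},
    schema=nodes_schema,
)

# Both routes yield the same RecordBatch.
assert nodes_from_arrays.equals(nodes_from_dict)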
@@ -208,41 +209,54 @@
 }
 ],
 "source": [
-"nodes_dict = {\"id\": [1, 2, 3], \"u_rated\": [10500.0, 10500.0, 10500.0]}\n",
-"\n",
-"\n",
-"lines_dict = {\n",
-"    \"id\": [4, 5],\n",
-"    \"from_node\": [1, 2],\n",
-"    \"to_node\": [2, 3],\n",
-"    \"from_status\": [1, 1],\n",
-"    \"to_status\": [1, 1],\n",
-"    \"r1\": [0.11, 0.15],\n",
-"    \"x1\": [0.12, 0.16],\n",
-"    \"c1\": [4.1e-05, 5.4e-05],\n",
-"    \"tan1\": [0.1, 0.1],\n",
-"    \"r0\": [0.01, 0.05],\n",
-"    \"x0\": [0.22, 0.06],\n",
-"    \"c0\": [4.1e-05, 5.4e-05],\n",
-"    \"tan0\": [0.4, 0.1],\n",
-"}\n",
-"\n",
-"sources_dict = {\"id\": [6], \"node\": [1], \"status\": [1], \"u_ref\": [1.0]}\n",
+"nodes_schema = pgm_schema(DatasetType.input, ComponentType.node)\n",
+"nodes = pa.record_batch(\n",
+"    [\n",
+"        pa.array([1, 2, 3], type=nodes_schema.field(\"id\").type),\n",
+"        pa.array([10500.0, 10500.0, 10500.0], type=nodes_schema.field(\"u_rated\").type),\n",
+"    ],\n",
+"    names=(\"id\", \"u_rated\"),\n",
+")\n",
 "\n",
-"sym_loads_dict = {\n",
-"    \"id\": [7, 8],\n",
-"    \"node\": [2, 3],\n",
-"    \"status\": [1, 1],\n",
-"    \"type\": [0, 0],\n",
-"    \"p_specified\": [1.0, 2.0],\n",
-"    \"q_specified\": [0.5, 1.5],\n",
-"}\n",
+"lines = pa.record_batch(\n",
+"    {\n",
+"        \"id\": [4, 5],\n",
+"        \"from_node\": [1, 2],\n",
+"        \"to_node\": [2, 3],\n",
+"        \"from_status\": [1, 1],\n",
+"        \"to_status\": [1, 1],\n",
+"        \"r1\": [0.11, 0.15],\n",
+"        \"x1\": [0.12, 0.16],\n",
+"        \"c1\": [4.1e-05, 5.4e-05],\n",
+"        \"tan1\": [0.1, 0.1],\n",
+"        \"r0\": [0.01, 0.05],\n",
+"        \"x0\": [0.22, 0.06],\n",
+"        \"c0\": [4.1e-05, 5.4e-05],\n",
+"        \"tan0\": [0.4, 0.1],\n",
+"    },\n",
+"    schema=pgm_schema(\n",
+"        DatasetType.input,\n",
+"        ComponentType.line,\n",
+"        [\"id\", \"from_node\", \"to_node\", \"from_status\", \"to_status\", \"r1\", \"x1\", \"c1\", \"tan1\", \"r0\", \"x0\", \"c0\", \"tan0\"],\n",
+"    ),\n",
+")\n",
 "\n",
-"nodes = pa.record_batch(nodes_dict, schema=pgm_schema(DatasetType.input, ComponentType.node, nodes_dict.keys()))\n",
-"lines = pa.record_batch(lines_dict, schema=pgm_schema(DatasetType.input, ComponentType.line, lines_dict.keys()))\n",
-"sources = pa.record_batch(sources_dict, schema=pgm_schema(DatasetType.input, ComponentType.source, sources_dict.keys()))\n",
+"sources = pa.record_batch(\n",
+"    {\"id\": [6], \"node\": [1], \"status\": [1], \"u_ref\": [1.0]},\n",
+"    schema=pgm_schema(DatasetType.input, ComponentType.source, [\"id\", \"node\", \"status\", \"u_ref\"]),\n",
+")\n",
 "sym_loads = pa.record_batch(\n",
-"    sym_loads_dict, schema=pgm_schema(DatasetType.input, ComponentType.sym_load, sym_loads_dict.keys())\n",
+"    {\n",
+"        \"id\": [7, 8],\n",
+"        \"node\": [2, 3],\n",
+"        \"status\": [1, 1],\n",
+"        \"type\": [0, 0],\n",
+"        \"p_specified\": [1.0, 2.0],\n",
+"        \"q_specified\": [0.5, 1.5],\n",
+"    },\n",
+"    schema=pgm_schema(\n",
+"        DatasetType.input, ComponentType.sym_load, [\"id\", \"node\", \"status\", \"type\", \"p_specified\", \"q_specified\"]\n",
+"    ),\n",
 ")\n",
 "\n",
 "nodes\n",
@@ -349,7 +363,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 8,
 "metadata": {},
 "outputs": [
 {
@@ -564,7 +578,17 @@
 "}\n",
 "\n",
 "asym_loads = pa.record_batch(\n",
-"    asym_loads_dict, schema=pgm_schema(DatasetType.input, ComponentType.asym_load, asym_loads_dict.keys())\n",
+"    {\n",
+"        \"id\": [7, 8],\n",
+"        \"node\": [2, 3],\n",
+"        \"status\": [1, 1],\n",
+"        \"type\": [0, 0],\n",
+"        \"p_specified\": [[1.0, 1.0e-2, 1.1e-2], [2.0, 2.5, 4.5e2]],\n",
+"        \"q_specified\": [[0.5, 1.5e3, 0.1], [1.5, 2.5, 1.5e3]],\n",
+"    },\n",
+"    schema=pgm_schema(\n",
+"        DatasetType.input, ComponentType.asym_load, [\"id\", \"node\", \"status\", \"type\", \"p_specified\", \"q_specified\"]\n",
+"    ),\n",
 ")\n",
 "\n",
 "asym_loads"
@@ -782,6 +806,34 @@
 "pa_asym_node_result"
 ]
 },
+{
+"cell_type": "code",
+"execution_count": 14,
+"metadata": {},
+"outputs": [
+{
+"data": {
+"text/plain": [
+"<pyarrow.lib.DoubleArray object at 0x0000020996813E20>\n",
+"[\n",
+"  1,\n",
+"  0.01,\n",
+"  0.011,\n",
+"  2,\n",
+"  2.5,\n",
+"  450\n",
+"]"
+]
+},
+"execution_count": 14,
+"metadata": {},
+"output_type": "execute_result"
+}
+],
+"source": [
+"pa.array(asym_load_input[\"p_specified\"].flatten(), type=pa.float64())"
+]
+},
 {
 "cell_type": "markdown",
 "metadata": {},

0 commit comments
