|
294 | 294 | { |
295 | 295 | "data": { |
296 | 296 | "text/plain": [ |
297 | | - "<__main__._t at 0x7fd2eaf8cf50>" |
| 297 | + "<__main__._t at 0x7efe2eb5ef90>" |
298 | 298 | ] |
299 | 299 | }, |
300 | 300 | "execution_count": null, |
|
478 | 478 | "cell_type": "markdown", |
479 | 479 | "metadata": {}, |
480 | 480 | "source": [ |
481 | | - "## Attribute Helpers\n", |
482 | | - "\n", |
| 481 | + "## Attribute Helpers" |
| 482 | + ] |
| 483 | + }, |
| 484 | + { |
| 485 | + "cell_type": "markdown", |
| 486 | + "metadata": {}, |
| 487 | + "source": [ |
483 | 488 | "These functions reduce boilerplate when setting or manipulating attributes or properties of objects." |
484 | 489 | ] |
485 | 490 | }, |
| 491 | + { |
| 492 | + "cell_type": "code", |
| 493 | + "execution_count": null, |
| 494 | + "metadata": {}, |
| 495 | + "outputs": [], |
| 496 | + "source": [ |
| 497 | + "#export\n", |
| 498 | + "def dict2obj(d):\n", |
| 499 | + " \"Convert (possibly nested) dicts (or lists of dicts) to `SimpleNamespace`\"\n", |
| 500 | + " if isinstance(d, (L,list)): return L(d).map(dict2obj)\n", |
| 501 | + " if not isinstance(d, dict): return d\n", |
| 502 | + " return SimpleNamespace(**{k:dict2obj(v) for k,v in d.items()})" |
| 503 | + ] |
| 504 | + }, |
| 505 | + { |
| 506 | + "cell_type": "markdown", |
| 507 | + "metadata": {}, |
| 508 | + "source": [ |
| 509 | + "This is a convenience to give you \"dotted\" access to (possibly nested) dictionaries, e.g:" |
| 510 | + ] |
| 511 | + }, |
| 512 | + { |
| 513 | + "cell_type": "code", |
| 514 | + "execution_count": null, |
| 515 | + "metadata": {}, |
| 516 | + "outputs": [ |
| 517 | + { |
| 518 | + "data": { |
| 519 | + "text/plain": [ |
| 520 | + "namespace(a=1, b=namespace(c=2, d=3))" |
| 521 | + ] |
| 522 | + }, |
| 523 | + "execution_count": null, |
| 524 | + "metadata": {}, |
| 525 | + "output_type": "execute_result" |
| 526 | + } |
| 527 | + ], |
| 528 | + "source": [ |
| 529 | + "d1 = dict(a=1, b=dict(c=2,d=3))\n", |
| 530 | + "d2 = dict2obj(d1)\n", |
| 531 | + "test_eq(d2.b.c, 2)\n", |
| 532 | + "d2" |
| 533 | + ] |
| 534 | + }, |
| 535 | + { |
| 536 | + "cell_type": "markdown", |
| 537 | + "metadata": {}, |
| 538 | + "source": [ |
| 539 | + "It can also be used on lists of dicts." |
| 540 | + ] |
| 541 | + }, |
| 542 | + { |
| 543 | + "cell_type": "code", |
| 544 | + "execution_count": null, |
| 545 | + "metadata": {}, |
| 546 | + "outputs": [], |
| 547 | + "source": [ |
| 548 | + "ds = L(d1, d1)\n", |
| 549 | + "test_eq(dict2obj(ds[0]).b.c, 2)" |
| 550 | + ] |
| 551 | + }, |
486 | 552 | { |
487 | 553 | "cell_type": "code", |
488 | 554 | "execution_count": null, |
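Note on the `dict2obj` cells added above: they demonstrate dotted access on a single dict and on one element of a list. As a complementary, hedged sketch (not a cell from this notebook), the list branch shown in the diff (`L(d).map(dict2obj)`) also converts a whole collection in one call; `L`, `dict2obj` and `test_eq` are assumed to be imported as elsewhere in the notebook:

```python
# Sketch only: relies on the dict2obj definition added in the hunk above.
d1 = dict(a=1, b=dict(c=2, d=3))
ds = L(d1, d1)
objs = dict2obj(ds)        # hits the isinstance(d, (L, list)) branch, returning an L
test_eq(objs[1].b.d, 3)    # dotted access works on every converted element
```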
|
519 | 585 | "cell_type": "markdown", |
520 | 586 | "metadata": {}, |
521 | 587 | "source": [ |
522 | | - "In it's most basic form, you can use this to shorten code like this:" |
| 588 | + "In it's most basic form, you can use `store_attr` to shorten code like this:" |
523 | 589 | ] |
524 | 590 | }, |
525 | 591 | { |
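The notebook cell this markdown introduces falls outside the hunk above, so as a rough, hedged illustration only (class names are made up, `store_attr` assumed imported from fastcore), the pattern being shortened looks like this:

```python
class Before:
    def __init__(self, a, b, c):
        self.a, self.b, self.c = a, b, c   # the boilerplate store_attr removes

class After:
    def __init__(self, a, b, c):
        store_attr()                       # stores a, b and c on self automatically

t = After(1, 2, 3)
assert (t.a, t.b, t.c) == (1, 2, 3)
```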
|
1625 | 1691 | { |
1626 | 1692 | "data": { |
1627 | 1693 | "text/plain": [ |
1628 | | - "['g', 'e', 'f', 'a', 'h', 'c', 'd', 'b']" |
| 1694 | + "['a', 'e', 'b', 'h', 'c', 'd', 'g', 'f']" |
1629 | 1695 | ] |
1630 | 1696 | }, |
1631 | 1697 | "execution_count": null, |
|
3511 | 3577 | "#export\n", |
3512 | 3578 | "class ProcessPoolExecutor(concurrent.futures.ProcessPoolExecutor):\n", |
3513 | 3579 | " \"Same as Python's ProcessPoolExecutor, except can pass `max_workers==0` for serial execution\"\n", |
3514 | | - " def __init__(self, max_workers=defaults.cpus, on_exc=print, pause=0, **kwargs):\n", |
| 3580 | + " def __init__(self, max_workers=defaults.cpus, on_exc=print, pause=0,\n", |
| 3581 | + " mp_context=None, initializer=None, initargs=(),):\n", |
3515 | 3582 | " if max_workers is None: max_workers=defaults.cpus\n", |
3516 | 3583 | " store_attr()\n", |
3517 | 3584 | " self.not_parallel = max_workers==0\n", |
3518 | 3585 | " if self.not_parallel: max_workers=1\n", |
3519 | | - " super().__init__(max_workers, **kwargs)\n", |
| 3586 | + " super().__init__(max_workers, mp_context=mp_context, initializer=initializer, initargs=initargs)\n", |
3520 | 3587 | "\n", |
3521 | 3588 | " def map(self, f, items, timeout=None, chunksize=1, *args, **kwargs):\n", |
3522 | 3589 | " self.lock = Manager().Lock()\n", |
|
3537 | 3604 | "text/markdown": [ |
3538 | 3605 | "<h4 id=\"ProcessPoolExecutor\" class=\"doc_header\"><code>class</code> <code>ProcessPoolExecutor</code><a href=\"\" class=\"source_link\" style=\"float:right\">[source]</a></h4>\n", |
3539 | 3606 | "\n", |
3540 | | - "> <code>ProcessPoolExecutor</code>(**`max_workers`**=*`64`*, **`on_exc`**=*`print`*, **`pause`**=*`0`*, **\\*\\*`kwargs`**) :: [`ProcessPoolExecutor`](/utils.html#ProcessPoolExecutor)\n", |
| 3607 | + "> <code>ProcessPoolExecutor</code>(**`max_workers`**=*`64`*, **`on_exc`**=*`print`*, **`pause`**=*`0`*, **`mp_context`**=*`None`*, **`initializer`**=*`None`*, **`initargs`**=*`()`*) :: [`ProcessPoolExecutor`](/utils.html#ProcessPoolExecutor)\n", |
3541 | 3608 | "\n", |
3542 | 3609 | "Same as Python's ProcessPoolExecutor, except can pass `max_workers==0` for serial execution" |
3543 | 3610 | ], |
|
3553 | 3620 | "show_doc(ProcessPoolExecutor, title_level=4)" |
3554 | 3621 | ] |
3555 | 3622 | }, |
3556 | | - { |
3557 | | - "cell_type": "markdown", |
3558 | | - "metadata": {}, |
3559 | | - "source": [ |
3560 | | - "`kwargs` are passed to Python's [`concurrent.futures.ProcessPoolExecutor`](https://python.readthedocs.io/en/latest/library/concurrent.futures.html#processpoolexecutor), so depend on your python version. From Python 3.7, they are: `mp_context=None`, `initializer=None`, `initargs=()`." |
3561 | | - ] |
3562 | | - }, |
3563 | 3623 | { |
3564 | 3624 | "cell_type": "code", |
3565 | 3625 | "execution_count": null, |
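With the explanatory markdown cell about `kwargs` removed above, here is a hedged usage sketch of the now-explicit parameters (not from the notebook; the `fastcore.utils` import path and the result ordering are assumptions based on the diff and the `/utils.html` link):

```python
from fastcore.utils import ProcessPoolExecutor     # assumed import path

def _setup(tag):                                    # hypothetical per-worker initializer
    print(f"worker ready: {tag}")

if __name__ == '__main__':                          # needed on spawn-based platforms
    # mp_context, initializer and initargs are now named parameters instead of **kwargs
    with ProcessPoolExecutor(max_workers=2, initializer=_setup, initargs=("demo",)) as ex:
        print(list(ex.map(abs, [-1, -2, 3])))       # expect [1, 2, 3]
```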
|