import logging
import pickle
import sqlite3
from concurrent.futures import Future
from pathlib import Path
from typing import Optional, Sequence

from parsl.dataflow.dflow import DataFlowKernel
from parsl.dataflow.memoization import Memoizer, make_hash
from parsl.dataflow.taskrecord import TaskRecord

logger = logging.getLogger(__name__)


class SQLiteMemoizer(Memoizer):
    """Memoize out of memory into an sqlite3 database.

    TODO: this will probably need some kind of shutdown hook to close the
    sqlite3 connection; such a hook might also be useful for driving final
    checkpoints in the original implementation.
    """
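
    # Storage model: one row per memoized task result, (key, result), where
    # key is the task hashsum from make_hash and result is a pickled dict
    # with 'hash', 'exception' and 'result' fields, written by update_memo
    # and read back by check_memo.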

    def start(self, *, dfk: DataFlowKernel, memoize: bool = True, checkpoint_files: Sequence[str], run_dir: str) -> None:
        """TODO: run_dir is the per-workflow run dir, but we need a broader
        checkpoint context, one level up by default: get_all_checkpoints uses
        "runinfo/" as a relative path for that by default, so replicating that
        choice would do here; likewise, I think, for monitoring."""
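
        # Note: dfk.config.run_dir is the base run directory ("runinfo/" by
        # default), not the per-run directory passed in as run_dir, so this
        # database path is shared across runs, in line with the TODO above.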
        self.db_path = Path(dfk.config.run_dir) / "checkpoint.sqlite3"
        logger.debug("starting with db_path %r", self.db_path)

        # TODO: API wart: turning memoization on or off should not be part of the plugin API
        self.memoize = memoize

        connection = sqlite3.connect(self.db_path)
        cursor = connection.cursor()

        cursor.execute("CREATE TABLE IF NOT EXISTS checkpoints(key, result)")
        # We probably want an index on key, since all access goes via key lookups.
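        # One plausible way to do that (an assumption, not part of the
        # original sketch): a plain index on the key column.
        cursor.execute("CREATE INDEX IF NOT EXISTS checkpoints_key ON checkpoints(key)")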

        connection.commit()
        connection.close()
        logger.debug("checkpoint table created")

    def close(self) -> None:
        # Connections are opened and closed per-operation in check_memo and
        # update_memo, so there is no long-lived connection to clean up here.
        pass

    def checkpoint(self, tasks: Sequence[TaskRecord]) -> None:
        """All the behaviour for this memoizer is in check_memo and update_memo."""
        logger.debug("Explicit checkpoint call is a no-op with this memoizer")

    def check_memo(self, task: TaskRecord) -> Optional[Future]:
        """TODO: document this: check_memo is required to set the task hashsum,
        if that's how checkpoints are going to be keyed in update_memo (that's
        not a hard requirement, though: other equalities are available)."""
        task_id = task['id']

        if not self.memoize or not task['memoize']:
            task['hashsum'] = None
            logger.debug("Task %s will not be memoized", task_id)
            return None

        hashsum = make_hash(task)
        logger.debug("Task %s has memoization hash %s", task_id, hashsum)
        task['hashsum'] = hashsum

        connection = sqlite3.connect(self.db_path)
        cursor = connection.cursor()
        cursor.execute("SELECT result FROM checkpoints WHERE key = ?", (hashsum,))
        r = cursor.fetchone()

        if r is None:
            connection.close()
            return None
        else:
            # The stored value is the pickled record written by update_memo,
            # carrying mutually-exclusive 'result' and 'exception' fields.
            data = pickle.loads(r[0])
            connection.close()

            memo_fu: Future = Future()

            if data['exception'] is None:
                memo_fu.set_result(data['result'])
            else:
                assert data['result'] is None
                memo_fu.set_exception(data['exception'])

            return memo_fu

    def update_memo(self, task: TaskRecord, r: Future) -> None:
        logger.debug("updating memo")

        if not self.memoize or not task['memoize'] or 'hashsum' not in task:
            logger.debug("preconditions for memo not satisfied")
            return

        if not isinstance(task['hashsum'], str):
            logger.error("Attempting to update app cache entry but hashsum is not a string key: %s", task['hashsum'])
            return

        app_fu = task['app_fu']
        hashsum = task['hashsum']

        # This record format comes from the original concatenation-based
        # checkpoint code; the 'hash' field is redundant here because the
        # row key already carries the hashsum.
        if app_fu.exception() is None:
            t = {'hash': hashsum, 'exception': None, 'result': app_fu.result()}
        else:
            t = {'hash': hashsum, 'exception': app_fu.exception(), 'result': None}

        value = pickle.dumps(t)

        connection = sqlite3.connect(self.db_path)
        cursor = connection.cursor()

        cursor.execute("INSERT INTO checkpoints VALUES(?, ?)", (hashsum, value))

        connection.commit()
        connection.close()
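

# Usage sketch (illustrative, kept as a comment so this module stays
# import-safe): this assumes the pluggable-memoization API accepts a Memoizer
# instance via a hypothetical `memoizer=` parameter on Config; the actual
# hook may be named differently.
#
#     import parsl
#     from parsl.config import Config
#
#     config = Config(memoizer=SQLiteMemoizer())  # hypothetical parameter
#     parsl.load(config)
#     ...  # run apps; results land in <run_dir>/checkpoint.sqlite3
#     parsl.dfk().cleanup()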