
Commit f3ecc22

perf(analyzer): implemented DataCache; cache files are now saved in pickle format by default instead of JSON
1 parent: e39afe9

4 files changed: 186 additions, 84 deletions
Lines changed: 83 additions & 0 deletions
@@ -0,0 +1,83 @@
import pickle
from abc import ABC, abstractmethod
from enum import Enum
from pathlib import Path
from typing import Any, Tuple, Type, TypeVar, Union, cast

from robotcode.core.utils.dataclasses import as_json, from_json

_T = TypeVar("_T")


class CacheSection(Enum):
    LIBRARY = "libdoc"
    VARIABLES = "variables"


class DataCache(ABC):
    @abstractmethod
    def cache_data_exists(self, section: CacheSection, entry_name: str) -> bool: ...

    @abstractmethod
    def read_cache_data(
        self, section: CacheSection, entry_name: str, types: Union[Type[_T], Tuple[Type[_T], ...]]
    ) -> _T: ...

    @abstractmethod
    def save_cache_data(self, section: CacheSection, entry_name: str, data: Any) -> None: ...


class JsonDataCache(DataCache):
    def __init__(self, cache_dir: Path) -> None:
        self.cache_dir = cache_dir

    def build_cache_data_filename(self, section: CacheSection, entry_name: str) -> Path:
        return self.cache_dir / section.value / (entry_name + ".json")

    def cache_data_exists(self, section: CacheSection, entry_name: str) -> bool:
        cache_file = self.build_cache_data_filename(section, entry_name)
        return cache_file.exists()

    def read_cache_data(
        self, section: CacheSection, entry_name: str, types: Union[Type[_T], Tuple[Type[_T], ...]]
    ) -> _T:
        cache_file = self.build_cache_data_filename(section, entry_name)
        return from_json(cache_file.read_text("utf-8"), types)

    def save_cache_data(self, section: CacheSection, entry_name: str, data: Any) -> None:
        cached_file = self.build_cache_data_filename(section, entry_name)

        cached_file.parent.mkdir(parents=True, exist_ok=True)
        cached_file.write_text(as_json(data), "utf-8")


class PickleDataCache(DataCache):
    def __init__(self, cache_dir: Path) -> None:
        self.cache_dir = cache_dir

    def build_cache_data_filename(self, section: CacheSection, entry_name: str) -> Path:
        return self.cache_dir / section.value / (entry_name + ".pkl")

    def cache_data_exists(self, section: CacheSection, entry_name: str) -> bool:
        cache_file = self.build_cache_data_filename(section, entry_name)
        return cache_file.exists()

    def read_cache_data(
        self, section: CacheSection, entry_name: str, types: Union[Type[_T], Tuple[Type[_T], ...]]
    ) -> _T:
        cache_file = self.build_cache_data_filename(section, entry_name)

        with cache_file.open("rb") as f:
            result = pickle.load(f)

        if isinstance(result, types):
            return cast(_T, result)

        raise TypeError(f"Expected {types} but got {type(result)}")

    def save_cache_data(self, section: CacheSection, entry_name: str, data: Any) -> None:
        cached_file = self.build_cache_data_filename(section, entry_name)

        cached_file.parent.mkdir(parents=True, exist_ok=True)
        with cached_file.open("wb") as f:
            pickle.dump(data, f)
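For context (not part of the commit): a minimal usage sketch of the new PickleDataCache, assuming the classes above are in scope and using a hypothetical cache directory name and payload. The analyzer's actual call sites are in the other three changed files, which are not shown on this page.

from pathlib import Path

# Hypothetical usage, not taken from the commit; assumes CacheSection and
# PickleDataCache (defined above) are importable or in the same module.
cache = PickleDataCache(Path(".robotcode_cache"))

# Writes <cache_dir>/libdoc/BuiltIn.pkl, creating parent directories as needed.
cache.save_cache_data(CacheSection.LIBRARY, "BuiltIn", {"keywords": ["Log", "Sleep"]})

# read_cache_data unpickles the file and type-checks the result against the
# expected type(s), raising TypeError on a mismatch.
if cache.cache_data_exists(CacheSection.LIBRARY, "BuiltIn"):
    data = cache.read_cache_data(CacheSection.LIBRARY, "BuiltIn", dict)
    print(data["keywords"])

Pickling skips the JSON encode/decode round trip for cached entries, which is presumably the speed-up behind the perf(analyzer) prefix.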
