Skip to content

Commit 3f61ffc

Browse files
committed
v4.2.12 Release
1 parent 4d4e6a7 commit 3f61ffc

File tree

12 files changed

+652
-129
lines changed

12 files changed

+652
-129
lines changed

python/deeplake/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -16,7 +16,7 @@ def progress_bar(iterable, *args, **kwargs):
1616
from deeplake.ingestion import from_coco
1717

1818

19-
__version__ = "4.2.8"
19+
__version__ = "4.2.12"
2020

2121
__all__ = [
2222
"__version__",
@@ -141,6 +141,7 @@ def progress_bar(iterable, *args, **kwargs):
141141
"core",
142142
"create",
143143
"create_async",
144+
"_create_global_cache",
144145
"delete",
145146
"disconnect",
146147
"exists",

python/deeplake/__init__.pyi

Lines changed: 35 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -129,6 +129,7 @@ __all__ = [
129129
"core",
130130
"create",
131131
"create_async",
132+
"_create_global_cache",
132133
"delete",
133134
"disconnect",
134135
"exists",
@@ -2460,6 +2461,29 @@ class Dataset(DatasetView):
24602461
When the dataset was created. The value is auto-generated at creation time.
24612462
"""
24622463

2464+
auto_commit_enabled: bool
2465+
"""
2466+
This property controls whether the dataset will perform time-based auto-commits.
2467+
2468+
<!-- test-context
2469+
```python
2470+
import deeplake
2471+
ds = deeplake.create("mem://auto_commit_ds")
2472+
ds.auto_commit_enabled = False
2473+
ds.add_column("column_name", deeplake.types.Text(deeplake.types.BM25))
2474+
a = ['a']*10_000
2475+
ds.append({"column_name":a})
2476+
ds.commit()
2477+
```
2478+
-->
2479+
2480+
Examples:
2481+
```python
2482+
ds = deeplake.open("mem://auto_commit_ds")
2483+
ds.auto_commit_enabled = True
2484+
```
2485+
"""
2486+
24632487
indexing_mode: IndexingMode
24642488
"""
24652489
The indexing mode of the dataset. This property can be set to change the indexing mode of the dataset for the current session,
@@ -2468,7 +2492,7 @@ class Dataset(DatasetView):
24682492
<!-- test-context
24692493
```python
24702494
import deeplake
2471-
ds = deeplake.create("mem://ds_id")
2495+
ds = deeplake.create("mem://indexing_mode_ds")
24722496
ds.indexing_mode = deeplake.IndexingMode.Off
24732497
ds.add_column("column_name", deeplake.types.Text(deeplake.types.BM25))
24742498
a = ['a']*10_000
@@ -2479,7 +2503,7 @@ class Dataset(DatasetView):
24792503
24802504
Examples:
24812505
```python
2482-
ds = deeplake.open("mem://ds_id")
2506+
ds = deeplake.open("mem://indexing_mode_ds")
24832507
ds.indexing_mode = deeplake.IndexingMode.Automatic
24842508
ds.commit()
24852509
```
@@ -3562,6 +3586,15 @@ def create_async(
35623586
RuntimeError: if a dataset already exists at the given URL (will be raised when the future is awaited)
35633587
"""
35643588

3589+
def _create_global_cache(
3590+
size: int = None,
3591+
) -> None:
3592+
"""
3593+
Creates a global cache with the given size.
3594+
Args:
3595+
size (int, optional): The size of the global cache in bytes. If not specified, a default size of 1GB is used.
3596+
"""
3597+
35653598
def copy(
35663599
src: str,
35673600
dst: str,
Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,9 @@
11
from .labelbox import (
22
create_labelbox_annotation_project,
3+
create_dataset_from_image_annotation_project,
4+
create_dataset_from_image_annotation_project_with_custom_data_filler,
35
create_dataset_from_video_annotation_project,
46
converter_for_video_project_with_id,
7+
converter_for_image_project_with_id,
58
load_blob_file_paths_from_azure,
69
)

python/deeplake/integrations/labelbox/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,10 @@
22
create_labelbox_annotation_project,
33
create_dataset_from_video_annotation_project,
44
create_dataset_from_video_annotation_project_with_custom_data_filler,
5+
create_dataset_from_image_annotation_project,
6+
create_dataset_from_image_annotation_project_with_custom_data_filler,
57
converter_for_video_project_with_id,
8+
converter_for_image_project_with_id,
69
)
710
from .labelbox_azure_utils import (
811
load_blob_file_paths_from_azure,

python/deeplake/integrations/labelbox/converters.py

Lines changed: 49 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -55,6 +55,50 @@ def interpolator(start, end, progress):
5555

5656
converter.registered_interpolators[obj.feature_schema_id] = interpolator
5757

58+
def polygon_converter_(obj, converter, tensor_name, context, generate_labels):
59+
ds = context["ds"]
60+
try:
61+
ds.create_tensor(tensor_name, **polygon_tensor_create_kwargs_())
62+
except:
63+
pass
64+
65+
if generate_labels:
66+
print("polygon converter does not support generating labels")
67+
68+
converter.register_feature_id_for_kind("tool", "polygon", obj, tensor_name)
69+
70+
def polygon_converter(row, obj):
71+
if tensor_name not in converter.values_cache:
72+
converter.values_cache[tensor_name] = dict()
73+
if row not in converter.values_cache[tensor_name]:
74+
converter.values_cache[tensor_name][row] = []
75+
polygon = obj["polygon"]
76+
if len(polygon) != 0 and not isinstance(polygon[0], dict):
77+
# if polygon is a list of points, convert it to a list of dicts
78+
polygon = [{"x": float(p[0]), "y": float(p[1])} for p in polygon]
79+
converter.values_cache[tensor_name][row].append(
80+
np.array([[float(p["x"]), float(p["y"])] for p in polygon])
81+
)
82+
83+
converter.regsistered_actions[obj.feature_schema_id] = polygon_converter
84+
85+
def interpolator(start, end, progress):
86+
start_polygon = start["polygon"]
87+
end_polygon = end["polygon"]
88+
polygon = copy.deepcopy(start)
89+
polygon["polygon"] = [
90+
[
91+
start_polygon[i]["x"]
92+
+ (end_polygon[i]["x"] - start_polygon[i]["x"]) * progress,
93+
start_polygon[i]["y"]
94+
+ (end_polygon[i]["y"] - start_polygon[i]["y"]) * progress,
95+
]
96+
for i in range(len(start_polygon))
97+
]
98+
99+
return polygon
100+
101+
converter.registered_interpolators[obj.feature_schema_id] = interpolator
58102

59103
def radio_converter_(obj, converter, tensor_name, context, generate_labels):
60104
ds = context["ds"]
@@ -199,9 +243,12 @@ def polygon_converter(row, obj):
199243
converter.values_cache[tensor_name] = dict()
200244
if row not in converter.values_cache[tensor_name]:
201245
converter.values_cache[tensor_name][row] = []
202-
246+
line = obj["line"]
247+
if len(line) != 0 and not isinstance(line[0], dict):
248+
# if line is a list of points, convert it to a list of dicts
249+
line = [{"x": int(l[0]), "y": int(l[1])} for l in line]
203250
converter.values_cache[tensor_name][row].append(
204-
[[int(l["x"]), int(l["y"])] for l in obj["line"]]
251+
[[int(l["x"]), int(l["y"])] for l in line]
205252
)
206253

207254
converter.regsistered_actions[obj.feature_schema_id] = polygon_converter

python/deeplake/integrations/labelbox/deeplake_utils.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,7 @@ def class_label_tensor_create_kwargs_(dtype="int32"):
3333
}
3434

3535

36-
def image_tensor_create_kwargs_(sample_compression="jpg"):
36+
def image_tensor_create_kwargs_(sample_compression="png"):
3737
if is_v3():
3838
raise ValueError("unexpected deeplake version 3")
3939
return {"dtype": deeplake.types.Image(sample_compression=sample_compression)}

0 commit comments

Comments (0)