
Commit e7a13ec

Merge pull request #134 from aperture-data/release-0.2.9

Release 0.2.9

2 parents: fe5258b + a7af492

16 files changed: +260 -224 lines changed

.github/workflows/main.yml

Lines changed: 7 additions & 0 deletions

@@ -34,6 +34,13 @@ jobs:
           username: ${{ secrets.DOCKER_USER }}
           password: ${{ secrets.DOCKER_PASS }}
 
+      - name: Login to Google Cloud
+        uses: google-github-actions/setup-gcloud@v0
+        with:
+          service_account_key: ${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}
+          project_id: ${{ secrets.GCP_SERVICE_ACCOUNT_PROJECT_ID }}
+          export_default_credentials: true
+
       - name: Run Tests
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}

.github/workflows/pr.yaml

Lines changed: 7 additions & 0 deletions

@@ -32,6 +32,13 @@ jobs:
           username: ${{ secrets.DOCKER_USER }}
           password: ${{ secrets.DOCKER_PASS }}
 
+      - name: Login to Google Cloud
+        uses: google-github-actions/setup-gcloud@v0
+        with:
+          service_account_key: ${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}
+          project_id: ${{ secrets.GCP_SERVICE_ACCOUNT_PROJECT_ID }}
+          export_default_credentials: true
+
       - name: Run Tests
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}

.github/workflows/release.yaml

Lines changed: 7 additions & 0 deletions

@@ -34,6 +34,13 @@ jobs:
           username: ${{ secrets.DOCKER_USER }}
           password: ${{ secrets.DOCKER_PASS }}
 
+      - name: Login to Google Cloud
+        uses: google-github-actions/setup-gcloud@v0
+        with:
+          service_account_key: ${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}
+          project_id: ${{ secrets.GCP_SERVICE_ACCOUNT_PROJECT_ID }}
+          export_default_credentials: true
+
       - name: Run Tests
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
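All three workflows add the same step so the test jobs can read from Google Cloud Storage. With `export_default_credentials: true`, setup-gcloud@v0 exports Application Default Credentials (it writes the key file and points `GOOGLE_APPLICATION_CREDENTIALS` at it), which the google-cloud-storage client then picks up without any explicit key handling. A minimal sketch of what test code can rely on after this step; the bucket and object names are hypothetical:

```python
# Sketch: under Application Default Credentials exported by setup-gcloud,
# the client authenticates implicitly from the environment.
from google.cloud import storage

client = storage.Client()  # reads GOOGLE_APPLICATION_CREDENTIALS
data = client.bucket("my-test-bucket").blob("images/sample.jpg").download_as_bytes()
```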

aperturedb/ImageDataCSV.py

Lines changed: 40 additions & 6 deletions

@@ -13,6 +13,7 @@
 HEADER_PATH = "filename"
 HEADER_URL = "url"
 HEADER_S3_URL = "s3_url"
+HEADER_GS_URL = "gs_url"
 PROPERTIES = "properties"
 CONSTRAINTS = "constraints"
 IMG_FORMAT = "format"
@@ -37,6 +38,10 @@ class ImageDataCSV(CSVParser.CSVParser):
     OR
 
     ``s3_url``, ``PROP_NAME_1``, ... ``PROP_NAME_N``, ``constraint_PROP1``, ``format``
+
+    OR
+
+    ``gs_url``, ``PROP_NAME_1``, ... ``PROP_NAME_N``, ``constraint_PROP1``, ``format``
     ...
 
     Example csv file::
@@ -63,6 +68,10 @@ class ImageDataCSV(CSVParser.CSVParser):
     """
 
     def __init__(self, filename, check_image=True, n_download_retries=3):
+        self.loaders = [self.load_image, self.load_url,
+                        self.load_s3_url, self.load_gs_url]
+        self.source_types = [HEADER_PATH,
+                             HEADER_URL, HEADER_S3_URL, HEADER_GS_URL]
 
         super().__init__(filename)
 
@@ -76,13 +85,12 @@ def __init__(self, filename, check_image=True, n_download_retries=3):
                             if x.startswith(CSVParser.CONTRAINTS_PREFIX)]
 
         self.source_type = self.header[0]
-        loaders = [self.load_image, self.load_url, self.load_s3_url]
-        source_types = [HEADER_PATH, HEADER_URL, HEADER_S3_URL]
-        if self.source_type not in source_types:
+
+        if self.source_type not in self.source_types:
             logger.error("Source not recognized: " + self.source_type)
             raise Exception("Error loading image: " + filename)
         self.source_loader = {
-            st: sl for st, sl in zip(source_types, loaders)
+            st: sl for st, sl in zip(self.source_types, self.loaders)
         }
 
         self.n_download_retries = n_download_retries
@@ -189,10 +197,36 @@ def load_s3_url(self, s3_url):
             logger.error("S3 ERROR:", s3_url)
             return False, None
 
+    def load_gs_url(self, gs_url):
+        retries = 0
+        from google.cloud import storage
+        client = storage.Client()
+        while True:
+            try:
+                bucket_name = gs_url.split("/")[2]
+                object_name = gs_url.split("gs://" + bucket_name + "/")[-1]
+
+                blob = client.bucket(bucket_name).blob(
+                    object_name).download_as_bytes()
+                imgbuffer = np.frombuffer(blob, dtype='uint8')
+                if self.check_image and not self.check_image_buffer(imgbuffer):
+                    logger.error("IMAGE ERROR: ", gs_url)
+                    return False, None
+                return True, blob
+            except:
+                if retries >= self.n_download_retries:
+                    break
+                logger.warning("WARNING: Retrying object:", gs_url)
+                retries += 1
+                time.sleep(2)
+
+        logger.error("GS ERROR:", gs_url)
+        return False, None
+
     def validate(self):
 
         self.header = list(self.df.columns.values)
 
-        if self.header[0] not in [HEADER_PATH, HEADER_URL, HEADER_S3_URL]:
+        if self.header[0] not in self.source_types:
             raise Exception(
-                "Error with CSV file field: filename. Must be first field")
+                f"Error with CSV file field: {self.header[0]}. Must be first field")

aperturedb/Images.py

Lines changed: 16 additions & 16 deletions

@@ -149,7 +149,7 @@ def __retrieve_batch(self, index):
         find = {
             "FindImage": {
                 "constraints": {
-                    "_uniqueid": ["==", self.images_ids[idx]]
+                    self.img_id_prop: ["==", self.images_ids[idx]]
                 },
             }
         }
@@ -184,7 +184,7 @@ def __retrieve_polygons(self, index, constraints, tag_key, tag_format):
             "FindImage": {
                 "_ref": 1,
                 "constraints": {
-                    "_uniqueid": ["==", uniqueid]
+                    self.img_id_prop: ["==", uniqueid]
                 },
                 "blobs": False,
             }
@@ -255,7 +255,7 @@ def __retrieve_bounding_boxes(self, index):
             "FindImage": {
                 "_ref": 1,
                 "constraints": {
-                    "_uniqueid": ["==", uniqueid]
+                    self.img_id_prop: ["==", uniqueid]
                 },
                 "blobs": False,
             }
@@ -381,12 +381,12 @@ def search(self, constraints=None, operations=None, format=None, limit=None, sor
 
         self.search_result = response
 
-    def search_by_id(self, ids, id_key="id"):
+    def search_by_property(self, prop_key, prop_values):
         const = Constraints()
-        const.is_in(id_key, ids)
+        const.is_in(prop_key, prop_values)
         img_sort = {
-            "key": id_key,
-            "sequence": ids,
+            "key": prop_key,
+            "sequence": prop_values,
         }
         self.search(constraints=const, sort=img_sort)
 
@@ -429,7 +429,7 @@ def get_similar_images(self, set_name, n_neighbors):
             "FindImage": {
                 "_ref": 1,
                 "constraints": {
-                    "_uniqueid": ["==", uniqueid]
+                    self.img_id_prop: ["==", uniqueid]
                 },
                 "blobs": False,
             }
@@ -460,7 +460,7 @@ def get_similar_images(self, set_name, n_neighbors):
                     "ref": 1,
                 },
                 "results": {
-                    "list": ["_uniqueid"]
+                    "list": [self.img_id_prop]
                 }
             }
         }]
@@ -581,9 +581,6 @@ def display(self, show_bboxes=False, show_polygons=False, limit=None, polygon_co
         if polygon_constraints:
             show_polygons = True
             self.images_polygons = {}
-            if "_uniqueid" in polygon_constraints.constraints.keys():
-                print("WARNING: don't use '_uniqueid' in polygon_constraints")
-                print("see https://github.com/aperture-data/athena/issues/532")
 
         for i in range(len(self.images_ids)):
 
@@ -616,9 +613,12 @@
                 self.__retrieve_polygons(
                     i, polygon_constraints, polygon_tag_key, polygon_tag_format)
 
-            bounds = self.images_polygons[uniqueid]["bounds"]
-            polygons = self.images_polygons[uniqueid]["polygons"]
-            tags = self.images_polygons[uniqueid]["tags"]
+            bounds = self.images_polygons[uniqueid]["bounds"] if uniqueid in self.images_polygons else [
+            ]
+            polygons = self.images_polygons[uniqueid]["polygons"] if uniqueid in self.images_polygons else [
+            ]
+            tags = self.images_polygons[uniqueid]["tags"] if uniqueid in self.images_polygons else [
+            ]
 
             for pi in range(len(polygons)):
                 self.__draw_polygon_and_tag(image, polygons[pi], tags[pi] if pi < len(
@@ -670,7 +670,7 @@ def get_properties(self, prop_list=[]):
             "FindImage": {
                 "_ref": 1,
                 "constraints": {
-                    "_uniqueid": ["==", uniqueid]
+                    self.img_id_prop: ["==", uniqueid]
                 },
                 "blobs": False,
                 "results": {

aperturedb/Utils.py

Lines changed: 49 additions & 0 deletions

@@ -533,3 +533,52 @@ def count_descriptors_in_set(self, set_name):
             raise e
 
         return total
+
+    def remove_all_objects(self):
+
+        cmd = {"constraints": {"_uniqueid": ["!=", "0.0.0"]}}
+
+        # There is no DeleteDescriptor, but when the sets are removed
+        # all the descriptors are also removed.
+        queries = [
+            [{"DeleteDescriptorSet": cmd}],
+            # [{"DeleteDescriptor": cmd}],
+            [{"DeleteBoundingBox": cmd}],
+            [{"DeleteVideo": cmd}],
+            [{"DeleteImage": cmd}],
+            [{"DeleteBlob": cmd}],
+            [{"DeleteEntity": cmd}],
+        ]
+
+        try:
+            for q in queries:
+                response, _ = self.connector.query(q)
+                if not self.connector.last_query_ok():
+                    logger.error(self.connector.get_last_response_str())
+                    return False
+
+            response, _ = self.connector.query(
+                [{"GetSchema": {"refresh": True}}])
+
+            if not self.connector.last_query_ok():
+                logger.error(self.connector.get_last_response_str())
+                return False
+
+            entities = response[0]["GetSchema"]["entities"]
+            connections = response[0]["GetSchema"]["connections"]
+
+            if entities is not None:
+                logger.error("Entities not removed completely")
+                logger.error(self.connector.get_last_response_str())
+                return False
+
+            if connections is not None:
+                logger.error("Connections not removed completely")
+                logger.error(self.connector.get_last_response_str())
+                return False
+
+        except BaseException as e:
+            logger.error(self.connector.get_last_response_str())
+            raise e
+
+        return True
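`remove_all_objects` walks the deletable object classes with a catch-all constraint (`_uniqueid != "0.0.0"` matches everything), then refreshes the schema to confirm no entities or connections survived. A usage sketch, assuming `db` is an already-connected ApertureDB connector as in the test fixtures:

```python
from aperturedb.Utils import Utils

utils = Utils(db)

# True only if the follow-up GetSchema reports no surviving entities or
# connections; any failure is logged and reported as False.
if not utils.remove_all_objects():
    raise RuntimeError("database was not fully cleared")
```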

aperturedb/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 
 logger = logging.getLogger(__name__)
 
-__version__ = "0.2.8"
+__version__ = "0.2.9"
 
 # set log level
 logger.setLevel(logging.DEBUG)

docs/requirements-documentation.txt

Lines changed: 1 addition & 1 deletion

@@ -3,5 +3,5 @@ pre-commit==2.17.0
 sphinx-autoapi==1.8.4
 sphinx-rtd-theme==1.0.0
 torch==1.10.2
-nbconvert==6.5.0
+nbconvert==7.0.0
 ipython==8.0.1

setup.py

Lines changed: 2 additions & 2 deletions

@@ -5,10 +5,10 @@
 
 setuptools.setup(
     name="aperturedb",
-    version="0.2.8",
+    version="0.2.9",
     description="ApertureDB Client Module",
     install_requires=['protobuf>=3.20.0', 'scikit-image', 'image', 'requests', 'boto3',
-                      'opencv-python', 'numpy', 'matplotlib', 'pandas', 'kaggle'],
+                      'opencv-python', 'numpy', 'matplotlib', 'pandas', 'kaggle', 'google-cloud-storage'],
     long_description=long_description,
     long_description_content_type="text/markdown",
     url="https://github.com/aperture-data/aperturedb-python",

test/conftest.py

Lines changed: 7 additions & 1 deletion

@@ -35,7 +35,8 @@ def insert_data_from_csv(in_csv_file, rec_count=-1):
         "./input/setB.adb.csv": DescriptorDataCSV,
         "./input/s3_images.adb.csv": ImageDataCSV,
         "./input/http_images.adb.csv": ImageDataCSV,
-        './input/bboxes-constraints.adb.csv': BBoxDataCSV
+        "./input/bboxes-constraints.adb.csv": BBoxDataCSV,
+        "./input/gs_images.adb.csv": ImageDataCSV
     }
 
     data = file_data_pair[in_csv_file](in_csv_file)
@@ -55,3 +56,8 @@ def insert_data_from_csv(in_csv_file, rec_count=-1):
 @pytest.fixture(scope="module")
 def utils(db):
     return Utils(db)
+
+
+@pytest.fixture()
+def images(insert_data_from_csv):
+    return insert_data_from_csv("./input/images.adb.csv")
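The new `images` fixture lets a test declare its data dependency by parameter name: requesting it triggers ingestion of `./input/images.adb.csv` before the test body runs. A hypothetical test using it:

```python
def test_images_ingested(images):
    # images is whatever insert_data_from_csv returned for images.adb.csv
    assert images is not None
```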
