Skip to content

Commit 7c08e83

Browse files
authored
Merge branch 'dev' into doc-pedram
2 parents 85cdb88 + cec08b0 commit 7c08e83

File tree

8 files changed

+173
-91
lines changed

8 files changed

+173
-91
lines changed

CHANGELOG.rst

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,14 @@
1+
2.0.2
2+
=====
3+
* Fixed contains functions
4+
* Added UniqueConstrainViolation exception, inherits from CreationError
5+
16
2.0.1
27
=====
38

49
* Fixed max retries for write conflicts
10+
* Added parameter ``pool_maxsize`` on class ``Connection`` to allow the user to configure the HTTP pool size.
11+
=======
512

613
2.0
714
=====
@@ -10,7 +17,6 @@
1017
* added to_default function to reset a document to its default values
1118
* fixed bug in default documents where default values could be overwritten
1219
* default value for fields is now None
13-
* defaual value for fields can now be a callable
1420

1521
1.3.5
1622
=====

pyArango/collection.py

Lines changed: 28 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -274,6 +274,9 @@ def getDefaultDocument(self, fields=None, dct=None):
274274
for k, v in fields.items():
275275
if isinstance(v, dict):
276276
dct[k] = self.getDefaultDocument(fields[k], None)
277+
elif isinstance(v, list) or isinstance(v, tuple):
278+
dct[k] = []
279+
277280
elif isinstance(v, Field):
278281
if callable(v.default):
279282
dct[k] = v.default()
@@ -342,7 +345,7 @@ def _writeBatch(self):
342345
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
343346
payload = []
344347
for d in self._bulkCache:
345-
if type(d) is dict:
348+
if isinstance(d,dict):
346349
payload.append(json.dumps(d, default=str))
347350
else:
348351
try:
@@ -359,15 +362,15 @@ def _writeBatch(self):
359362
bulkError = None
360363
for xd in data:
361364
if not '_key' in xd and 'error' in xd and 'errorNum' in xd:
362-
if bulkError == None:
365+
if bulkError is None:
363366
bulkError = BulkOperationError("saving failed")
364367
bulkError.addBulkError(ArangoError(xd), self._bulkCache[i])
365368
else:
366369
self._bulkCache[i].setPrivates(xd)
367370
self._bulkCache[i]._key = \
368371
xd['_key']
369372
i += 1
370-
if bulkError != None:
373+
if bulkError is not None:
371374
self._bulkCache = []
372375
raise bulkError
373376

@@ -395,7 +398,7 @@ def _updateBatch(self):
395398
if d.collection._validation['on_save']:
396399
d.validate()
397400

398-
if type(d) is dict:
401+
if isinstance(d,dict):
399402
payload.append(json.dumps(d, default=str))
400403
else:
401404
try:
@@ -411,7 +414,7 @@ def _updateBatch(self):
411414
bulkError = None
412415
for xd in data:
413416
if not '_key' in xd and 'error' in xd and 'errorNum' in xd:
414-
if bulkError == None:
417+
if bulkError is None:
415418
bulkError = BulkOperationError("patching failed")
416419
bulkError.addBulkError(ArangoError(xd), str(self._bulkCache[i]))
417420
else:
@@ -420,7 +423,7 @@ def _updateBatch(self):
420423
xd['_key']
421424
i += 1
422425
self._bulkCache = []
423-
if bulkError != None:
426+
if bulkError is not None:
424427
raise bulkError
425428

426429

@@ -441,7 +444,7 @@ def _removeBatch(self):
441444
raise UpdateError("Mixed bulk operations not supported - have " + self._bulkMode)
442445
payload = []
443446
for d in self._bulkCache:
444-
if type(d) is dict:
447+
if isinstance(d,dict):
445448
payload.append('"%s"' % d['_key'])
446449
else:
447450
try:
@@ -458,14 +461,14 @@ def _removeBatch(self):
458461
bulkError = None
459462
for xd in data:
460463
if not '_key' in xd and 'error' in xd and 'errorNum' in xd:
461-
if bulkError == None:
464+
if bulkError is None:
462465
bulkError = BulkOperationError("deleting failed")
463466
bulkError.addBulkError(ArangoError(xd), self._bulkCache[i])
464467
else:
465468
self._bulkCache[i].reset(self)
466469
i += 1
467470
self._bulkCache = []
468-
if bulkError != None:
471+
if bulkError is not None:
469472
raise bulkError
470473

471474
def _deleteBatch(self, document, params):
@@ -682,8 +685,7 @@ def fetchDocument(self, key, rawResults = False, rev = None):
682685
return self.documentClass(self, r.json(), on_load_validation=self._validation["on_load"])
683686
elif r.status_code == 404 :
684687
raise DocumentNotFoundError("Unable to find document with _key: %s" % key, r.json())
685-
else:
686-
raise DocumentNotFoundError("Unable to find document with _key: %s, response: %s" % (key, r.json()), r.json())
688+
raise DocumentNotFoundError("Unable to find document with _key: %s, response: %s" % (key, r.json()), r.json())
687689

688690
def fetchByExample(self, exampleDict, batchSize, rawResults = False, **queryArgs):
689691
"""'exampleDict' should be something like {'age' : 28}."""
@@ -697,9 +699,9 @@ def fetchFirstExample(self, exampleDict, rawResults = False):
697699

698700
def fetchAll(self, rawResults = False, **queryArgs):
699701
"""Returns all the documents in the collection.
700-
701702
You can use the optional arguments 'skip' and 'limit'::
702703
fetchAll(limit = 3, skip = 10)"""
704+
703705
return self.simpleQuery('all', rawResults = rawResults, **queryArgs)
704706

705707
def simpleQuery(self, queryType, rawResults = False, **queryArgs):
@@ -724,7 +726,7 @@ def bulkSave(self, docs, onDuplicate="error", **params):
724726

725727
payload = []
726728
for d in docs:
727-
if type(d) is dict:
729+
if isinstance(d,dict):
728730
payload.append(json.dumps(d, default=str))
729731
else:
730732
try:
@@ -743,11 +745,10 @@ def bulkSave(self, docs, onDuplicate="error", **params):
743745
data = r.json()
744746
if (r.status_code == 201) and "error" not in data:
745747
return True
746-
else:
747-
if "errors" in data and data["errors"] > 0:
748-
raise UpdateError("%d documents could not be created" % data["errors"], data)
749-
elif data["error"]:
750-
raise UpdateError("Documents could not be created", data)
748+
if "errors" in data and data["errors"] > 0:
749+
raise UpdateError("%d documents could not be created" % data["errors"], data)
750+
elif data["error"]:
751+
raise UpdateError("Documents could not be created", data)
751752

752753
return data["updated"] + data["created"]
753754

@@ -762,10 +763,8 @@ def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **pa
762763
data = f.read()
763764
r = self.connection.session.post(url, params = params, data = data)
764765

765-
try:
766-
errorMessage = "At least: %d errors. The first one is: '%s'\n\n more in <this_exception>.data" % (len(data), data[0]["errorMessage"])
767-
except KeyError:
768-
raise UpdateError(data['errorMessage'], data)
766+
if r.status_code != 201:
767+
raise UpdateError('Unable to bulk import JSON', r)
769768

770769
def bulkImport_values(self, filename, onDuplicate="error", **params):
771770
"""Bulk import from a file following the ArangoDB json format."""
@@ -777,10 +776,8 @@ def bulkImport_values(self, filename, onDuplicate="error", **params):
777776
data = f.read()
778777
r = self.connection.session.post(url, params = params, data = data)
779778

780-
try:
781-
errorMessage = "At least: %d errors. The first one is: '%s'\n\n more in <this_exception>.data" % (len(data), data[0]["errorMessage"])
782-
except KeyError:
783-
raise UpdateError(data['errorMessage'], data)
779+
if r.status_code != 201:
780+
raise UpdateError('Unable to bulk import values', r)
784781

785782
def truncate(self):
786783
"""Delete every document in the collection."""
@@ -826,8 +823,7 @@ def getType(self):
826823
return "document"
827824
elif self.type == CONST.COLLECTION_EDGE_TYPE:
828825
return "edge"
829-
else:
830-
raise ValueError("The collection is of Unknown type %s" % self.type)
826+
raise ValueError("The collection is of Unknown type %s" % self.type)
831827

832828
def getStatus(self):
833829
"""Return a word describing the status of the collection (loaded, loading, deleted, unloaded, newborn) instead of a number, if you prefer the number it is in 'self.status'."""
@@ -841,8 +837,7 @@ def getStatus(self):
841837
return "unloaded"
842838
elif self.status == CONST.COLLECTION_NEWBORN_STATUS:
843839
return "newborn"
844-
else:
845-
raise ValueError("The collection has an Unknown status %s" % self.status)
840+
raise ValueError("The collection has an Unknown status %s" % self.status)
846841

847842
def __len__(self):
848843
"""Return the number of documents in the collection."""
@@ -900,8 +895,7 @@ def validateField(cls, fieldName, value):
900895
except SchemaViolation as e:
901896
if fieldName == "_from" or fieldName == "_to":
902897
return True
903-
else:
904-
raise e
898+
raise e
905899
return valValue
906900

907901
def createEdge(self, initValues = None):
@@ -924,7 +918,7 @@ def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
924918
Otherwise, return a list of Edge objects."""
925919
if isinstance(vertex, Document):
926920
vId = vertex._id
927-
elif (type(vertex) is str) or (type(vertex) is bytes):
921+
elif isinstance(vertex,str) or isinstance(vertex,bytes):
928922
vId = vertex
929923
else:
930924
raise ValueError("Vertex is neither a Document nor a String")
@@ -947,8 +941,7 @@ def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
947941
for e in data["edges"]:
948942
ret.append(Edge(self, e))
949943
return ret
950-
else:
951-
return data["edges"]
944+
return data["edges"]
952945
else:
953946
raise CreationError("Unable to return edges for vertex: %s" % vId, data)
954947

0 commit comments

Comments
 (0)