@@ -272,6 +272,9 @@ def getDefaultDocument(self, fields=None, dct=None):
272272 for k , v in fields .items ():
273273 if isinstance (v , dict ):
274274 dct [k ] = self .getDefaultDocument (fields [k ], None )
275+ elif isinstance (v , list ) or isinstance (v , tuple ):
276+ dct [k ] = []
277+
275278 elif isinstance (v , Field ):
276279 if callable (v .default ):
277280 dct [k ] = v .default ()
@@ -338,7 +341,7 @@ def _writeBatch(self):
338341 raise UpdateError ("Mixed bulk operations not supported - have " + str (self ._bulkMode ))
339342 payload = []
340343 for d in self ._bulkCache :
341- if type ( d ) is dict :
344+ if isinstance ( d , dict ) :
342345 payload .append (json .dumps (d , default = str ))
343346 else :
344347 try :
@@ -355,15 +358,15 @@ def _writeBatch(self):
355358 bulkError = None
356359 for xd in data :
357360 if not '_key' in xd and 'error' in xd and 'errorNum' in xd :
358- if bulkError == None :
361+ if bulkError is None :
359362 bulkError = BulkOperationError ("saving failed" )
360363 bulkError .addBulkError (ArangoError (xd ), self ._bulkCache [i ])
361364 else :
362365 self ._bulkCache [i ].setPrivates (xd )
363366 self ._bulkCache [i ]._key = \
364367 xd ['_key' ]
365368 i += 1
366- if bulkError != None :
369+ if bulkError is not None :
367370 self ._bulkCache = []
368371 raise bulkError
369372
@@ -391,7 +394,7 @@ def _updateBatch(self):
391394 if d .collection ._validation ['on_save' ]:
392395 d .validate ()
393396
394- if type ( d ) is dict :
397+ if isinstance ( d , dict ) :
395398 payload .append (json .dumps (d , default = str ))
396399 else :
397400 try :
@@ -407,7 +410,7 @@ def _updateBatch(self):
407410 bulkError = None
408411 for xd in data :
409412 if not '_key' in xd and 'error' in xd and 'errorNum' in xd :
410- if bulkError == None :
413+ if bulkError is None :
411414 bulkError = BulkOperationError ("patching failed" )
412415 bulkError .addBulkError (ArangoError (xd ), str (self ._bulkCache [i ]))
413416 else :
@@ -416,7 +419,7 @@ def _updateBatch(self):
416419 xd ['_key' ]
417420 i += 1
418421 self ._bulkCache = []
419- if bulkError != None :
422+ if bulkError is not None :
420423 raise bulkError
421424
422425
@@ -437,7 +440,7 @@ def _removeBatch(self):
437440 raise UpdateError ("Mixed bulk operations not supported - have " + self ._bulkMode )
438441 payload = []
439442 for d in self ._bulkCache :
440- if type ( d ) is dict :
443+ if isinstance ( d , dict ) :
441444 payload .append ('"%s"' % d ['_key' ])
442445 else :
443446 try :
@@ -454,14 +457,14 @@ def _removeBatch(self):
454457 bulkError = None
455458 for xd in data :
456459 if not '_key' in xd and 'error' in xd and 'errorNum' in xd :
457- if bulkError == None :
460+ if bulkError is None :
458461 bulkError = BulkOperationError ("deleting failed" )
459462 bulkError .addBulkError (ArangoError (xd ), self ._bulkCache [i ])
460463 else :
461464 self ._bulkCache [i ].reset (self )
462465 i += 1
463466 self ._bulkCache = []
464- if bulkError != None :
467+ if bulkError is not None :
465468 raise bulkError
466469
467470 def _deleteBatch (self , document , params ):
@@ -674,8 +677,7 @@ def fetchDocument(self, key, rawResults = False, rev = None):
674677 return self .documentClass (self , r .json (), on_load_validation = self ._validation ["on_load" ])
675678 elif r .status_code == 404 :
676679 raise DocumentNotFoundError ("Unable to find document with _key: %s" % key , r .json ())
677- else :
678- raise DocumentNotFoundError ("Unable to find document with _key: %s, response: %s" % (key , r .json ()), r .json ())
680+ raise DocumentNotFoundError ("Unable to find document with _key: %s, response: %s" % (key , r .json ()), r .json ())
679681
680682 def fetchByExample (self , exampleDict , batchSize , rawResults = False , ** queryArgs ):
681683 """exampleDict should be something like {'age' : 28}"""
@@ -689,7 +691,7 @@ def fetchFirstExample(self, exampleDict, rawResults = False):
689691 def fetchAll (self , rawResults = False , ** queryArgs ):
690692 """Returns all the documents in the collection. You can use the optional arguments 'skip' and 'limit'::
691693
692- fetchAlll (limit = 3, shik = 10)"""
694+ fetchAll (limit = 3, skip = 10)"""
693695 return self .simpleQuery ('all' , rawResults = rawResults , ** queryArgs )
694696
695697 def simpleQuery (self , queryType , rawResults = False , ** queryArgs ):
@@ -711,7 +713,7 @@ def bulkSave(self, docs, onDuplicate="error", **params):
711713
712714 payload = []
713715 for d in docs :
714- if type ( d ) is dict :
716+ if isinstance ( d , dict ) :
715717 payload .append (json .dumps (d , default = str ))
716718 else :
717719 try :
@@ -730,11 +732,10 @@ def bulkSave(self, docs, onDuplicate="error", **params):
730732 data = r .json ()
731733 if (r .status_code == 201 ) and "error" not in data :
732734 return True
733- else :
734- if "errors" in data and data ["errors" ] > 0 :
735- raise UpdateError ("%d documents could not be created" % data ["errors" ], data )
736- elif data ["error" ]:
737- raise UpdateError ("Documents could not be created" , data )
735+ if "errors" in data and data ["errors" ] > 0 :
736+ raise UpdateError ("%d documents could not be created" % data ["errors" ], data )
737+ elif data ["error" ]:
738+ raise UpdateError ("Documents could not be created" , data )
738739
739740 return data ["updated" ] + data ["created" ]
740741
@@ -749,10 +750,8 @@ def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **pa
749750 data = f .read ()
750751 r = self .connection .session .post (url , params = params , data = data )
751752
752- try :
753- errorMessage = "At least: %d errors. The first one is: '%s'\n \n more in <this_exception>.data" % (len (data ), data [0 ]["errorMessage" ])
754- except KeyError :
755- raise UpdateError (data ['errorMessage' ], data )
753+ if r .status_code != 201 :
754+ raise UpdateError ('Unable to bulk import JSON' , r )
756755
757756 def bulkImport_values (self , filename , onDuplicate = "error" , ** params ):
758757 """bulk import from a file respecting arango's json format"""
@@ -764,10 +763,8 @@ def bulkImport_values(self, filename, onDuplicate="error", **params):
764763 data = f .read ()
765764 r = self .connection .session .post (url , params = params , data = data )
766765
767- try :
768- errorMessage = "At least: %d errors. The first one is: '%s'\n \n more in <this_exception>.data" % (len (data ), data [0 ]["errorMessage" ])
769- except KeyError :
770- raise UpdateError (data ['errorMessage' ], data )
766+ if r .status_code != 201 :
767+ raise UpdateError ('Unable to bulk import values' , r )
771768
772769 def truncate (self ):
773770 """deletes every document in the collection"""
@@ -811,8 +808,7 @@ def getType(self):
811808 return "document"
812809 elif self .type == CONST .COLLECTION_EDGE_TYPE :
813810 return "edge"
814- else :
815- raise ValueError ("The collection is of Unknown type %s" % self .type )
811+ raise ValueError ("The collection is of Unknown type %s" % self .type )
816812
817813 def getStatus (self ):
818814 """returns a word describing the status of the collection (loaded, loading, deleted, unloaded, newborn) instead of a number, if you prefer the number it's in self.status"""
@@ -826,8 +822,7 @@ def getStatus(self):
826822 return "unloaded"
827823 elif self .status == CONST .COLLECTION_NEWBORN_STATUS :
828824 return "newborn"
829- else :
830- raise ValueError ("The collection has an Unknown status %s" % self .status )
825+ raise ValueError ("The collection has an Unknown status %s" % self .status )
831826
832827 def __len__ (self ):
833828 """returns the number of documents in the collection"""
@@ -881,8 +876,7 @@ def validateField(cls, fieldName, value):
881876 except SchemaViolation as e :
882877 if fieldName == "_from" or fieldName == "_to" :
883878 return True
884- else :
885- raise e
879+ raise e
886880 return valValue
887881
888882 def createEdge (self , initValues = None ):
@@ -902,7 +896,7 @@ def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
902896 If rawResults, arango results will be returned as fetched; if false, will return a list of Edge objects"""
903897 if isinstance (vertex , Document ):
904898 vId = vertex ._id
905- elif ( type ( vertex ) is str ) or ( type ( vertex ) is bytes ):
899+ elif isinstance ( vertex , str ) or isinstance ( vertex , bytes ):
906900 vId = vertex
907901 else :
908902 raise ValueError ("Vertex is neither a Document nor a String" )
@@ -925,8 +919,7 @@ def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
925919 for e in data ["edges" ]:
926920 ret .append (Edge (self , e ))
927921 return ret
928- else :
929- return data ["edges" ]
922+ return data ["edges" ]
930923 else :
931924 raise CreationError ("Unable to return edges for vertex: %s" % vId , data )
932925
0 commit comments