@@ -418,119 +418,3 @@ def __str__(self):
418 418         _s = "[TensorDB] Info:\n"
419 419         _t = _s + " " + str(self.db)
420 420         return _t
421-
422- # def save_bulk_data(self, data=None, filename='filename'):
423- # """ Put bulk data into TensorDB.datafs, return file ID.
424- #     When you have very large data, you may want to save it into GridFS buckets
425- #     instead of collections; then, when you want to load it, XXXX
426- #
427- # Parameters
428- # -----------
429- # data : serialized data.
430- #     filename : string, the GridFS bucket filename.
431- #
432- # References
433- # -----------
434- #     - MongoDB GridFS, xxxxx
435- # """
436- # s = time.time()
437- # f_id = self.datafs.put(data, filename=filename)
438- # print("[TensorDB] save_bulk_data: {} took: {}s".format(filename, round(time.time()-s, 2)))
439- # return f_id
440- #
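
For reference, the bulk-save pattern the commented-out `save_bulk_data` describes is plain `gridfs`: pickle the object, `put` it into a bucket, and keep the returned file ID for later retrieval. A minimal sketch, assuming a local MongoDB and illustrative database/collection names:

    import pickle
    import gridfs
    import pymongo

    # Assumed connection details, for illustration only.
    client = pymongo.MongoClient("localhost", 27017)
    db = client["tensordb_demo"]
    datafs = gridfs.GridFS(db, collection="datafs")

    data = pickle.dumps({"w1": [1.0, 2.0], "w2": [3.0]})  # any picklable object
    f_id = datafs.put(data, filename="params_bulk")       # returns an ObjectId
    restored = pickle.loads(datafs.get(f_id).read())      # round-trip via the file ID
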
441- # def save_collection(self, data=None, collect_name='collect_name'):
442- # """ Insert data into MongoDB Collections, return xx.
443- #
444- # Parameters
445- # -----------
446- # data : serialized data.
447- # collect_name : string, MongoDB collection name.
448- #
449- # References
450- # -----------
451- # - MongoDB find, xxxxx
452- # """
453- # s = time.time()
454- # rl = self.db[collect_name].insert_many(data)
455- # print("[TensorDB] save_collection: {} took: {}s".format(collect_name, round(time.time()-s, 2)))
456- # return rl
457- #
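
Likewise, `save_collection` is a thin wrapper over pymongo's `insert_many`, which takes a list of documents and returns an `InsertManyResult`. A sketch reusing the assumed `db` handle from above:

    # Documents and collection name are illustrative.
    docs = [{"step": i, "loss": 0.1 * i} for i in range(3)]
    result = db["train_log_demo"].insert_many(docs)
    print(result.inserted_ids)  # one ObjectId per inserted document
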
458- # def find(self, args={}, collect_name='collect_name'):
459- # """ Find data from MongoDB Collections.
460- #
461- # Parameters
462- # -----------
463- # args : dictionary, arguments for finding.
464- # collect_name : string, MongoDB collection name.
465- #
466- # References
467- # -----------
468- # - MongoDB find, xxxxx
469- # """
470- # s = time.time()
471- #
472- # pc = self.db[collect_name].find(args) # pymongo.cursor.Cursor object
473- # flist = pc.distinct('f_id')
474- # fldict = {}
475- #         for f in flist:  # there may be multiple bucket files
476- # # fldict[f] = pickle.loads(self.datafs.get(f).read())
477- # # s2 = time.time()
478- # tmp = self.datafs.get(f).read()
479- # # print(time.time()-s2)
480- # fldict[f] = pickle.loads(tmp)
481- # # print(time.time()-s2)
482- # # exit()
483- # # print(round(time.time()-s, 2))
484- # data = [fldict[x['f_id']][x['id']] for x in pc]
485- # data = np.asarray(data)
486- # print("[TensorDB] find: {} get: {} took: {}s".format(collect_name, pc.count(), round(time.time()-s, 2)))
487- # return data
488-
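
The commented-out `find` combines both mechanisms: each matching document stores an `f_id` naming a pickled GridFS file and an `id` indexing into the unpickled object. A hedged reconstruction of that lookup, under the same assumed schema and handles as the sketches above:

    import numpy as np

    pc = db["collect_name_demo"].find({})  # pymongo.cursor.Cursor
    fldict = {}
    for f in pc.distinct("f_id"):          # there may be several bucket files
        fldict[f] = pickle.loads(datafs.get(f).read())
    data = np.asarray([fldict[x["f_id"]][x["id"]] for x in pc])
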
489-
490- class DBLogger:
491-     """Log epoch and batch events from a model into a TensorDB instance."""
492-
493-     def __init__(self, db, model):
494-         self.db = db
495-         self.model = model
496-
497-     def on_train_begin(self):
498-         print("start")
499-
500-     def on_train_end(self):
501-         print("end")
502-
503-     def on_epoch_begin(self, epoch):
504-         self.epoch = epoch
505-         self.et = time.time()
506-         return
507-
508-     def on_epoch_end(self, epoch, logs={}):
509-         self.et = time.time() - self.et
510-         print("ending")
511-         print(epoch)
512-         logs['epoch'] = epoch
513-         logs['time'] = datetime.utcnow()
514-         logs['stepTime'] = self.et
515-         logs['acc'] = np.asscalar(logs['acc'])
516-         print(logs)
517-
518-         w = self.model.Params
519-         fid = self.db.save_params(w, logs)
520-         logs.update({'params': fid})
521-         self.db.valid_log(logs)
522-
523-     def on_batch_begin(self, batch, logs={}):
524-         self.t = time.time()
525-         self.losses = []
526-         self.batch = batch
527-
528-     def on_batch_end(self, logs={}):
529-         self.t2 = time.time() - self.t
530-         logs['acc'] = np.asscalar(logs['acc'])
531-         # logs['loss'] = np.asscalar(logs['loss'])
532-         logs['step_time'] = self.t2
533-         logs['time'] = datetime.utcnow()
534-         logs['epoch'] = self.epoch
535-         logs['batch'] = self.batch
536-         self.db.train_log(logs)
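
The deleted DBLogger mirrors the Keras callback naming but is invoked manually. A hypothetical driver loop, assuming `db` is a TensorDB-like object providing `save_params`, `train_log`, and `valid_log`, and `model` exposes a `Params` attribute:

    # All names here are assumptions for illustration.
    logger = DBLogger(db, model)

    logger.on_train_begin()
    for epoch in range(2):
        logger.on_epoch_begin(epoch)
        for batch in range(10):
            logger.on_batch_begin(batch)
            # ... run one training step producing an accuracy value ...
            logger.on_batch_end(logs={"acc": np.float32(0.9)})
        logger.on_epoch_end(epoch, logs={"acc": np.float32(0.9)})
    logger.on_train_end()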