 # import eventlet
 import os
 import tempfile
+import uuid
 
-from flask import Blueprint, jsonify, request
+from werkzeug.datastructures import FileStorage
+from flask import Blueprint, jsonify, request, session
 from flask import json, send_file
 from flask_socketio import emit
 from sqlalchemy import exists
+from sqlalchemy.orm import immediateload, subqueryload
 from sqlalchemy.exc import IntegrityError, SQLAlchemyError
 from sqlalchemy.orm.exc import NoResultFound
 from werkzeug.utils import secure_filename
@@ -305,10 +308,25 @@ def catch_all(path):
     return res
 
 
-#
-# @socketio.on('connect')
-# def test_message(message=None):
-#     emit('message', request.sid + ' Connected', broadcast=True)
+connect_count = 0
+
+
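+# Track socket connections: count connect events and tag each browser session with a uid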
+@socketio.on('connect')
+def test_message(message=None):
+    global connect_count
+    connect_count += 1
+    if not session.get('uid'):
+        session['uid'] = uuid.uuid4()
+    else:
+        print "Session Set: {}".format(session['uid'])
+    print "Total Connection Events: {}".format(connect_count)
+    # emit('message', request.sid + ' Connected', broadcast=True)
+
+
+@socketio.on('disconnect')
+def disconnect(message=None):
+    print "Disconnected..."
+
 #
 #
 # @socketio.on('client_test')
@@ -337,6 +355,9 @@ def get_or_post_genotyping_projects():
     elif request.method == 'POST':
         project_params = json.loads(request.get_json())
         try:
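+            # Treat missing or empty estimator ids as None before constructing the project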
+            project_params['artifact_estimator_id'] = project_params.get('artifact_estimator_id') or None
+            project_params['quantification_bias_estimator_id'] = project_params.get('quantification_bias_estimator_id') or None
+            project_params['bin_estimator_id'] = project_params.get('bin_estimator_id') or None
             project = GenotypingProject(**project_params)
             db.session.add(project)
             db.session.flush()
@@ -466,7 +487,7 @@ def get_genotyping_peak_data(id):
     sample_ids = dict(ProjectSampleAnnotations.query.distinct().join(Sample).filter(
         ProjectSampleAnnotations.project_id == id).values(ProjectSampleAnnotations.id, Sample.barcode))
     for la in locus_annotations:
-        if not la[5]['failure']:
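+        # .get() avoids a KeyError when the locus annotation carries no 'failure' flag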
+        if not la[5].get('failure'):
             alleles = la[4].items()
             bin_ids = [str(x[0]) for x in alleles if x[1]]
             for peak in la[0]:
@@ -597,33 +618,53 @@ def get_or_update_genotyping_project(id):
         return handle_error(e)
 
 
-@microspat.route('/genotyping-project/<int:id>/add-samples/', methods=['POST'])
-def genotyping_project_add_samples(id):
-    gp = GenotypingProject.query.get(id)
-    assert isinstance(gp, GenotypingProject)
-    files = request.files.getlist('files')
-
-    if not files:
-        return handle_error("Nothing Uploaded")
-
+def get_sample_ids_from_csv(f):
+    """
+    Reads the barcodes listed in a csv file and returns the set of IDs of the corresponding samples.
+    :param f: csv file with header ['barcode']
+    :return: set of sample ids
+    """
+    assert isinstance(f, FileStorage)
     sample_ids = set()
-    for sample_file in files:
-        if sample_file.filename[-4:] != '.csv':
-            return handle_error("Uploaded file is not a csv.")
+    if f.filename[-4:] != '.csv':
+        raise ValueError("Uploaded file is not a csv.")
 
-        r = CaseInsensitiveDictReader(sample_file)
+    try:
+        r = CaseInsensitiveDictReader(f)
 
         if 'barcode' not in r.fieldnames:
-            return handle_error("File header not valid")
+            raise ValueError("File header not valid")
 
         for sample_entry in r:
             sample_id = Sample.query.filter(Sample.barcode == sample_entry['barcode']).value(Sample.id)
             if not sample_id:
-                return handle_error("{} Does not yet exist.".format(sample_entry))
+                raise ValueError("{} Does not yet exist.".format(sample_entry))
             sample_ids.add(sample_id)
+    except csv.Error:
+        raise ValueError("File is not valid.")
+
+    return sample_ids
+
+
+@microspat.route('/genotyping-project/<int:id>/add-samples/', methods=['POST'])
+def genotyping_project_add_samples(id):
+    gp = GenotypingProject.query.get(id)
+    assert isinstance(gp, GenotypingProject)
+    files = request.files.getlist('files')
 
-    gp.add_samples(list(sample_ids))
-    return jsonify(wrap_data(gp.serialize_details()))
+    if not files:
+        return handle_error("Nothing Uploaded")
+    try:
+        full_sample_ids = set()
+        for sample_file in files:
+            sample_ids = get_sample_ids_from_csv(sample_file)
+            for sample_id in sample_ids:
+                full_sample_ids.add(sample_id)
+            # full_sample_ids.add(get_sample_ids_from_csv(sample_file))
+        gp.add_samples(list(full_sample_ids))
+        return jsonify(wrap_data(gp.serialize_details()))
+    except ValueError as e:
+        return handle_error(e)
 
 
 @microspat.route('/quantification-bias-estimator-project/', methods=['GET', 'POST'])
@@ -693,7 +734,7 @@ def get_or_create_artifact_estimators():
             project = ArtifactEstimatorProject(**project_params)
             db.session.add(project)
             db.session.flush()
-            project.initialize_project()
+            # project.initialize_project()
             return jsonify(wrap_data(project.serialize_details()))
         except Exception as e:
             return handle_error(e)
@@ -702,7 +743,15 @@ def get_or_create_artifact_estimators():
 @microspat.route('/artifact-estimator-project/<int:id>/', methods=['GET', 'PUT', 'DELETE'])
 def get_or_update_artifact_estimator(id):
     if request.method == 'GET':
-        return table_get_details(ArtifactEstimatorProject, id)
+        q = ArtifactEstimatorProject.query.filter(ArtifactEstimatorProject.id == id)
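+        # Eager-load the nested estimator relationships up front for serialize_details()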
+        q = q.options(subqueryload(ArtifactEstimatorProject.locus_parameters),
+                      subqueryload(ArtifactEstimatorProject.locus_artifact_estimators)
+                      .subqueryload(LocusArtifactEstimator.artifact_estimators)
+                      .subqueryload(ArtifactEstimator.artifact_equations)).order_by(ArtifactEstimatorProject.id)
+        ae = q.first()
+        res = wrap_data(ae.serialize_details())
+        # res = table_get_details(ArtifactEstimatorProject, id)
+        return jsonify(res)
     elif request.method == 'PUT':
         project_update_dict = json.loads(request.get_json())
         project = ArtifactEstimatorProject.query.get(id)
@@ -731,6 +780,35 @@ def get_or_update_artifact_estimator(id):
         return handle_error(e)
 
 
+@microspat.route('/artifact-estimator-project/<int:id>/set-samples/', methods=['POST'])
+def set_artifact_estimator_samples(id):
+    ae = ArtifactEstimatorProject.query.get(id)
+    assert isinstance(ae, ArtifactEstimatorProject)
+    files = request.files.getlist('files')
+
+    if not files:
+        return handle_error("Nothing Uploaded")
+    try:
+        full_sample_ids = set()
+        for sample_file in files:
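+            # get_sample_ids_from_csv() returns a set of ids; merge it into the running set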
+            full_sample_ids.update(get_sample_ids_from_csv(sample_file))
+        ae.set_samples(full_sample_ids)
+        return get_artifact_estimator_samples(id)
+    except ValueError as e:
+        return handle_error(e)
+
+
+@microspat.route('/artifact-estimator-project/<int:id>/get-samples/', methods=['GET'])
+def get_artifact_estimator_samples(id):
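+    # Collect the distinct samples whose channels are annotated in this project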
+    samples = Sample.query.join(Channel).join(ProjectChannelAnnotations).join(Project).filter(
+        Project.id == id).distinct(Sample.id).values(Sample.id, Sample.barcode, Sample.last_updated)
+    samples = [{'id': _[0],
+                'barcode': _[1],
+                'last_updated': _[2]
+                } for _ in samples]
+    return jsonify(wrap_data(samples))
+
+
 @microspat.route('/artifact-estimator/<int:id>/', methods=['DELETE'])
 def delete_artifact_estimator(id):
     try:
@@ -741,7 +819,6 @@ def delete_artifact_estimator(id):
             ArtifactEstimator.label == ArtifactEstimator.GLOBAL_ESTIMATOR).filter(
             ArtifactEstimator.locus_artifact_estimator_id == estimator.locus_artifact_estimator_id).first()
         if global_estimator:
-            app.logger.debug("Adding Peaks to Global Estimator")
             assert isinstance(global_estimator, ArtifactEstimator)
             peak_data = estimator.peak_data
             global_estimator.peak_data += peak_data
@@ -804,7 +881,7 @@ def get_or_create_bin_estimators():
         project = BinEstimatorProject(**project_params)
         db.session.add(project)
         db.session.flush()
-        project.initialize_project()
+        # project.initialize_project()
         return jsonify(wrap_data(project.serialize_details()))
     except Exception as e:
         return handle_error(e)
@@ -813,7 +890,12 @@ def get_or_create_bin_estimators():
 @microspat.route('/bin-estimator/<int:id>/', methods=['GET', 'PUT', 'DELETE'])
 def get_or_update_bin_estimator(id):
     if request.method == 'GET':
-        return table_get_details(BinEstimatorProject, id)
+        q = BinEstimatorProject.query.filter(BinEstimatorProject.id == id)
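+        # Eager-load locus parameters and bin sets up front for serialize_details()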
+        q = q.options(subqueryload(BinEstimatorProject.locus_parameters),
+                      subqueryload(BinEstimatorProject.locus_bin_sets))
+        be = q.first()
+        res = wrap_data(be.serialize_details())
+        return jsonify(res)
     elif request.method == 'PUT':
         project_update_dict = json.loads(request.get_json())
         project = BinEstimatorProject.query.get(id)
@@ -845,6 +927,24 @@ def get_or_update_bin_estimator(id):
         return handle_error(e)
 
 
+@microspat.route('/bin-estimator/<int:id>/set-samples/', methods=['POST'])
+def set_bin_estimator_samples(id):
+    be = BinEstimatorProject.query.get(id)
+    assert isinstance(be, BinEstimatorProject)
+    files = request.files.getlist('files')
+
+    if not files:
+        return handle_error("Nothing Uploaded")
+    try:
+        full_sample_ids = set()
+        for sample_file in files:
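+            # Merge the set of ids from each csv into the running set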
+            full_sample_ids.update(get_sample_ids_from_csv(sample_file))
+        be.set_samples(full_sample_ids)
+        return jsonify(wrap_data(be.serialize_details()))
+    except ValueError as e:
+        return handle_error(e)
+
+
 @microspat.route('/bin-estimator/<int:id>/locus/<int:locus_id>/bins/', methods=['PUT', 'POST'])
 def create_or_update_bins(id, locus_id):
     bins = map(json.loads, request.json)
@@ -943,17 +1043,23 @@ def get_or_update_locus_parameters(id):
         locus_params_update_dict = json.loads(request.get_json())
         locus_params = ProjectLocusParams.query.get(id)
         assert isinstance(locus_params, ProjectLocusParams)
+        print "Getting Project..."
         project = Project.query.get(locus_params.project_id)
+        print "Got Project {}".format(project.title)
         if locus_params:
             try:
                 updater = update_fns.get(locus_params.discriminator, update_locus_params)
                 try:
+                    print "Updating Locus Params..."
                     locus_params = updater(locus_params, locus_params_update_dict)
+                    print "Updated Locus Params for {}".format(locus_params.locus.label)
                 except StaleParametersError as e:
                     return handle_error("{} is stale at locus {}, analyze that first!".format(e.project, e.locus))
                 db.session.flush()
                 send_notification('info', 'Beginning Analysis: {}'.format(locus_params.locus.label))
+                print "Analyzing Locus {}...".format(locus_params.locus.label)
                 project.analyze_locus(locus_params.locus_id)
+                print "Done Analyzing Locus"
                 send_notification('success', 'Completed Analysis: {}'.format(locus_params.locus.label))
                 return jsonify(wrap_data(locus_params.serialize()))
             except SQLAlchemyError as e: