Skip to content

Commit a1efc8d

Browse files
authored
Merge pull request #155 from lehtiolab/154-small-things-collected
Accumulated fixes for smaller problems
2 parents 3784300 + d305ca0 commit a1efc8d

File tree

24 files changed

+437
-255
lines changed

24 files changed

+437
-255
lines changed

src/backend/analysis/tests.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -857,11 +857,16 @@ def test_new_analysis_and_run_and_purge(self):
857857
self.assertFalse(os.path.exists(webfn))
858858
self.assertTrue(os.path.exists(anafn))
859859
self.assertTrue(os.path.exists(nfrunfn))
860+
rmdirjobs = jm.Job.objects.filter(funcname='delete_empty_directory')
861+
self.assertEqual(rmdirjobs.filter(state=jj.Jobstates.PENDING).count(), 2)
860862
# Two purge jobs and two delete dir jobs, this test is getting slow
861863
self.run_job() # purge files
862864
self.run_job() # rm dir
863865
self.run_job() # purge
864866
self.run_job() # rm dir
867+
# Second rmdir job is executed but status is not resolved
868+
self.assertEqual(rmdirjobs.filter(state=jj.Jobstates.DONE).count(), 1)
869+
self.assertEqual(rmdirjobs.filter(state=jj.Jobstates.PROCESSING).count(), 1)
865870
self.assertFalse(os.path.exists(anafn))
866871
self.assertFalse(os.path.exists(nfrunfn))
867872

src/backend/analysis/views.py

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1374,9 +1374,9 @@ def do_analysis_deletion(analysis):
13741374
# FIXME analysisdeleted tracking no longer needed when we backup etc
13751375
# implement a log instead
13761376
am.AnalysisDeleted.objects.update_or_create(analysis=analysis)
1377-
if jobq := jm.Job.objects.filter(nextflowsearch__analysis=analysis).exclude(state__in=[
1378-
jj.Jobstates.ERROR, jj.Jobstates.DONE, jj.Jobstates.REVOKING, jj.Jobstates.CANCELED]):
1379-
jv.cancel_or_revoke_job(jobq)
1377+
jm.Job.objects.filter(nextflowsearch__analysis=analysis).exclude(state__in=[
1378+
jj.Jobstates.DONE, jj.Jobstates.REVOKING, jj.Jobstates.CANCELED]).update(
1379+
state=jj.Jobstates.REVOKING)
13801380
if analysis.success_completed:
13811381
# Back up files if that for some reason (old analysis) hasnt happened earlier
13821382
backup_jobs = []
@@ -1431,13 +1431,17 @@ def do_analysis_deletion(analysis):
14311431
return f'error trying to queue delete files job: {joberror}'
14321432
rmdirkw = {'sfloc_ids': sfloc_ids, 'path': path, 'share_id': shareid}
14331433
rmdirjobs.append(rmdirkw)
1434-
if joberror := check_job_error('delete_empty_directory', sfloc_ids=sfloc_ids):
1434+
if joberror := check_job_error('delete_empty_directory', **rmdirkw):
14351435
return f'error trying to queue delete files job: {joberror}'
14361436
for sfloc_ids in purgejobs:
14371437
create_job_without_check('purge_files', sfloc_ids=sfloc_ids)
14381438
rm.StoredFileLoc.objects.filter(pk__in=sfloc_ids).update(active=False)
1439+
jwrap = jobmap['delete_empty_directory'](False)
14391440
for rmdirkw in rmdirjobs:
1440-
create_job_without_check('delete_empty_directory', **rmdirkw)
1441+
# Check first that no files exist in the dir, e.g. in case of multiple
1442+
# analyses. Skip delete dir in that case
1443+
if not jwrap.files_in_dir_existing(**rmdirkw):
1444+
create_job_without_check('delete_empty_directory', **rmdirkw)
14411445
sfiles.update(deleted=True)
14421446

14431447
# No more errors, mark for deletion
@@ -1483,8 +1487,7 @@ def stop_analysis(request):
14831487
if not request.user.is_superuser and not anaq.exists():
14841488
return JsonResponse({'error': 'Analysis does not exist or you dont have permission'}, status=403)
14851489
anaq.update(editable=True)
1486-
jobq = jm.Job.objects.filter(nextflowsearch__analysis_id=req['item_id'])
1487-
jv.cancel_or_revoke_job(jobq)
1490+
jm.Job.objects.filter(nextflowsearch__analysis_id=req['item_id']).update(state=jj.Jobstates.REVOKING)
14881491
return JsonResponse({})
14891492

14901493

@@ -1606,7 +1609,7 @@ def upload_servable_file(request):
16061609
@login_required
16071610
def find_datasets(request):
16081611
searchterms = [x for x in request.GET['q'].split() if x != '']
1609-
dbdsets = dm.Dataset.query_creator(searchterms).filter(deleted=False)
1612+
dbdsets = dm.Dataset.objects.filter(dm.Dataset.query_creator(searchterms), deleted=False)
16101613
dsets = {}
16111614
qpid = dm.Datatype.get_quantprot_id()
16121615
for d in dbdsets.select_related('runname__experiment__project', 'datatype',

src/backend/datasets/jobs.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,18 @@ def process(self, **kwargs):
2929
raise RuntimeError('Dataset source files are spread over more than one location, please '
3030
'contact admin to make sure files are consolidated before renaming path')
3131

32-
srcloc = srcloc.get()
33-
fss = FileserverShare.objects.filter(share=srcloc['servershare__pk'], server__active=True
34-
).values('server__name', 'path').first()
35-
self.queue = self.get_server_based_queue(fss['server__name'], settings.QUEUE_FASTSTORAGE)
36-
self.run_tasks = [(fss['path'], srcloc['path'], kwargs['newpath'],
37-
[x['pk'] for x in srcsfs.values('pk')], kwargs['dss_id'])]
32+
elif srcloc.count():
33+
srcloc = srcloc.get()
34+
fss = FileserverShare.objects.filter(share=srcloc['servershare__pk'], server__active=True
35+
).values('server__name', 'path').first()
36+
self.queue = self.get_server_based_queue(fss['server__name'], settings.QUEUE_FASTSTORAGE)
37+
self.run_tasks = [(fss['path'], srcloc['path'], kwargs['newpath'],
38+
[x['pk'] for x in srcsfs.values('pk')], kwargs['dss_id'])]
39+
else:
40+
# There are no sfloc files in this job, e.g. empty dataset, so we will not rename
41+
# on disk
42+
dss = DatasetServer.objects.filter(pk=kwargs['dss_id'])
43+
dss.update(storage_loc=kwargs['newpath'])
3844

3945

4046
class RsyncDatasetServershare(DatasetJob):

src/backend/datasets/models.py

Lines changed: 30 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,8 @@
66

77

88
class PrincipalInvestigator(models.Model):
9+
# Not for actual accounting (that would be an external reference on the project table)
10+
# more for staff to know who to contact for an active project
911
name = models.TextField(max_length=100)
1012

1113
def __str__(self):
@@ -117,6 +119,34 @@ class Dataset(models.Model):
117119
purged = models.BooleanField(default=False) # for UI only, indicate permanent deleted from cold storage too
118120
locked = models.BooleanField(default=False)
119121

122+
@staticmethod
123+
def query_creator(searchterms):
124+
query = Q(runname__name__icontains=searchterms[0])
125+
query |= Q(runname__experiment__name__icontains=searchterms[0])
126+
query |= Q(runname__experiment__project__name__icontains=searchterms[0])
127+
query |= Q(datatype__name__icontains=searchterms[0])
128+
try:
129+
float(searchterms[0])
130+
except ValueError:
131+
pass
132+
else:
133+
query |= Q(prefractionationdataset__hiriefdataset__hirief__start=searchterms[0])
134+
query |= Q(prefractionationdataset__hiriefdataset__hirief__end=searchterms[0])
135+
for term in searchterms[1:]:
136+
subquery = Q(runname__name__icontains=term)
137+
subquery |= Q(runname__experiment__name__icontains=term)
138+
subquery |= Q(runname__experiment__project__name__icontains=term)
139+
subquery |= Q(datatype__name__icontains=term)
140+
try:
141+
float(term)
142+
except ValueError:
143+
pass
144+
else:
145+
subquery |= Q(prefractionationdataset__hiriefdataset__hirief__start=term)
146+
subquery |= Q(prefractionationdataset__hiriefdataset__hirief__end=term)
147+
query &= subquery
148+
return query
149+
120150

121151
class DatasetServer(models.Model):
122152
'''
@@ -154,35 +184,6 @@ class Meta:
154184
models.UniqueConstraint(fields=['dataset', 'storageshare'], name='uni_dsshare'),
155185
]
156186

157-
@classmethod
158-
def query_creator(cls, searchterms):
159-
query = Q(runname__name__icontains=searchterms[0])
160-
query |= Q(runname__experiment__name__icontains=searchterms[0])
161-
query |= Q(runname__experiment__project__name__icontains=searchterms[0])
162-
query |= Q(datatype__name__icontains=searchterms[0])
163-
try:
164-
float(searchterms[0])
165-
except ValueError:
166-
pass
167-
else:
168-
query |= Q(prefractionationdataset__hiriefdataset__hirief__start=searchterms[0])
169-
query |= Q(prefractionationdataset__hiriefdataset__hirief__end=searchterms[0])
170-
for term in searchterms[1:]:
171-
subquery = Q(runname__name__icontains=term)
172-
subquery |= Q(runname__experiment__name__icontains=term)
173-
subquery |= Q(runname__experiment__project__name__icontains=term)
174-
subquery |= Q(datatype__name__icontains=term)
175-
try:
176-
float(term)
177-
except ValueError:
178-
pass
179-
else:
180-
subquery |= Q(prefractionationdataset__hiriefdataset__hirief__start=term)
181-
subquery |= Q(prefractionationdataset__hiriefdataset__hirief__end=term)
182-
query &= subquery
183-
return cls.objects.filter(query)
184-
185-
186187

187188
class DatasetOwner(models.Model):
188189
dataset = models.ForeignKey(Dataset, on_delete=models.CASCADE)

0 commit comments

Comments
 (0)