Dev (#813)
* add habituation procedure to init fixtures
* Tag filter in dataset admin page
* Exclude non-personal, non-globus endpoints from local file record delete
* mpciROIs.uuids dataset type fixture
---------

Co-authored-by: olivier <[email protected]>
Co-authored-by: github-actions <[email protected]>
3 people authored Oct 16, 2023
1 parent 317dde7 commit c704a9b
Showing 5 changed files with 68 additions and 48 deletions.
11 changes: 10 additions & 1 deletion alyx/actions/fixtures/actions.proceduretype.json
@@ -14,7 +14,7 @@
"fields": {
"name": "Water restriction",
"json": null,
"description": "- Variation in the composition, constituents, quantity and/or availability of the diet and/or drinking water which may cause pain, suffering, distress or lasting harm - withholding of water"
"description": "- Variation in the compositigit con, constituents, quantity and/or availability of the diet and/or drinking water which may cause pain, suffering, distress or lasting harm - withholding of water"
}
},
{
@@ -268,6 +268,15 @@
"json": null,
"description": "Covers the acquisition of histological sections, the subsequent registration of slices to a common reference atlas and the eventual recovery of electrophysiology tracks through manual picking."
}
},
+ {
+   "model": "actions.proceduretype",
+   "pk": "e55c9ca1-3208-4b81-936f-75c0cf6e9565",
+   "fields": {
+     "name": "handling_habituation",
+     "json": null,
+     "description": "Rig habituation protocol for a new task and/or a new subject"
+   }
+ },
{
"model": "actions.proceduretype",
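For reference, fixtures like this one are applied with Django's loaddata machinery when Alyx is initialised. A minimal sketch of loading it by hand, assuming a configured Alyx Django environment:

from django.core.management import call_command

# Django resolves the label to actions/fixtures/actions.proceduretype.json;
# rows are matched on their primary keys, so re-running the load is safe.
call_command('loaddata', 'actions.proceduretype')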
1 change: 1 addition & 0 deletions alyx/data/admin.py
@@ -93,6 +93,7 @@ class DatasetAdmin(BaseExperimentalDataAdmin):
list_filter = [('created_by', RelatedDropdownFilter),
('created_datetime', DateRangeFilter),
('dataset_type', RelatedDropdownFilter),
+ ('tags', RelatedDropdownFilter)
]
search_fields = ('session__id', 'name', 'collection', 'dataset_type__name',
'dataset_type__filename_pattern', 'version')
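The added ('tags', RelatedDropdownFilter) entry exposes a tag dropdown in the dataset admin list view. A minimal sketch of the pattern, with RelatedDropdownFilter imported from the django-admin-list-filter-dropdown package pinned in requirements_frozen.txt (the admin class name is illustrative):

from django.contrib import admin
from django_admin_listfilter_dropdown.filters import RelatedDropdownFilter

class TaggedAdmin(admin.ModelAdmin):  # illustrative, not part of this commit
    # a (field, widget) tuple renders the related 'tags' field as a
    # dropdown rather than the default flat sidebar list of links
    list_filter = [('tags', RelatedDropdownFilter)]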
11 changes: 11 additions & 0 deletions alyx/data/fixtures/data.datasettype.json
@@ -2198,5 +2198,16 @@
"description": "Image meta-data listing the depths and dimensions of each field of view, acquisition parameters, channels, and the MLAP coordinates of the imaging window center.",
"filename_pattern": "_*_rawImagingData.meta*"
}
+ },
+ {
+   "model": "data.datasettype",
+   "pk": "289cb419-274f-4699-ac43-7612de07382e",
+   "fields": {
+     "json": null,
+     "name": "mpciROIs.uuids",
+     "created_by": null,
+     "description": "Unique identifier assigned to each ROI when ALF files created after ROI detection.",
+     "filename_pattern": ""
+   }
}
]
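Once the fixture is loaded, the new dataset type is queryable like any other row; the empty filename_pattern suggests datasets of this type are matched on the type name itself. A minimal sketch, assuming a configured Alyx environment:

from data.models import DatasetType

# fetch the fixture row added above by its unique name
dt = DatasetType.objects.get(name='mpciROIs.uuids')
print(dt.pk, dt.description)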
25 changes: 12 additions & 13 deletions alyx/data/transfers.py
@@ -659,21 +659,22 @@ def globus_delete_local_datasets(datasets, dry=True, gc=None, label=None):
"""
For each dataset in the queryset delete the file records belonging to a Globus personal repo
only if a server file exists and matches the size.
- :param datasets:
+ :param datasets: data.models.Dataset query set
:param label: label for the transfer
:param dry: default True
:return:
"""
# first get the list of Globus endpoints concerned
- file_records = FileRecord.objects.filter(dataset__in=datasets)
+ file_records = FileRecord.objects.filter(
+     dataset__in=datasets, data_repository__globus_is_personal=True)
+ file_records = file_records.exclude(data_repository__globus_endpoint_id__isnull=True)
globus_endpoints = file_records.values_list('data_repository__globus_endpoint_id',
flat=True).distinct()
label = label or 'alyx globus client'
# create a globus delete_client for each globus endpoint
gtc = gc or globus_transfer_client()
- delete_clients = []
- for ge in globus_endpoints:
-     delete_clients.append(globus_sdk.DeleteData(gtc, ge, label=label))
+ # Map of Globus endpoint UUID -> DeleteData client
+ delete_clients = {ge: globus_sdk.DeleteData(gtc, ge, label=label) for ge in globus_endpoints}

def _ls_globus(file_record, add_uuid=False):
N_RETRIES = 3
@@ -694,7 +694,6 @@ def _ls_globus(file_record, add_uuid=False):
return [ls for ls in ls_obj['DATA'] if ls['name'] == path.name]
# appends each file for deletion
fr2delete = []
- del_client = []
for ds in datasets:
# check the existence of the server file
fr_server = ds.file_records.filter(exists=True,
@@ -724,17 +724,16 @@ def _ls_globus(file_record, add_uuid=False):
# the files exist local and remote,
fr2delete.append(frloc.id)
file2del = _filename_from_file_record(frloc)
- del_client = [dc for dc in delete_clients if dc['endpoint'] ==
-               str(frloc.data_repository.globus_endpoint_id)][0]
+ del_client = delete_clients[(gid := frloc.data_repository.globus_endpoint_id)]
+ assert del_client['endpoint'] == str(gid)
del_client.add_item(file2del)
logger.info('DELETE: ' + _filename_from_file_record(frloc))
# launch the deletion jobs and remove records from the database
if dry:
- return del_client
- for dc in delete_clients:
-     # submitting a deletion without data will create an error
-     if dc['DATA'] == []:
-         continue
+ return delete_clients
+ # NB: filter empty clients as submitting a deletion without data will raise an error
+ for dc in filter(lambda x: x['DATA'], delete_clients.values()):
+     logger.info('Submitting delete for %i file(s) on %s', len(dc['DATA']), dc['endpoint'])
gtc.submit_delete(dc)
# remove file records
frecs = FileRecord.objects.filter(id__in=fr2delete).exclude(
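The refactor above swaps the list of DeleteData payloads for a dict keyed by Globus endpoint UUID, making the per-record client lookup direct and letting empty payloads be skipped at submission time. A self-contained sketch of that pattern, assuming an authenticated globus_sdk TransferClient (the helper names are illustrative):

import globus_sdk

def build_delete_clients(tc, endpoint_ids, label='alyx globus client'):
    # one DeleteData payload per Globus endpoint UUID
    return {ep: globus_sdk.DeleteData(tc, ep, label=label) for ep in endpoint_ids}

def submit_deletions(tc, delete_clients):
    # Globus rejects a delete submission with no items, so skip empty payloads
    for dc in filter(lambda d: d['DATA'], delete_clients.values()):
        tc.submit_delete(dc)

Files are queued per endpoint with delete_clients[endpoint_id].add_item(path) before submission, mirroring the add_item call in the diff above.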
68 changes: 34 additions & 34 deletions requirements_frozen.txt
@@ -1,81 +1,81 @@
asgiref==3.7.2
backports.zoneinfo==0.2.1
- boto3==1.28.37
- botocore==1.31.37
+ boto3==1.28.63
+ botocore==1.31.63
certifi==2023.7.22
- cffi==1.15.1
- charset-normalizer==3.2.0
+ cffi==1.16.0
+ charset-normalizer==3.3.0
click==8.1.7
colorlog==6.7.0
- contourpy==1.1.0
+ contourpy==1.1.1
coreapi==2.3.3
coreschema==0.0.4
coverage==6.5.0
coveralls==3.3.1
- cryptography==41.0.3
- cycler==0.11.0
- Django==4.2.4
+ cryptography==41.0.4
+ cycler==0.12.1
+ Django==4.2.6
django-admin-list-filter-dropdown==1.0.3
- django-admin-rangefilter==0.11.0
+ django-admin-rangefilter==0.11.2
django-autocomplete-light==3.9.7
django-cleanup==8.0.0
django-filter==21.1
- django-ipware==5.0.0
+ django-ipware==5.0.1
django-js-asset==2.1.0
django-mptt==0.14.0
django-polymorphic==3.1.0
- django-reversion==5.0.4
- django-storages==1.13.2
- django-structlog==5.3.0
+ django-reversion==5.0.6
+ django-storages==1.14.2
+ django-structlog==6.0.0
django-test-without-migrations==0.6
djangorestframework==3.14.0
docopt==0.6.2
docutils==0.20.1
drfdocs==0.0.11
flake8==6.1.0
- fonttools==4.42.1
- globus-cli==3.17.0
- globus-sdk==3.27.0
- iblutil==1.7.0
+ fonttools==4.43.1
+ globus-cli==3.18.0
+ globus-sdk==3.28.0
+ iblutil==1.7.1
idna==3.4
importlib-metadata==6.8.0
- importlib-resources==6.0.1
+ importlib-resources==6.1.0
itypes==1.2.0
Jinja2==3.1.2
jmespath==1.0.1
kiwisolver==1.4.5
- llvmlite==0.40.1
+ llvmlite==0.41.0
lxml==4.9.3
- Markdown==3.4.4
+ Markdown==3.5
MarkupSafe==2.1.3
- matplotlib==3.7.2
+ matplotlib==3.7.3
mccabe==0.7.0
- numba==0.57.1
+ numba==0.58.0
numpy==1.24.4
- ONE-api==2.2.2
- packaging==23.1
+ ONE-api==2.3.0
+ packaging==23.2
pandas==2.0.3
- Pillow==10.0.0
- psycopg2-binary==2.9.7
+ Pillow==10.1.0
+ psycopg2-binary==2.9.9
pyarrow==13.0.0
- pycodestyle==2.11.0
+ pycodestyle==2.11.1
pycparser==2.21
pyflakes==3.1.0
PyJWT==2.8.0
- pyparsing==3.0.9
+ pyparsing==3.1.1
python-dateutil==2.8.2
python-magic==0.4.27
- pytz==2023.3
+ pytz==2023.3.post1
PyYAML==6.0.1
requests==2.31.0
- s3transfer==0.6.2
+ s3transfer==0.7.0
six==1.16.0
sqlparse==0.4.4
- structlog==23.1.0
+ structlog==23.2.0
tqdm==4.66.1
- typing_extensions==4.7.1
+ typing_extensions==4.8.0
tzdata==2023.3
uritemplate==4.1.1
- urllib3==1.26.16
+ urllib3==1.26.17
webdavclient3==3.14.6
- zipp==3.16.2
+ zipp==3.17.0
