Skip to content

Commit

Permalink
Merge branch 'release/2.32.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
mayofaulkner committed Mar 4, 2024
2 parents 6912198 + 7ae6123 commit 6ad2f7d
Show file tree
Hide file tree
Showing 5 changed files with 26 additions and 7 deletions.
2 changes: 1 addition & 1 deletion ibllib/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging
import warnings

__version__ = '2.31.0'
__version__ = '2.32.0'
warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# if this becomes a full-blown library we should let the logging configuration to the discretion of the dev
Expand Down
8 changes: 6 additions & 2 deletions ibllib/oneibl/patcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def __init__(self, one=None):
assert one
self.one = one

def _patch_dataset(self, path, dset_id=None, dry=False, ftp=False):
def _patch_dataset(self, path, dset_id=None, revision=None, dry=False, ftp=False):
"""
This private method gets the dataset information from alyx, computes the local
and remote paths and initiates the file copy
Expand All @@ -113,6 +113,10 @@ def _patch_dataset(self, path, dset_id=None, dry=False, ftp=False):
dset_id = None
assert dset_id
assert is_uuid_string(dset_id)
# If the revision is not None then we need to add the revision into the path. Note the moving of the file
# is handled by one registration client
if revision is not None:
path = path.parent.joinpath(f'#{revision}#', path.name)
assert path.exists()
dset = self.one.alyx.rest('datasets', 'read', id=dset_id)
fr = next(fr for fr in dset['file_records'] if 'flatiron' in fr['data_repository'])
Expand Down Expand Up @@ -185,7 +189,7 @@ def patch_dataset(self, file_list, dry=False, ftp=False, **kwargs):
return
# from the dataset info, set flatIron flag to exists=True
for p, d in zip(file_list, response):
self._patch_dataset(p, dset_id=d['id'], dry=dry, ftp=ftp)
self._patch_dataset(p, dset_id=d['id'], revision=d['revision'], dry=dry, ftp=ftp)
return response

def patch_datasets(self, file_list, **kwargs):
Expand Down
4 changes: 3 additions & 1 deletion ibllib/qc/alignment_qc.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import numpy as np
from pathlib import Path
from datetime import date

from neuropixel import trace_header
import spikeglx
Expand Down Expand Up @@ -166,7 +167,7 @@ def run(self, update=True, upload_alyx=True, upload_flatiron=True):

return results

def resolve_manual(self, align_key, update=True, upload_alyx=True, upload_flatiron=True,
def resolve_manual(self, align_key, update=True, upload_alyx=True, upload_flatiron=False,
force=False):
"""
Method to manually resolve the alignment of a probe insertion with a given alignment
Expand All @@ -193,6 +194,7 @@ def resolve_manual(self, align_key, update=True, upload_alyx=True, upload_flatir
results['alignment_resolved'] = True
results['alignment_stored'] = align_key
results['alignment_resolved_by'] = 'experimenter'
results['alignment_resolved_date'] = date.today().isoformat()

if update:
self.update_extended_qc(results)
Expand Down
11 changes: 8 additions & 3 deletions ibllib/tests/qc/test_alignment_qc.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import copy
import random
import string
from datetime import date

from one.api import ONE
from neuropixel import trace_header
Expand Down Expand Up @@ -322,7 +323,8 @@ def _02_manual_resolution_latest(self):
alignment_stored='2020-09-28T15:57:25_mayo',
alignment_count=3,
trajectory_created=False,
alignment_qc=0.604081)
alignment_qc=0.604081,
alignment_date=date.today().isoformat())

def _03_manual_resolution_not_latest(self):
align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
Expand All @@ -338,7 +340,8 @@ def _03_manual_resolution_not_latest(self):
alignment_stored='2020-09-28T10:03:06_alejandro',
alignment_count=3,
trajectory_created=True,
alignment_qc=0.604081)
alignment_qc=0.604081,
alignment_date=date.today().isoformat())

@classmethod
def tearDownClass(cls) -> None:
Expand All @@ -347,7 +350,7 @@ def tearDownClass(cls) -> None:


def _verify(tc, alignment_resolved=None, alignment_count=None,
alignment_stored=None, trajectory_created=False, alignment_qc=None):
alignment_stored=None, trajectory_created=False, alignment_qc=None, alignment_date=None):
"""
For a given test case with a `probe_id` attribute, check that Alyx returns insertion records
that match the provided parameters.
Expand Down Expand Up @@ -382,6 +385,8 @@ def _verify(tc, alignment_resolved=None, alignment_count=None,
f'&probe_id={tc.probe_id}'
'&provenance=Ephys aligned histology track', clobber=True)
tc.assertNotEqual(tc.prev_traj_id == traj[0]['id'], trajectory_created)
if alignment_date:
tc.assertEqual(insertion['json']['extended_qc']['alignment_resolved_date'], alignment_date)


class TestUploadToFlatIron(unittest.TestCase):
Expand Down
8 changes: 8 additions & 0 deletions release_notes.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
## Release Notes 2.32

### features
- SDSC patcher automatically supports revisions

### others
- Add extra key to alignment qc with manual resolution for channel upload
## Release Notes 2.31

### features
Expand Down

0 comments on commit 6ad2f7d

Please sign in to comment.