
Commit

Merge pull request #97 from int-brain-lab/develop
fix docstring
mayofaulkner authored Aug 30, 2023
2 parents e3e5a90 + 044c6d6 commit 48a91eb
Showing 4 changed files with 22 additions and 19 deletions.
5 changes: 3 additions & 2 deletions atlaselectrophysiology/ephys_atlas_gui.py
@@ -1057,7 +1057,7 @@ def plot_probe(self, data, bounds=None):
{'img': image data for each channel bank, list of np.array((1,ny)), list
'scale': scaling to apply to each image, list of np.array([xscale,yscale]), list
'offset': offset to apply to each image, list of np.array([xoffset,yoffset]), list
'level': colourbar extremes np.array([min val, max val]), float
'levels': colourbar extremes np.array([min val, max val]), float
'cmap': colourmap to use, string
'xrange': range to display of x axis, np.array([min range, max range]), float
'title': description to place on colorbar, string
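
For orientation only (not part of this commit): a minimal sketch of a dictionary shaped like the corrected plot_probe docstring above. All array shapes and values are illustrative assumptions, not taken from the repository.

import numpy as np

# Hypothetical plot_probe input: one (1, ny) image per channel bank, values made up
probe_data = {
    'img': [np.random.rand(1, 384), np.random.rand(1, 384)],   # list of np.array((1, ny))
    'scale': [np.array([1., 10.]), np.array([1., 10.])],       # [xscale, yscale] per image
    'offset': [np.array([0., 0.]), np.array([16., 0.])],       # [xoffset, yoffset] per image
    'levels': np.array([0., 1.]),                               # colourbar extremes [min, max]
    'cmap': 'viridis',                                          # colourmap name
    'xrange': np.array([0., 32.]),                              # x axis display range
    'title': 'example feature',                                 # colorbar label
}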
@@ -1111,7 +1111,8 @@ def plot_image(self, data):
param data: dictionary of data to plot
{'img': image data, np.array((nx,ny)), float
'scale': scaling to apply to each axis, np.array([xscale,yscale]), float
'level': colourbar extremes np.array([min val, max val]), float
'levels': colourbar extremes np.array([min val, max val]), float
'offset': offset to apply to each image, np.array([xoffset,yoffset]), float
'cmap': colourmap to use, string
'xrange': range to display of x axis, np.array([min range, max range]), float
'xaxis': label for xaxis, string
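
Likewise (not part of the commit), a hedged sketch of a plot_image dictionary using the newly documented 'levels' and 'offset' keys; shapes and values are assumptions for illustration.

import numpy as np

# Hypothetical plot_image input: a single (nx, ny) image, values made up
image_data = {
    'img': np.random.rand(100, 384),     # np.array((nx, ny))
    'scale': np.array([1., 10.]),        # [xscale, yscale]
    'levels': np.array([0., 1.]),        # colourbar extremes [min, max]
    'offset': np.array([0., 0.]),        # [xoffset, yoffset]
    'cmap': 'plasma',                    # colourmap name
    'xrange': np.array([0., 100.]),      # x axis display range
    'xaxis': 'Time (s)',                 # x axis label
}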
31 changes: 15 additions & 16 deletions ephysfeatures/features_across_region.py
@@ -32,9 +32,7 @@
'psd_gamma': {'plot_type': 'probe', 'cmap': 'viridis'},
'rms_ap': {'plot_type': 'probe', 'cmap': 'plasma'},
'rms_lf': {'plot_type': 'probe', 'cmap': 'inferno'},
'spike_rate': {'plot_type': 'probe', 'cmap': 'hot'},
'amps': {'plot_type': 'line', 'xlabel': 'Amplitude'},
'spike_rate_line': {'plot_type': 'line', 'xlabel': 'Firing Rate'}
'spike_count': {'plot_type': 'probe', 'cmap': 'hot'},
}


@@ -54,11 +52,11 @@ def __init__(self, one, region_ids=None, ba=None, download=True, size=(1600, 800
self.ba = ba or AllenAtlas()
br = self.ba.regions

table_path = self.one.cache_dir.joinpath('bwm_features') #TODO NOTE THIS FOLDER DOES NOT EXIST ON S3
table_path = self.one.cache_dir.joinpath('bwm_features')
if download:
s3, bucket_name = aws.get_s3_from_alyx(alyx=self.one.alyx)
# Download file
base_path = Path("aggregates/atlas") # TODO THIS FOLDER DOES NOT CONTAIN PROBES
base_path = Path("aggregates/atlas/latest") # TODO THIS FOLDER DOES NOT CONTAIN PROBES
file_list = ['channels.pqt', 'probes.pqt', 'raw_ephys_features.pqt']
for file_name in file_list:
aws.s3_download_file(base_path.joinpath(file_name), table_path.joinpath(file_name),
@@ -67,23 +65,20 @@ def __init__(self, one, region_ids=None, ba=None, download=True, size=(1600, 800
channels = pd.read_parquet(table_path.joinpath('channels.pqt'))
probes = pd.read_parquet(table_path.joinpath('probes.pqt'))
features = pd.read_parquet(table_path.joinpath('raw_ephys_features.pqt'))
channels = channels.drop(columns='histology')

df_voltage = pd.merge(features, channels, left_index=True, right_index=True)
df_voltage = df_voltage.reset_index()
data = pd.merge(df_voltage, probes, left_on='pid', right_index=True)
data['rms_ap'] *= 1e6
data['rms_lf'] *= 1e6

depths = pd.read_parquet(table_path.joinpath('depths.pqt'))
depths = depths.reset_index()
depths = depths.rename(columns={'spike_rate': 'spike_rate_line'})

self.data = pd.merge(data, depths, left_on=['pid', 'axial_um'], right_on=['pid', 'depths'], how='outer')
self.data = data
self.data.loc[self.data['histology'] == 'alf', 'histology'] = 'resolved'

# Initialise region combobox
if region_ids is not None:
self.region_ids = region_ids
acro_h = br.id2acronym(self.region_ids)
else:
# self.region_ids = self.data.atlas_id.unique()

Expand All @@ -93,13 +88,14 @@ def __init__(self, one, region_ids=None, ba=None, download=True, size=(1600, 800
indata = np.isin(br.id, ids)
self.region_ids = br.id[indata]

del data, depths, channels, probes, features, df_voltage
acronyms = br.id2acronym(self.region_ids)
level = br.level[indata]

acronyms = br.id2acronym(self.region_ids)
level = br.level[indata]
# Show hierarchy
acro_h = [((' ' * level) + acro) for (level, acro) in zip(level, acronyms)]

# Show hierarchy
acro_h = [((' ' * level) + acro) for (level, acro) in zip(level, acronyms)]
# del data, depths, channels, probes, features, df_voltage
del data, channels, probes, features, df_voltage

# NOTE: this does not work well as is because of the hierarchy spaces at the beginning
# of the strings. You can't type "FRP" but need to type " FRP" which is impractical!
@@ -643,6 +639,9 @@ def get_region_data(self, pids):
}

d = df.iloc[0]
if d.eid is None:
d.eid, name = self.one.pid2eid(d.pid)

info = {'pid': pid,
'eid': d.eid,
'session': '/'.join(self.one.eid2path(d.eid).parts[-3:]),
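
Aside (not part of the commit): the fallback added above fills a missing eid from the probe insertion id via ONE. A minimal hedged sketch of that lookup, wrapped as a helper so no real pid is needed here:

# Illustrative only: resolving a session eid from a probe insertion id with the ONE API
from one.api import ONE

def resolve_session(one: ONE, pid: str):
    """Return (eid, probe_label) for a probe insertion id; assumes the pid exists on the server."""
    eid, probe_name = one.pid2eid(pid)
    return eid, probe_name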
3 changes: 3 additions & 0 deletions needles2/probe_model.py
@@ -355,6 +355,9 @@ def crawl_up_from_tip(ins, covered_length):
per0 = []

for p, traj in enumerate(trajs):
keys_display = ["id", "provenance", "probe_insertion"] # Keys to print, if exist
if len(set(keys_display).intersection(traj.keys())) == len(keys_display):
print(f'Trajectory: {traj["id"]}, provenance: {traj["provenance"]}, PID: {traj["probe_insertion"]}')
if len(trajs) > 20 and self.verbose is True:
if p % 20 == 0:
print(p / len(trajs))
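
Aside (not part of the commit): the set-intersection test added above only prints when every key in keys_display is present in the trajectory record; an equivalent formulation with all(), shown purely as a sketch of the same check with a made-up record:

# Same key-presence check as above, written with all() (illustrative only)
traj = {'id': 'abc', 'provenance': 'Planned', 'probe_insertion': 'pid-123'}  # made-up trajectory record
keys_display = ["id", "provenance", "probe_insertion"]
if all(key in traj for key in keys_display):
    print(f'Trajectory: {traj["id"]}, provenance: {traj["provenance"]}, PID: {traj["probe_insertion"]}')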
2 changes: 1 addition & 1 deletion viewspikes/data.py
@@ -1,7 +1,7 @@
from pathlib import Path
import shutil

from ibllib.io import spikeglx
from brainbox.io import spikeglx
from one.webclient import dataset_record_to_url

import numpy as np
