int-brain-lab / ibllib / 6161719581526678

28 Jun 2024 01:14PM UTC coverage: 64.584% (+0.03%) from 64.55%

push · tests · web-flow
Stim on extraction (#788)

* Issue #775
* Handle no go trials
* Pre-6.2.5 trials extraction
* DeprecationWarning -> FutureWarning; extractor fixes; timeline trials extraction

49 of 60 new or added lines in 9 files covered. (81.67%)

2 existing lines in 1 file now uncovered.

13055 of 20214 relevant lines covered (64.58%)

0.65 hits per line

Source File: /ibllib/pipes/misc.py (59.46% covered)
1
"""Miscellaneous pipeline utility functions."""
1✔
2
import ctypes
1✔
3
import hashlib
1✔
4
import json
1✔
5
import os
1✔
6
import re
1✔
7
import shutil
1✔
8
import subprocess
1✔
9
import sys
1✔
10
import time
1✔
11
import logging
1✔
12
import warnings
1✔
13
from functools import wraps
1✔
14
from pathlib import Path
1✔
15
from typing import Union, List, Callable, Any
1✔
16
from inspect import signature
1✔
17
import uuid
1✔
18
import socket
1✔
19
import traceback
1✔
20
import tempfile
1✔
21

22
import spikeglx
1✔
23
from iblutil.io import hashfile, params
1✔
24
from iblutil.util import range_str
1✔
25
from one.alf.files import get_session_path
1✔
26
from one.alf.spec import is_uuid_string, is_session_path, describe
1✔
27
from one.api import ONE
1✔
28

29
import ibllib.io.flags as flags
1✔
30
import ibllib.io.raw_data_loaders as raw
1✔
31
from ibllib.io.misc import delete_empty_folders
1✔
32
import ibllib.io.session_params as sess_params
1✔
33

34
log = logging.getLogger(__name__)
1✔
35

36
DEVICE_FLAG_MAP = {'neuropixel': 'ephys',
1✔
37
                   'cameras': 'video',
38
                   'widefield': 'widefield',
39
                   'sync': 'sync'}
40

41

42
def subjects_data_folder(folder: Path, rglob: bool = False) -> Path:
1✔
43
    """Given a root_data_folder will try to find a 'Subjects' data folder.
44

45
    If a Subjects folder is passed, it is returned directly."""
46
    if not isinstance(folder, Path):
1✔
47
        folder = Path(folder)
1✔
48
    if rglob:
1✔
49
        func = folder.rglob
1✔
50
    else:
51
        func = folder.glob
×
52

53
    # Try to find a 'Subjects' folder one level down (or anywhere below if rglob=True)
54
    if folder.name.lower() != 'subjects':
1✔
55
        # Look for a 'Subjects' folder among the children
56
        spath = [x for x in func('*') if x.name.lower() == 'subjects']
1✔
57
        if not spath:
1✔
58
            raise ValueError('No "Subjects" folder in children folders')
×
59
        elif len(spath) > 1:
1✔
60
            raise ValueError(f'Multiple "Subjects" folders in children folders: {spath}')
×
61
        else:
62
            folder = folder / spath[0]
1✔
63

64
    return folder
1✔
65

66
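# Illustrative usage sketch (assumes a hypothetical root folder '/mnt/s0/Data' containing a 'Subjects' child directory):
#   >>> subjects_data_folder('/mnt/s0/Data', rglob=True)
#   PosixPath('/mnt/s0/Data/Subjects')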

67
def cli_ask_default(prompt: str, default: str):
1✔
68
    """
69
    Prompt the user for input, display the default option and return user input or default
70
    :param prompt: String to display to user
71
    :param default: The default value to return if user doesn't enter anything
72
    :return: User input or default
73
    """
74
    return input(f'{prompt} [default: {default}]: ') or default
1✔
75

76

77
def cli_ask_options(prompt: str, options: list, default_idx: int = 0) -> str:
1✔
78
    parsed_options = [str(x) for x in options]
×
79
    if default_idx is not None:
×
80
        parsed_options[default_idx] = f"[{parsed_options[default_idx]}]"
×
81
    options_str = " (" + " | ".join(parsed_options) + ")> "
×
82
    ans = input(prompt + options_str) or str(options[default_idx])
×
83
    if ans not in [str(x) for x in options]:
×
84
        return cli_ask_options(prompt, options, default_idx=default_idx)
×
85
    return ans
×
86

87
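# Illustrative prompt rendering (hypothetical options; the default option is shown in brackets):
#   >>> cli_ask_options("What's the type of PROBE 00?", ['3A', '3B'])
#   What's the type of PROBE 00? ([3A] | 3B)>
#   pressing enter without typing returns '3A'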

88
def behavior_exists(session_path: str, include_devices=False) -> bool:
1✔
89
    """
90
    Returns True if the session has a task behaviour folder.
91
    :param session_path: the session path to check
    :param include_devices: if True, a '_devices' folder also counts as behaviour data
92
    :return: True if behaviour data is present, False otherwise
93
    """
94
    session_path = Path(session_path)
1✔
95
    if include_devices and session_path.joinpath("_devices").exists():
1✔
96
        return True
×
97
    if session_path.joinpath("raw_behavior_data").exists():
1✔
98
        return True
1✔
99
    return any(session_path.glob('raw_task_data_*'))
1✔
100

101

102
def check_transfer(src_session_path, dst_session_path):
1✔
103
    """
104
    Check that all files in the source directory match those in the destination directory. The
105
    function raises an AssertionError if the number of files, the file names, or the file
106
    sizes do not match.
107

108
    :param src_session_path: The source directory that was copied
109
    :param dst_session_path: The copy target directory
110
    """
111
    src_files = sorted([x for x in Path(src_session_path).rglob('*') if x.is_file()])
1✔
112
    dst_files = sorted([x for x in Path(dst_session_path).rglob('*') if x.is_file()])
1✔
113
    assert len(src_files) == len(dst_files), 'Not all files transferred'
1✔
114
    for s, d in zip(src_files, dst_files):
1✔
115
        assert s.name == d.name, 'file name mismatch'
1✔
116
        assert s.stat().st_size == d.stat().st_size, 'file size mismatch'
1✔
117

118

119
def rename_session(session_path: str, new_subject=None, new_date=None, new_number=None,
1✔
120
                   ask: bool = False) -> Path:
121
    """Rename a session.  Prompts the user for the new subject name, data and number and then moves
122
    session path to new session path.
123

124
    :param session_path: A session path to rename
125
    :type session_path: str
126
    :param new_subject: A new subject name, if none provided, the user is prompted for one
127
    :param new_date: A new session date, if none provided, the user is prompted for one
128
    :param new_number: A new session number, if none provided, the user is prompted for one
129
    :param ask: if True, force prompting the user for input, defaults to False
130
    :type ask: bool
131
    :return: The renamed session path
132
    :rtype: Path
133
    """
134
    session_path = get_session_path(session_path)
1✔
135
    if session_path is None:
1✔
136
        raise ValueError('Session path not valid ALF session folder')
1✔
137
    mouse = session_path.parts[-3]
1✔
138
    date = session_path.parts[-2]
1✔
139
    sess = session_path.parts[-1]
1✔
140
    new_mouse = new_subject or mouse
1✔
141
    new_date = new_date or date
1✔
142
    new_sess = new_number or sess
1✔
143
    if ask:
1✔
144
        new_mouse = input(f"Please insert subject NAME [current value: {mouse}]> ")
1✔
145
        new_date = input(f"Please insert new session DATE [current value: {date}]> ")
1✔
146
        new_sess = input(f"Please insert new session NUMBER [current value: {sess}]> ")
1✔
147

148
    new_session_path = Path(*session_path.parts[:-3]).joinpath(new_mouse, new_date,
1✔
149
                                                               new_sess.zfill(3))
150
    assert is_session_path(new_session_path), 'invalid subject, date or number'
1✔
151

152
    if new_session_path.exists():
1✔
153
        ans = input(f'Warning: session path {new_session_path} already exists.\nWould you like to '
×
154
                    f'move {new_session_path} to a backup directory? [y/N] ')
155
        if (ans or 'n').lower() in ['n', 'no']:
×
156
            print(f'Manual intervention required, data exists in the following directory: '
×
157
                  f'{session_path}')
158
            return
×
159
        if backup_session(new_session_path):
×
160
            print(f'Backup was successful, removing directory {new_session_path}...')
×
161
            shutil.rmtree(str(new_session_path), ignore_errors=True)
×
162
    shutil.move(str(session_path), str(new_session_path))
1✔
163
    print(session_path, "--> renamed to:")
1✔
164
    print(new_session_path)
1✔
165

166
    return new_session_path
1✔
167

168
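# Illustrative rename (hypothetical paths; new_number is given as a string so that zfill applies):
#   >>> rename_session('/data/Subjects/ZM_1150/2019-05-07/001', new_number='2')
#   moves the session to '/data/Subjects/ZM_1150/2019-05-07/002' and returns that path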

169
def backup_session(folder_path, root=None, extra=''):
1✔
170
    """
171
    Used to move the contents of a session to a backup folder, likely before the folder is
172
    removed.
173

174
    Parameters
175
    ----------
176
    folder_path : str, pathlib.Path
177
        The folder path to back up (typically before it is removed).
178
    root : str, pathlib.Path
179
        Copy folder tree relative to this. If None, copies from the session_path root.
180
    extra : str, pathlib.Path
181
        Extra folder parts to append to destination root path.
182

183
    Returns
184
    -------
185
    pathlib.Path
186
        The location of the backup data, if succeeded to copy.
187
    """
188
    if not root:
1✔
189
        if session_path := get_session_path(folder_path):
1✔
190
            root = session_path.parents[2]
1✔
191
        else:
192
            root = folder_path.parent
1✔
193
    folder_path = Path(folder_path)
1✔
194
    bk_path = Path(tempfile.gettempdir(), 'backup_sessions', extra, folder_path.relative_to(root))
1✔
195
    if folder_path.exists():
1✔
196
        if not folder_path.is_dir():
1✔
197
            log.error(f'The given folder path is not a directory: {folder_path}')
1✔
198
            return
1✔
199
        try:
1✔
200
            log.debug(f'Created path: {bk_path.parent}')
1✔
201
            bk_path = shutil.copytree(folder_path, bk_path)
1✔
202
            log.info(f'Copied contents from {folder_path} to {bk_path}')
1✔
203
            return bk_path
1✔
204
        except FileExistsError:
1✔
205
            log.error(f'A backup session for the given path already exists: {bk_path}, '
1✔
206
                      f'manual intervention is necessary.')
207
        except shutil.Error as ex:
1✔
208
            log.error('Failed to copy files from %s to %s: %s', folder_path, bk_path, ex)
1✔
209
    else:
210
        log.error('The given session path does not exist: %s', folder_path)
1✔
211

212
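# Illustrative backup destination (hypothetical session path; the root is inferred from the ALF session path):
#   backing up '/data/Subjects/ZM_1150/2019-05-07/001' copies it to
#   '<tempdir>/backup_sessions/ZM_1150/2019-05-07/001' and returns that path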

213
def copy_with_check(src, dst, **kwargs):
1✔
214
    dst = Path(dst)
×
215
    if dst.exists() and Path(src).stat().st_size == dst.stat().st_size:
×
216
        return dst
×
217
    elif dst.exists():
×
218
        dst.unlink()
×
219
    return shutil.copy2(src, dst, **kwargs)
×
220

221

222
def transfer_session_folders(local_sessions: list, remote_subject_folder, subfolder_to_transfer):
1✔
223
    """
224
    Determine which local session folders should be transferred to which remote session folders, prompting the user
225
    when necessary.
226

227
    Parameters
228
    ----------
229
    local_sessions : list
230
        Required list of local session folder paths to sync to local server.
231
    remote_subject_folder : str, pathlib.Path
232
        The remote location of the subject folder (typically pulled from the params).
233
    subfolder_to_transfer : str
234
        Which subfolder to sync
235

236
    Returns
237
    -------
238
    list of tuples
239
        For each session, a tuple of (source, destination) of attempted file transfers.
240
    list of bool
241
        A boolean True/False for success/failure of the transfer.
242
    """
243
    transfer_list = []  # list of sessions to transfer
1✔
244
    skip_list = ""  # "list" of sessions to skip and the reason for the skip
1✔
245
    # Iterate through all local sessions in the given list
246
    for local_session in local_sessions:
1✔
247
        # Set expected remote_session location and perform simple error state checks
248
        remote_session = remote_subject_folder.joinpath(*local_session.parts[-3:])
1✔
249
        # Skip session if ...
250
        if subfolder_to_transfer:
1✔
251
            if not local_session.joinpath(subfolder_to_transfer).exists():
1✔
252
                msg = f"{local_session} - skipping session, no '{subfolder_to_transfer}' folder found locally"
×
253
                log.warning(msg)
×
254
                skip_list += msg + "\n"
×
255
                continue
×
256
        if not remote_session.parent.exists():
1✔
257
            msg = f"{local_session} - no matching remote session date folder found for the given local session"
1✔
258
            log.info(msg)
1✔
259
            skip_list += msg + "\n"
1✔
260
            continue
1✔
261
        if not behavior_exists(remote_session):
1✔
262
            msg = f"{local_session} - skipping session, no behavior data found in remote folder {remote_session}"
1✔
263
            log.warning(msg)
1✔
264
            skip_list += msg + "\n"
1✔
265
            continue
1✔
266

267
        # Determine if there are multiple session numbers from the date path
268
        local_sessions_for_date = get_session_numbers_from_date_path(local_session.parent)
1✔
269
        remote_sessions_for_date = get_session_numbers_from_date_path(remote_session.parent)
1✔
270
        remote_session_pick = None
1✔
271
        if len(local_sessions_for_date) > 1 or len(remote_sessions_for_date) > 1:
1✔
272
            # Format folder size output for end user to review
273
            local_session_numbers_with_size = remote_session_numbers_with_size = ""
1✔
274
            for lsfd in local_sessions_for_date:
1✔
275
                size_in_gb = round(get_directory_size(local_session.parent / lsfd, in_gb=True), 2)
1✔
276
                local_session_numbers_with_size += lsfd + " (" + str(size_in_gb) + " GB)\n"
1✔
277
            for rsfd in remote_sessions_for_date:
1✔
278
                size_in_gb = round(get_directory_size(remote_session.parent / rsfd, in_gb=True), 2)
1✔
279
                remote_session_numbers_with_size += rsfd + " (" + str(size_in_gb) + " GB)\n"
1✔
280
            log.info(f"\n\nThe following local session folder(s) were found on this acquisition PC:\n\n"
1✔
281
                     f"{''.join(local_session_numbers_with_size)}\nThe following remote session folder(s) were found on the "
282
                     f"server:\n\n{''.join(remote_session_numbers_with_size)}\n")
283

284
            def _remote_session_picker(sessions_for_date):
1✔
285
                resp = "s"
1✔
286
                resp_invalid = True
1✔
287
                while resp_invalid:  # loop until valid user input
1✔
288
                    resp = input(f"\n\n--- USER INPUT NEEDED ---\nWhich REMOTE session number would you like to transfer your "
1✔
289
                                 f"local session to? Options {range_str(map(int, sessions_for_date))} or "
290
                                 f"[s]kip/[h]elp/[e]xit> ").strip().lower()
291
                    if resp == "h":
1✔
292
                        print("An example session filepath:\n")
×
293
                        describe("number")  # Explain what a session number is
×
294
                        input("Press enter to continue")
×
295
                    elif resp == "s" or resp == "e":  # exit loop
1✔
296
                        resp_invalid = False
×
297
                    elif len(resp) <= 3:
1✔
298
                        resp_invalid = False if [i for i in sessions_for_date if int(resp) == int(i)] else None
1✔
299
                    else:
300
                        print("Invalid response. Please try again.")
×
301
                return resp
1✔
302

303
            log.info(f"Evaluation for local session "
1✔
304
                     f"{local_session.parts[-3]}/{local_session.parts[-2]}/{local_session.parts[-1]}...")
305
            user_response = _remote_session_picker(remote_sessions_for_date)
1✔
306
            if user_response == "s":
1✔
307
                msg = f"{local_session} - Local session skipped due to user input"
×
308
                log.info(msg)
×
309
                skip_list += msg + "\n"
×
310
                continue
×
311
            elif user_response == "e":
1✔
312
                log.info("Exiting, no files transferred.")
×
313
                return
×
314
            else:
315
                remote_session_pick = remote_session.parent / user_response.zfill(3)
1✔
316

317
        # Append to the transfer_list
318
        transfer_tuple = (local_session, remote_session_pick) if remote_session_pick else (local_session, remote_session)
1✔
319
        transfer_list.append(transfer_tuple)
1✔
320
        log.info(f"{transfer_tuple[0]}, {transfer_tuple[1]} - Added to the transfer list")
1✔
321

322
    # Verify that the number of local transfer_list entries matches the number of remote transfer_list entries
323
    if len(transfer_list) != len(set(dst for _, dst in transfer_list)):
1✔
324
        raise RuntimeError(
×
325
            "An invalid combination of sessions were picked; the most likely cause of this error is multiple local "
326
            "sessions being selected for a single remote session. Please rerun the script."
327
        )
328

329
    # Call rsync/rdiff function for every entry in the transfer list
330
    success = []
1✔
331
    for src, dst in transfer_list:
1✔
332
        if subfolder_to_transfer:
1✔
333
            success.append(rsync_paths(src / subfolder_to_transfer, dst / subfolder_to_transfer))
1✔
334
        else:
335
            success.append(rsync_paths(src, dst))
×
336
        if not success[-1]:
1✔
337
            log.error("File transfer failed, check log for reason.")
×
338

339
    # Notify the user of any transfers that were skipped
340
    log.warning(f"Video transfers that were not completed:\n\n{skip_list}") if skip_list else log.info("No transfers skipped.")
1✔
341
    return transfer_list, success
1✔
342

343
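# Illustrative return value (hypothetical single session transferred successfully):
#   ([(PosixPath('/local/Subjects/ZM_1150/2019-05-07/001'),
#      PosixPath('/remote/Subjects/ZM_1150/2019-05-07/001'))], [True])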

344
def transfer_folder(src: Path, dst: Path, force: bool = False) -> None:
1✔
345
    """functionality has been replaced by transfer_session_folders function"""
346
    print(f"Attempting to copy:\n{src}\n--> {dst}")
×
347
    if force:
×
348
        print(f"Removing {dst}")
×
349
        shutil.rmtree(dst, ignore_errors=True)
×
350
    else:
351
        try:
×
352
            check_transfer(src, dst)
×
353
            print("All files already copied, use force=True to re-copy")
×
354
            return
×
355
        except AssertionError:
×
356
            pass
×
357
    print(f"Copying all files:\n{src}\n--> {dst}")
×
358
    # rsync_folder(src, dst, '**transfer_me.flag')
359
    if sys.version_info.minor < 8:
×
360
        # dirs_exist_ok kwarg not supported in < 3.8
361
        shutil.rmtree(dst, ignore_errors=True)
×
362
        shutil.copytree(src, dst, copy_function=copy_with_check)
×
363
    else:
364
        shutil.copytree(src, dst, dirs_exist_ok=True, copy_function=copy_with_check)
×
365
    # Verify that the copy succeeded (check_transfer raises an AssertionError on mismatch)
366
    if check_transfer(src, dst) is None:
×
367
        print("All files copied")
×
368
    # rdiff-backup --compare /tmp/tmpw9o1zgn0 /tmp/tmp82gg36rm
369
    # No changes found.  Directory matches archive data.
370

371

372
def load_params_dict(params_fname: str) -> dict:
1✔
373
    params_fpath = Path(params.getfile(params_fname))
×
374
    if not params_fpath.exists():
×
375
        return None
×
376
    with open(params_fpath, "r") as f:
×
377
        out = json.load(f)
×
378
    return out
×
379

380

381
def load_videopc_params():
1✔
382
    """(DEPRECATED) This will be removed in favour of iblrigv8 functions."""
383
    warnings.warn('load_videopc_params will be removed in favour of iblrigv8', FutureWarning)
×
384
    if not load_params_dict("videopc_params"):
×
385
        create_videopc_params()
×
386
    return load_params_dict("videopc_params")
×
387

388

389
def load_ephyspc_params():
1✔
390
    if not load_params_dict("ephyspc_params"):
×
391
        create_ephyspc_params()
×
392
    return load_params_dict("ephyspc_params")
×
393

394

395
def create_basic_transfer_params(param_str='transfer_params', local_data_path=None,
1✔
396
                                 remote_data_path=None, clobber=False, **kwargs):
397
    """Create some basic parameters common to all acquisition rigs.
398

399
    Namely prompt user for the local root data path and the remote (lab server) data path.
400
    NB: All params stored in uppercase by convention.
401

402
    Parameters
403
    ----------
404
    param_str : str
405
        The name of the parameters to load/save.
406
    local_data_path : str, pathlib.Path
407
        The local root data path, stored with the DATA_FOLDER_PATH key.  If None, user is prompted.
408
    remote_data_path : str, pathlib.Path, bool
409
        The remote root data path, stored with the REMOTE_DATA_FOLDER_PATH key.  If None, user is prompted.
410
        If False, the REMOTE_DATA_FOLDER_PATH key is not updated, or is set to False if clobber = True.
411
    clobber : bool
412
        If True, any parameters in existing parameter file not found as keyword args will be removed,
413
        otherwise the user is prompted for these also.
414
    **kwargs
415
        Extra parameters to set. If value is None, the user is prompted.
416

417
    Returns
418
    -------
419
    dict
420
        The parameters written to disc.
421

422
    Examples
423
    --------
424
    Set up basic transfer parameters for modality acquisition PC
425

426
    >>> par = create_basic_transfer_params()
427

428
    Set up basic transfer parameters without prompting the user
429

430
    >>> par = create_basic_transfer_params(
431
    ...     local_data_path='/iblrig_data/Subjects',
432
    ...     remote_data_path='/mnt/iblserver.champalimaud.pt/ibldata/Subjects')
433

434
    Prompt user for extra parameter using custom prompt (will call function with current default)
435

436
    >>> from functools import partial
437
    >>> par = create_basic_transfer_params(
438
    ...     custom_arg=partial(cli_ask_default, 'Please enter custom arg value'))
439

440
    Set up with no remote path (NB: if not the first time, use clobber=True to save param key)
441

442
    >>> par = create_basic_transfer_params(remote_data_path=False)
443

444
    """
445
    parameters = params.as_dict(params.read(param_str, {})) or {}
1✔
446
    if local_data_path is None:
1✔
447
        local_data_path = parameters.get('DATA_FOLDER_PATH')
1✔
448
        if not local_data_path or clobber:
1✔
449
            local_data_path = cli_ask_default("Where's your LOCAL 'Subjects' data folder?", local_data_path)
1✔
450
    parameters['DATA_FOLDER_PATH'] = local_data_path
1✔
451

452
    if remote_data_path is None:
1✔
453
        remote_data_path = parameters.get('REMOTE_DATA_FOLDER_PATH')
1✔
454
        if remote_data_path in (None, '') or clobber:
1✔
455
            remote_data_path = cli_ask_default("Where's your REMOTE 'Subjects' data folder?", remote_data_path)
1✔
456
    if remote_data_path is not False:
1✔
457
        parameters['REMOTE_DATA_FOLDER_PATH'] = remote_data_path
1✔
458
    elif 'REMOTE_DATA_FOLDER_PATH' not in parameters or clobber:
1✔
459
        parameters['REMOTE_DATA_FOLDER_PATH'] = False  # Always assume no remote path
1✔
460

461
    # Deal with extraneous parameters
462
    for k, v in kwargs.items():
1✔
463
        if callable(v):  # expect function handle with default value as input
1✔
464
            n_pars = len(signature(v).parameters)
1✔
465
            parameters[k.upper()] = v(parameters.get(k.upper())) if n_pars > 0 else v()
1✔
466
        elif v is None:  # generic prompt for key
1✔
467
            parameters[k.upper()] = cli_ask_default(
1✔
468
                f'Enter a value for parameter {k.upper()}', parameters.get(k.upper())
469
            )
470
        else:  # assign value to parameter
471
            parameters[k.upper()] = str(v)
1✔
472

473
    defined = list(map(str.upper, ('DATA_FOLDER_PATH', 'REMOTE_DATA_FOLDER_PATH', 'TRANSFER_LABEL', *kwargs.keys())))
1✔
474
    if clobber:
1✔
475
        # Delete any parameters in parameter dict that were not passed as keyword args into function
476
        parameters = {k: v for k, v in parameters.items() if k in defined}
1✔
477
    else:
478
        # Prompt for any other parameters that weren't passed into function
479
        for k in filter(lambda x: x not in defined, map(str.upper, parameters.keys())):
1✔
480
            parameters[k] = cli_ask_default(f'Enter a value for parameter {k}', parameters.get(k))
1✔
481

482
    if 'TRANSFER_LABEL' not in parameters:
1✔
483
        parameters['TRANSFER_LABEL'] = f'{socket.gethostname()}_{uuid.getnode()}'
1✔
484

485
    # Write parameters
486
    params.write(param_str, parameters)
1✔
487
    return parameters
1✔
488

489

490
def create_videopc_params(force=False, silent=False):
1✔
491
    """(DEPRECATED) This will be removed in favour of iblrigv8 functions."""
492
    url = 'https://github.com/int-brain-lab/iblrig/blob/videopc/docs/source/video.rst'
×
NEW
493
    warnings.warn(f'create_videopc_params is deprecated, see {url}', FutureWarning)
×
494
    if Path(params.getfile("videopc_params")).exists() and not force:
×
495
        print(f"{params.getfile('videopc_params')} exists already, exiting...")
×
496
        print(Path(params.getfile("videopc_params")).exists())
×
497
        return
×
498
    if silent:
×
499
        data_folder_path = r"D:\iblrig_data\Subjects"
×
500
        remote_data_folder_path = r"\\iblserver.champalimaud.pt\ibldata\Subjects"
×
501
        body_cam_idx = 0
×
502
        left_cam_idx = 1
×
503
        right_cam_idx = 2
×
504
    else:
505
        data_folder_path = cli_ask_default(
×
506
            r"Where's your LOCAL 'Subjects' data folder?", r"D:\iblrig_data\Subjects"
507
        )
508
        remote_data_folder_path = cli_ask_default(
×
509
            r"Where's your REMOTE 'Subjects' data folder?",
510
            r"\\iblserver.champalimaud.pt\ibldata\Subjects",
511
        )
512
        body_cam_idx = cli_ask_default("Please select the index of the BODY camera", "0")
×
513
        left_cam_idx = cli_ask_default("Please select the index of the LEFT camera", "1")
×
514
        right_cam_idx = cli_ask_default("Please select the index of the RIGHT camera", "2")
×
515

516
    param_dict = {
×
517
        "DATA_FOLDER_PATH": data_folder_path,
518
        "REMOTE_DATA_FOLDER_PATH": remote_data_folder_path,
519
        "BODY_CAM_IDX": body_cam_idx,
520
        "LEFT_CAM_IDX": left_cam_idx,
521
        "RIGHT_CAM_IDX": right_cam_idx,
522
    }
523
    params.write("videopc_params", param_dict)
×
524
    print(f"Created {params.getfile('videopc_params')}")
×
525
    print(param_dict)
×
526
    return param_dict
×
527

528

529
def create_ephyspc_params(force=False, silent=False):
1✔
530
    if Path(params.getfile("ephyspc_params")).exists() and not force:
×
531
        print(f"{params.getfile('ephyspc_params')} exists already, exiting...")
×
532
        print(Path(params.getfile("ephyspc_params")).exists())
×
533
        return
×
534
    if silent:
×
535
        data_folder_path = r"D:\iblrig_data\Subjects"
×
536
        remote_data_folder_path = r"\\iblserver.champalimaud.pt\ibldata\Subjects"
×
537
        probe_types = {"PROBE_TYPE_00": "3A", "PROBE_TYPE_01": "3B"}
×
538
    else:
539
        data_folder_path = cli_ask_default(
×
540
            r"Where's your LOCAL 'Subjects' data folder?", r"D:\iblrig_data\Subjects"
541
        )
542
        remote_data_folder_path = cli_ask_default(
×
543
            r"Where's your REMOTE 'Subjects' data folder?",
544
            r"\\iblserver.champalimaud.pt\ibldata\Subjects",
545
        )
546
        n_probes = int(cli_ask_default("How many probes are you using?", '2'))
×
547
        assert 100 > n_probes > 0, 'Please enter number between 1, 99 inclusive'
×
548
        probe_types = {}
×
549
        for i in range(n_probes):
×
550
            probe_types[f'PROBE_TYPE_{i:02}'] = cli_ask_options(
×
551
                f"What's the type of PROBE {i:02}?", ["3A", "3B"])
552
    param_dict = {
×
553
        "DATA_FOLDER_PATH": data_folder_path,
554
        "REMOTE_DATA_FOLDER_PATH": remote_data_folder_path,
555
        **probe_types
556
    }
557
    params.write("ephyspc_params", param_dict)
×
558
    print(f"Created {params.getfile('ephyspc_params')}")
×
559
    print(param_dict)
×
560
    return param_dict
×
561

562

563
def rdiff_install() -> bool:
1✔
564
    """
565
    For Windows:
566
    * if the rdiff-backup executable does not already exist on the system
567
      * downloads rdiff-backup zip file
568
      * copies the executable to the C:\tools folder
569

570
    For Linux/macOS:
571
    * runs a pip install rdiff-backup
572

573
    Returns:
574
        True when install is successful, False when an error is encountered
575
    """
576
    if os.name == "nt":
1✔
577
        # ensure tools folder exists
578
        tools_folder = "C:\\tools\\"
×
579
        os.mkdir(tools_folder) if not Path(tools_folder).exists() else None
×
580

581
        rdiff_cmd_loc = tools_folder + "rdiff-backup.exe"
×
582
        if not Path(rdiff_cmd_loc).exists():
×
583
            import requests
×
584
            import zipfile
×
585
            from io import BytesIO
×
586

587
            url = "https://github.com/rdiff-backup/rdiff-backup/releases/download/v2.0.5/rdiff-backup-2.0.5.win32exe.zip"
×
588
            log.info("Downloading zip file for rdiff-backup.")
×
589
            # Download the file by sending the request to the URL, ensure success by status code
590
            if requests.get(url).status_code == 200:
×
591
                log.info("Download complete for rdiff-backup zip file.")
×
592
                # extracting the zip file contents
593
                zipfile = zipfile.ZipFile(BytesIO(requests.get(url).content))
×
594
                zipfile.extractall("C:\\Temp")
×
595
                rdiff_folder_name = zipfile.namelist()[0]  # attempting a bit of future-proofing
×
596
                # move the executable to the C:\tools folder
597
                shutil.copy("C:\\Temp\\" + rdiff_folder_name + "rdiff-backup.exe", rdiff_cmd_loc)
×
598
                shutil.rmtree("C:\\Temp\\" + rdiff_folder_name)  # cleanup temp folder
×
599
                try:  # attempt to call the rdiff command
×
600
                    subprocess.run([rdiff_cmd_loc, "--version"], check=True)
×
601
                except (FileNotFoundError, subprocess.CalledProcessError) as e:
×
602
                    log.error("rdiff-backup installation did not complete.\n", e)
×
603
                    return False
×
604
                return True
×
605
            else:
606
                log.error("Download request status code not 200, something did not go as expected.")
×
607
                return False
×
608
    else:  # anything not Windows
609
        try:  # package should not be installed via the requirements.txt to accommodate windows
1✔
610
            subprocess.run(["pip", "install", "rdiff-backup"], check=True)
1✔
611
        except subprocess.CalledProcessError as e:
×
612
            log.error("rdiff-backup pip install did not complete.\n", e)
×
613
            return False
×
614
        return True
1✔
615

616

617
def get_directory_size(dir_path: Path, in_gb=False) -> float:
1✔
618
    """
619
    Determine the total size of all files in a given directory path, including all child directories
620

621
    Args:
622
        dir_path (Path): path we want to get the total size of
623
        in_gb (bool): set to True for returned value to be in gigabytes
624

625
    Returns:
626
        float: sum of all files in the given directory path (in bytes by default, in GB if specified)
627
    """
628
    total = 0
1✔
629
    with iter(os.scandir(dir_path)) as it:
1✔
630
        for entry in it:
1✔
631
            if entry.is_file():
1✔
632
                total += entry.stat().st_size
1✔
633
            elif entry.is_dir():
1✔
634
                total += get_directory_size(entry.path)
1✔
635
    if in_gb:
1✔
636
        return total / 1024 / 1024 / 1024  # in GB
1✔
637
    return total  # in bytes
1✔
638

639
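# Illustrative conversion (hypothetical directory totalling 2_147_483_648 bytes):
#   get_directory_size(dir_path, in_gb=True) returns 2.0 (bytes / 1024 / 1024 / 1024)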

640
def get_session_numbers_from_date_path(date_path: Path) -> list:
1✔
641
    """
642
    Retrieves session numbers when given a date path
643

644
    Args:
645
        date_path (Path): path to a date folder, e.g. \\\\server\\some_lab\\Subjects\\Date
646

647
    Returns:
648
        (list): Found sessions as a sorted list
649
    """
650
    contents = Path(date_path).glob('*')
1✔
651
    folders = filter(lambda x: x.is_dir() and re.match(r'^\d{3}$', x.name), contents)
1✔
652
    sessions_as_set = set(map(lambda x: x.name, folders))
1✔
653
    sessions_as_sorted_list = sorted(sessions_as_set)
1✔
654
    return sessions_as_sorted_list
1✔
655

656
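# Illustrative result (hypothetical date folder containing '001', '002' and a stray 'alf' subfolder):
#   >>> get_session_numbers_from_date_path(Path('/data/Subjects/ZM_1150/2019-05-07'))
#   ['001', '002']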

657
def rsync_paths(src: Path, dst: Path) -> bool:
1✔
658
    """
659
    Run the rsync algorithm via an rdiff-backup command on the provided source and destination paths.
660
    This function relies on the rdiff-backup package, invoked from the command line via subprocess.run(). Full documentation
661
    can be found here - https://rdiff-backup.net/docs/rdiff-backup.1.html
662

663
    Parameters
664
    ----------
665
    src : Path
666
        source path that contains data to be transferred
667
    dst : Path
668
        destination path that will receive the transferred data
669

670
    Returns
671
    -------
672
    bool
673
        True for success, False for failure
674

675
    Raises
676
    ------
677
    FileNotFoundError, subprocess.CalledProcessError
678
    """
679
    # Set rdiff_cmd_loc based on OS type (assuming C:\tools is not in Windows PATH environ)
680
    rdiff_cmd_loc = "C:\\tools\\rdiff-backup.exe" if os.name == "nt" else "rdiff-backup"
1✔
681
    try:  # Check if rdiff-backup command is available
1✔
682
        subprocess.run([rdiff_cmd_loc, "--version"], check=True)
1✔
683
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
1✔
684
        if not rdiff_install():  # Attempt to install rdiff
1✔
685
            log.error("rdiff-backup command is unavailable, transfers can not continue.\n", e)
×
686
            raise
×
687

688
    log.info("Attempting to transfer data: " + str(src) + " -> " + str(dst))
1✔
689
    WindowsInhibitor().inhibit() if os.name == "nt" else None  # prevent Windows from going to sleep
1✔
690
    try:
1✔
691
        rsync_command = [rdiff_cmd_loc, "--verbosity", str(0),
1✔
692
                         "--create-full-path", "--backup-mode", "--no-acls", "--no-eas",
693
                         "--no-file-statistics", "--exclude", "**transfer_me.flag",
694
                         str(src), str(dst)]
695
        subprocess.run(rsync_command, check=True)
1✔
696
        time.sleep(1)  # give rdiff-backup a second to complete all logging operations
1✔
697
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
×
698
        log.error("Transfer failed with code %i.\n", e.returncode)
×
699
        if e.stderr:
×
700
            log.error(e.stderr)
×
701
        return False
×
702
    log.info("Validating transfer completed...")
1✔
703
    try:  # Validate the transfers succeeded
1✔
704
        rsync_validate = [rdiff_cmd_loc, "--verify", str(dst)]
1✔
705
        subprocess.run(rsync_validate, check=True)
1✔
706
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
×
707
        log.error(f"Validation for destination {dst} failed.\n", e)
×
708
        return False
×
709
    log.info("Cleaning up rdiff files...")
1✔
710
    shutil.rmtree(dst / "rdiff-backup-data")
1✔
711
    WindowsInhibitor().uninhibit() if os.name == 'nt' else None  # allow Windows to go to sleep
1✔
712
    return True
1✔
713

714
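# Illustrative command assembled by rsync_paths (hypothetical src/dst paths, non-Windows executable name):
#   rdiff-backup --verbosity 0 --create-full-path --backup-mode --no-acls --no-eas \
#       --no-file-statistics --exclude **transfer_me.flag <src> <dst>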

715
def confirm_ephys_remote_folder(local_folder=False, remote_folder=False, force=False, iblscripts_folder=False,
1✔
716
                                session_path=None):
717
    """
718
    :param local_folder: The full path to the local Subjects folder
719
    :param remote_folder:  the full path to the remote Subjects folder
720
    :param force:
721
    :param iblscripts_folder:
722
    :return:
723
    """
724
    # FIXME: session_path can be relative
725
    pars = load_ephyspc_params()
×
726
    if not iblscripts_folder:
×
727
        import deploy
×
728
        iblscripts_folder = Path(deploy.__file__).parent.parent
×
729
    if not local_folder:
×
730
        local_folder = pars["DATA_FOLDER_PATH"]
×
731
    if not remote_folder:
×
732
        remote_folder = pars["REMOTE_DATA_FOLDER_PATH"]
×
733
    local_folder = Path(local_folder)
×
734
    remote_folder = Path(remote_folder)
×
735
    # Check for Subjects folder
736
    local_folder = subjects_data_folder(local_folder, rglob=True)
×
737
    remote_folder = subjects_data_folder(remote_folder, rglob=True)
×
738

739
    log.info(f"local folder: {local_folder}")
×
740
    log.info(f"remote folder: {remote_folder}")
×
741
    if session_path is None:
×
742
        src_session_paths = [x.parent for x in local_folder.rglob("transfer_me.flag")]
×
743
    else:
744
        src_session_paths = session_path if isinstance(session_path, list) else [session_path]
×
745

746
    if not src_session_paths:
×
747
        log.info("Nothing to transfer, exiting...")
×
748
        return
×
749
    for session_path in src_session_paths:
×
750
        log.info(f"Found : {session_path}")
×
751
    log.info(f"Found: {len(src_session_paths)} sessions to transfer, starting transferring now")
×
752

753
    for session_path in src_session_paths:
×
754
        log.info(f"Transferring session: {session_path}")
×
755
        # Rename ephys files
756
        # FIXME: if transfer has failed and wiring file is there renaming will fail!
757
        rename_ephys_files(str(session_path))
×
758
        # Move ephys files
759
        move_ephys_files(str(session_path))
×
760
        # Copy wiring files
761
        copy_wiring_files(str(session_path), iblscripts_folder)
×
762
        try:
×
763
            create_alyx_probe_insertions(str(session_path))
×
764
        except BaseException:
×
765
            log.error(traceback.format_exc())
×
766
            log.info("Probe creation failed, please create the probe insertions manually. Continuing transfer...")
×
767
        msg = f"Transfer {session_path }to {remote_folder} with the same name?"
×
768
        resp = input(msg + "\n[y]es/[r]ename/[s]kip/[e]xit\n ^\n> ") or "y"
×
769
        resp = resp.lower()
×
770
        log.info(resp)
×
771
        if resp not in ["y", "r", "s", "e", "yes", "rename", "skip", "exit"]:
×
772
            return confirm_ephys_remote_folder(
×
773
                local_folder=local_folder,
774
                remote_folder=remote_folder,
775
                force=force,
776
                iblscripts_folder=iblscripts_folder,
777
            )
778
        elif resp == "y" or resp == "yes":
×
779
            pass
×
780
        elif resp == "r" or resp == "rename":
×
781
            session_path = rename_session(session_path)
×
782
            if not session_path:
×
783
                continue
×
784
        elif resp == "s" or resp == "skip":
×
785
            continue
×
786
        elif resp == "e" or resp == "exit":
×
787
            return
×
788

789
        remote_session_path = remote_folder / Path(*session_path.parts[-3:])
×
790
        if not behavior_exists(remote_session_path, include_devices=True):
×
791
            log.error(f"No behavior folder found in {remote_session_path}: skipping session...")
×
792
            return
×
793
        # TODO: Check flag files on src and dst + alf dir in session folder, then remove
794
        # Try/except? where the catch condition is force transfer, maybe
795
        transfer_folder(session_path / "raw_ephys_data", remote_session_path / "raw_ephys_data", force=force)
×
796
        # if behavior extract_me.flag exists remove it, because of ephys flag
797
        flag_file = session_path / "transfer_me.flag"
×
798
        if flag_file.exists():  # this file only exists for the iblrig v7 and lower
×
799
            flag_file.unlink()
×
800
            if (remote_session_path / "extract_me.flag").exists():
×
801
                (remote_session_path / "extract_me.flag").unlink()
×
802
            # Create remote flags
803
            create_ephys_transfer_done_flag(remote_session_path)
×
804
            check_create_raw_session_flag(remote_session_path)
×
805

806

807
def probe_labels_from_session_path(session_path: Union[str, Path]) -> List[str]:
1✔
808
    """
809
    Finds ephys probes according to the spikeglx meta-data files. Only returns the first subfolder
810
    name under the raw_ephys_data folder, i.e. raw_ephys_data/probe00/copy_of_probe00 won't be returned.
811
    If there is a NP2.4 probe with several shanks, several probe labels are created.
812
    :param session_path:
813
    :return: list of strings
814
    """
815
    plabels = []
1✔
816
    raw_ephys_folder = Path(session_path).joinpath('raw_ephys_data')
1✔
817
    for meta_file in raw_ephys_folder.rglob('*.ap.meta'):
1✔
818
        if meta_file.parents[1] != raw_ephys_folder:
1✔
819
            continue
1✔
820
        meta = spikeglx.read_meta_data(meta_file)
1✔
821
        nshanks = spikeglx._get_nshanks_from_meta(meta)
1✔
822
        if nshanks > 1:
1✔
823
            for i in range(nshanks):
1✔
824
                plabels.append(meta_file.parts[-2] + 'abcdefghij'[i])
1✔
825
        else:
826
            plabels.append(meta_file.parts[-2])
1✔
827
    plabels.sort()
1✔
828
    return plabels
1✔
829

830
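# Illustrative output (hypothetical session with a 4-shank NP2.4 probe in probe00 and a single-shank probe01):
#   >>> probe_labels_from_session_path(session_path)
#   ['probe00a', 'probe00b', 'probe00c', 'probe00d', 'probe01']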

831
def create_alyx_probe_insertions(
1✔
832
    session_path: str,
833
    force: bool = False,
834
    one: object = None,
835
    model: str = None,
836
    labels: list = None,
837
):
838
    if one is None:
1✔
839
        one = ONE(cache_rest=None, mode='local')
×
840
    eid = session_path if is_uuid_string(session_path) else one.path2eid(session_path)
1✔
841
    if eid is None:
1✔
842
        log.warning("Session not found on Alyx: please create session before creating insertions")
×
843
    if model is None:
1✔
844
        probe_model = spikeglx.get_neuropixel_version_from_folder(session_path)
×
845
        pmodel = "3B2" if probe_model == "3B" else probe_model
×
846
    else:
847
        pmodel = model
1✔
848
    labels = labels or probe_labels_from_session_path(session_path)
1✔
849
    # create the qc fields in the json field
850
    qc_dict = {}
1✔
851
    qc_dict.update({"qc": "NOT_SET"})
1✔
852
    qc_dict.update({"extended_qc": {}})
1✔
853

854
    # create the dictionary
855
    insertions = []
1✔
856
    for plabel in labels:
1✔
857
        insdict = {"session": eid, "name": plabel, "model": pmodel, "json": qc_dict}
1✔
858
        # search for the corresponding insertion in Alyx
859
        alyx_insertion = one.alyx.get(f'/insertions?&session={eid}&name={plabel}', clobber=True)
1✔
860
        # if it doesn't exist, create it
861
        if len(alyx_insertion) == 0:
1✔
862
            alyx_insertion = one.alyx.rest("insertions", "create", data=insdict)
1✔
863
        else:
864
            iid = alyx_insertion[0]["id"]
×
865
            if force:
×
866
                alyx_insertion = one.alyx.rest("insertions", "update", id=iid, data=insdict)
×
867
            else:
868
                alyx_insertion = alyx_insertion[0]
×
869
        insertions.append(alyx_insertion)
1✔
870
    return insertions
1✔
871

872

873
def create_ephys_flags(session_folder: str):
1✔
874
    """
875
    Create flags for processing an ephys session.  Should be called after move_ephys_files
876
    :param session_folder: A path to an ephys session
877
    :return:
878
    """
879
    session_path = Path(session_folder)
1✔
880
    flags.write_flag_file(session_path.joinpath("extract_ephys.flag"))
1✔
881
    flags.write_flag_file(session_path.joinpath("raw_ephys_qc.flag"))
1✔
882
    for probe_path in session_path.joinpath('raw_ephys_data').glob('probe*'):
1✔
883
        flags.write_flag_file(probe_path.joinpath("spike_sorting.flag"))
1✔
884

885

886
def create_ephys_transfer_done_flag(session_folder: str) -> None:
1✔
887
    session_path = Path(session_folder)
1✔
888
    flags.write_flag_file(session_path.joinpath("ephys_data_transferred.flag"))
1✔
889

890

891
def create_video_transfer_done_flag(session_folder: str) -> None:
1✔
892
    session_path = Path(session_folder)
1✔
893
    flags.write_flag_file(session_path.joinpath("video_data_transferred.flag"))
1✔
894

895

896
def create_transfer_done_flag(session_folder: str, flag_name: str) -> None:
1✔
897
    session_path = Path(session_folder)
1✔
898
    flags.write_flag_file(session_path.joinpath(f"{flag_name}_data_transferred.flag"))
1✔
899

900

901
def check_create_raw_session_flag(session_folder: str) -> None:
1✔
902
    session_path = Path(session_folder)
1✔
903

904
    # If we have an experiment description file, read from it which devices (video, ephys, widefield, etc.) we expect,
905
    # rather than deciding based on the task protocol alone
906
    experiment_description = sess_params.read_params(session_path)
1✔
907

908
    def check_status(expected, flag):
1✔
909
        if expected is not False and flag.exists():
1✔
910
            return True
1✔
911
        if expected is False and not flag.exists():
1✔
912
            return True
×
913
        else:
914
            return False
1✔
915

916
    if experiment_description is not None:
1✔
917

918
        if any(session_path.joinpath('_devices').glob('*')):
1✔
919
            return
×
920

921
        # Find the devices in the experiment description file
922
        devices = list()
1✔
923
        for key in DEVICE_FLAG_MAP.keys():
1✔
924
            if experiment_description.get('devices', {}).get(key, None) is not None:
1✔
925
                devices.append(key)
1✔
926
        # In case of widefield the sync also needs to be in its own folder
927
        if 'widefield' in devices:
1✔
928
            devices.append('sync')
1✔
929

930
        expected_flags = [session_path.joinpath(f'{DEVICE_FLAG_MAP[dev]}_data_transferred.flag') for dev in devices]
1✔
931

932
        expected = []
1✔
933
        flag_files = []
1✔
934
        for dev, fl in zip(devices, expected_flags):
1✔
935
            status = check_status(dev, fl)
1✔
936
            if status:
1✔
937
                flag_files.append(fl)
1✔
938
            expected.append(status)
1✔
939

940
        # In this case all the copying has completed
941
        if all(expected):
1✔
942
            # make raw session flag
943
            flags.write_flag_file(session_path.joinpath("raw_session.flag"))
1✔
944
            # and unlink individual copy flags
945
            for fl in flag_files:
1✔
946
                fl.unlink()
1✔
947

948
        return
1✔
949

950
    ephys = session_path.joinpath("ephys_data_transferred.flag")
1✔
951
    video = session_path.joinpath("video_data_transferred.flag")
1✔
952

953
    sett = raw.load_settings(session_path)
1✔
954
    if sett is None:
1✔
955
        log.error(f"No flag created for {session_path}")
×
956
        return
×
957

958
    is_biased = "biased" in sett["PYBPOD_PROTOCOL"]
1✔
959
    is_training = "training" in sett["PYBPOD_PROTOCOL"]
1✔
960
    is_habituation = "habituation" in sett["PYBPOD_PROTOCOL"]
1✔
961
    if video.exists() and (is_biased or is_training or is_habituation):
1✔
962
        flags.write_flag_file(session_path.joinpath("raw_session.flag"))
1✔
963
        video.unlink()
1✔
964
    if video.exists() and ephys.exists():
1✔
965
        flags.write_flag_file(session_path.joinpath("raw_session.flag"))
1✔
966
        ephys.unlink()
1✔
967
        video.unlink()
1✔
968

969
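# Illustrative flag sequence (hypothetical ephys + video session listed in an experiment description file):
#   the expected flags are 'ephys_data_transferred.flag' and 'video_data_transferred.flag'; once both
#   exist they are removed and a single 'raw_session.flag' is written in the session folder.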

970
def rename_ephys_files(session_folder: str) -> None:
1✔
971
    """rename_ephys_files is system agnostic (3A, 3B1, 3B2).
972
    Renames all ephys files to Alyx compatible filenames. Uses get_new_filename.
973

974
    :param session_folder: Session folder path
975
    :type session_folder: str
976
    :return: None - Changes names of files on filesystem
977
    :rtype: None
978
    """
979
    session_path = Path(session_folder)
1✔
980
    ap_files = session_path.rglob("*.ap.*")
1✔
981
    lf_files = session_path.rglob("*.lf.*")
1✔
982
    nidq_files = session_path.rglob("*.nidq.*")
1✔
983

984
    for apf in ap_files:
1✔
985
        new_filename = get_new_filename(apf.name)
1✔
986
        shutil.move(str(apf), str(apf.parent / new_filename))
1✔
987

988
    for lff in lf_files:
1✔
989
        new_filename = get_new_filename(lff.name)
1✔
990
        shutil.move(str(lff), str(lff.parent / new_filename))
1✔
991

992
    for nidqf in nidq_files:
1✔
993
        # Ignore wiring files: these are usually created after the file renaming however this
994
        # function may be called a second time upon failed transfer.
995
        if 'wiring' in nidqf.name:
1✔
996
            continue
×
997
        new_filename = get_new_filename(nidqf.name)
1✔
998
        shutil.move(str(nidqf), str(nidqf.parent / new_filename))
1✔
999

1000

1001
def get_new_filename(filename: str) -> str:
1✔
1002
    """get_new_filename is system agnostic (3A, 3B1, 3B2).
1003
    Gets an alyx compatible filename from any spikeglx ephys file.
1004

1005
    :param filename: Name of an ephys file
1006
    :return: New name for ephys file
1007
    """
1008
    root = "_spikeglx_ephysData"
1✔
1009
    parts = filename.split('.')
1✔
1010
    if len(parts) < 3:
1✔
1011
        raise ValueError(fr'unrecognized filename "{filename}"')
1✔
1012
    pattern = r'.*(?P<gt>_g\d+_t\d+)'
1✔
1013
    if not (match := re.match(pattern, parts[0])):
1✔
1014
        raise ValueError(fr'unrecognized filename "{filename}"')
1✔
1015
    return '.'.join([root + match.group(1), *parts[1:]])
1✔
1016

1017
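# Illustrative renaming (hypothetical input file name):
#   >>> get_new_filename('xxx_g0_t0.imec0.ap.bin')
#   '_spikeglx_ephysData_g0_t0.imec0.ap.bin'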

1018
def move_ephys_files(session_folder: str) -> None:
1✔
1019
    """move_ephys_files is system agnostic (3A, 3B1, 3B2).
1020
    Moves all properly named ephys files to appropriate locations for transfer.
1021
    Use rename_ephys_files function before this one.
1022

1023
    :param session_folder: Session folder path
1024
    :type session_folder: str
1025
    :return: None - Moves files on filesystem
1026
    :rtype: None
1027
    """
1028
    session_path = Path(session_folder)
1✔
1029
    raw_ephys_data_path = session_path / "raw_ephys_data"
1✔
1030

1031
    imec_files = session_path.rglob("*.imec*")
1✔
1032
    for imf in imec_files:
1✔
1033
        # For 3B system probe0x == imecx
1034
        probe_number = re.match(r'_spikeglx_ephysData_g\d_t\d.imec(\d+).*', imf.name)
1✔
1035
        if not probe_number:
1✔
1036
            # For 3A system imec files must be in a 'probexx' folder
1037
            probe_label = re.search(r'probe\d+', str(imf))
1✔
1038
            assert probe_label, f'Cannot assign probe number to file {imf}'
1✔
1039
            probe_label = probe_label.group()
1✔
1040
        else:
1041
            probe_number, = probe_number.groups()
1✔
1042
            probe_label = f'probe{probe_number.zfill(2)}'
1✔
1043
        raw_ephys_data_path.joinpath(probe_label).mkdir(exist_ok=True)
1✔
1044
        shutil.move(imf, raw_ephys_data_path.joinpath(probe_label, imf.name))
1✔
1045

1046
    # NIDAq files (3B system only)
1047
    nidq_files = session_path.rglob("*.nidq.*")
1✔
1048
    for nidqf in nidq_files:
1✔
1049
        shutil.move(str(nidqf), str(raw_ephys_data_path / nidqf.name))
1✔
1050
    # Delete all empty folders recursively
1051
    delete_empty_folders(raw_ephys_data_path, dry=False, recursive=True)
1✔
1052

1053

1054
def create_custom_ephys_wirings(iblscripts_folder: str):
1✔
1055
    iblscripts_path = Path(iblscripts_folder)
×
1056
    PARAMS = load_ephyspc_params()
×
1057
    probe_set = set(v for k, v in PARAMS.items() if k.startswith('PROBE_TYPE'))
×
1058

1059
    params_path = iblscripts_path.parent / "iblscripts_params"
×
1060
    params_path.mkdir(parents=True, exist_ok=True)
×
1061
    wirings_path = iblscripts_path / "deploy" / "ephyspc" / "wirings"
×
1062
    for k, v in PARAMS.items():
×
1063
        if not k.startswith('PROBE_TYPE_'):
×
1064
            continue
×
1065
        probe_label = f'probe{k[-2:]}'
×
1066
        if v not in ('3A', '3B'):
×
1067
            raise ValueError(f'Unsupported probe type "{v}"')
×
1068
        shutil.copy(
×
1069
            wirings_path / f"{v}.wiring.json", params_path / f"{v}_{probe_label}.wiring.json"
1070
        )
1071
        print(f"Created {v}.wiring.json in {params_path} for {probe_label}")
×
1072
    if "3B" in probe_set:
×
1073
        shutil.copy(wirings_path / "nidq.wiring.json", params_path / "nidq.wiring.json")
×
1074
        print(f"Created nidq.wiring.json in {params_path}")
×
1075
    print(f"\nYou can now modify your wiring files from folder {params_path}")
×
1076

1077

1078
def get_iblscripts_folder():
1✔
1079
    return str(Path().cwd().parent.parent)
×
1080

1081

1082
def copy_wiring_files(session_folder, iblscripts_folder):
1✔
1083
    """Run after moving files to probe folders"""
1084
    PARAMS = load_ephyspc_params()
×
1085
    if PARAMS["PROBE_TYPE_00"] != PARAMS["PROBE_TYPE_01"]:
×
1086
        print("Having different probe types is not supported")
×
1087
        raise NotImplementedError()
×
1088
    session_path = Path(session_folder)
×
1089
    iblscripts_path = Path(iblscripts_folder)
×
1090
    iblscripts_params_path = iblscripts_path.parent / "iblscripts_params"
×
1091
    wirings_path = iblscripts_path / "deploy" / "ephyspc" / "wirings"
×
1092
    termination = '.wiring.json'
×
1093
    # Determine system
1094
    ephys_system = PARAMS["PROBE_TYPE_00"]
×
1095
    # Define where to get the files from (determine if custom wiring applies)
1096
    src_wiring_path = iblscripts_params_path if iblscripts_params_path.exists() else wirings_path
×
1097
    probe_wiring_file_path = src_wiring_path / f"{ephys_system}{termination}"
×
1098

1099
    if ephys_system == "3B":
×
1100
        # Copy nidq file
1101
        nidq_files = session_path.rglob("*.nidq.bin")
×
1102
        for nidqf in nidq_files:
×
1103
            nidq_wiring_name = ".".join(str(nidqf.name).split(".")[:-1]) + termination
×
1104
            shutil.copy(
×
1105
                str(src_wiring_path / f"nidq{termination}"),
1106
                str(session_path / "raw_ephys_data" / nidq_wiring_name),
1107
            )
1108
    # If system is either (3A OR 3B) copy a wiring file for each ap.bin file
1109
    for binf in session_path.rglob("*.ap.bin"):
×
1110
        probe_label = re.search(r'probe\d+', str(binf))
×
1111
        if probe_label:
×
1112
            wiring_name = ".".join(str(binf.name).split(".")[:-2]) + termination
×
1113
            dst_path = session_path / "raw_ephys_data" / probe_label.group() / wiring_name
×
1114
            shutil.copy(probe_wiring_file_path, dst_path)
×
1115

1116

1117
def multi_parts_flags_creation(root_paths: Union[list, str, Path]) -> List[Path]:
1✔
1118
    """
1119
    Creates the sequence files to run spike sorting in batches.
1120
    A sequence file is a json file with the following fields:
1121
     sha1: a unique hash of the metafiles involved
1122
     probe: a string with the probe name
1123
     index: the index within the sequence
1124
     nrecs: the length of the sequence
1125
     files: a list of files
1126
    :param root_paths:
1127
    :return:
1128
    """
1129
    from one.alf import io as alfio
1✔
1130
    # "001/raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.ap.meta",
1131
    if isinstance(root_paths, str) or isinstance(root_paths, Path):
1✔
1132
        root_paths = [Path(root_paths)]  # ensure a Path so rglob works on string input
1✔
1133
    recordings = {}
1✔
1134
    for root_path in root_paths:
1✔
1135
        for meta_file in root_path.rglob("*.ap.meta"):
1✔
1136
            # we want to make sure that the file is just under session_path/raw_ephys_data/{probe_label}
1137
            session_path = alfio.files.get_session_path(meta_file)
1✔
1138
            raw_ephys_path = session_path.joinpath('raw_ephys_data')
1✔
1139
            if meta_file.parents[1] != raw_ephys_path:
1✔
1140
                log.warning(f"{meta_file} is not in a probe directory and will be skipped")
×
1141
                continue
×
1142
            # stack the meta-file in the probe label key of the recordings dictionary
1143
            plabel = meta_file.parts[-2]
1✔
1144
            recordings[plabel] = recordings.get(plabel, []) + [meta_file]
1✔
1145
    # once we have all of the files
1146
    for k in recordings:
1✔
1147
        nrecs = len(recordings[k])
1✔
1148
        recordings[k].sort()
1✔
1149
        # the identifier of the overarching recording sequence is the hash of hashes of the files
1150
        m = hashlib.sha1()
1✔
1151
        for i, meta_file in enumerate(recordings[k]):
1✔
1152
            hash = hashfile.sha1(meta_file)
1✔
1153
            m.update(hash.encode())
1✔
1154
        # writes the sequence files
1155
        for i, meta_file in enumerate(recordings[k]):
1✔
1156
            sequence_file = meta_file.parent.joinpath(meta_file.name.replace('ap.meta', 'sequence.json'))
1✔
1157
            with open(sequence_file, 'w+') as fid:
1✔
1158
                json.dump(dict(sha1=m.hexdigest(), probe=k, index=i, nrecs=len(recordings[k]),
1✔
1159
                               files=list(map(str, recordings[k]))), fid)
1160
            log.info(f"{k}: {i}/{nrecs} written sequence file {recordings}")
1✔
1161
    return recordings
1✔
1162

1163

1164
class WindowsInhibitor:
1✔
1165
    """Prevent OS sleep/hibernate in windows; code from:
1✔
1166
    https://github.com/h3llrais3r/Deluge-PreventSuspendPlus/blob/master/preventsuspendplus/core.py
1167
    API documentation:
1168
    https://msdn.microsoft.com/en-us/library/windows/desktop/aa373208(v=vs.85).aspx"""
1169
    ES_CONTINUOUS = 0x80000000
1✔
1170
    ES_SYSTEM_REQUIRED = 0x00000001
1✔
1171

1172
    @staticmethod
1✔
1173
    def _set_thread_execution_state(state: int) -> None:
1✔
1174
        result = ctypes.windll.kernel32.SetThreadExecutionState(state)
×
1175
        if result == 0:
×
1176
            log.error("Failed to set thread execution state.")
×
1177

1178
    @staticmethod
1✔
1179
    def inhibit(quiet: bool = False):
1✔
1180
        if quiet:
×
1181
            log.debug("Preventing Windows from going to sleep")
×
1182
        else:
1183
            print("Preventing Windows from going to sleep")
×
1184
        WindowsInhibitor._set_thread_execution_state(WindowsInhibitor.ES_CONTINUOUS | WindowsInhibitor.ES_SYSTEM_REQUIRED)
×
1185

1186
    @staticmethod
1✔
1187
    def uninhibit(quiet: bool = False):
1✔
1188
        if quiet:
×
1189
            log.debug("Allowing Windows to go to sleep")
×
1190
        else:
1191
            print("Allowing Windows to go to sleep")
×
1192
        WindowsInhibitor._set_thread_execution_state(WindowsInhibitor.ES_CONTINUOUS)
×
1193

1194

1195
def sleepless(func: Callable[..., Any]) -> Callable[..., Any]:
1✔
1196
    """
1197
    Decorator to ensure that the system doesn't enter sleep or idle mode during a long-running task.
1198

1199
    This decorator wraps a function and sets the thread execution state to prevent
1200
    the system from entering sleep or idle mode while the decorated function is
1201
    running.
1202

1203
    Parameters
1204
    ----------
1205
    func : callable
1206
        The function to decorate.
1207

1208
    Returns
1209
    -------
1210
    callable
1211
        The decorated function.
1212
    """
1213

1214
    @wraps(func)
1✔
1215
    def inner(*args, **kwargs) -> Any:
1✔
1216
        if os.name == 'nt':
1✔
1217
            WindowsInhibitor().inhibit(quiet=True)
×
1218
        result = func(*args, **kwargs)
1✔
1219
        if os.name == 'nt':
1✔
1220
            WindowsInhibitor().uninhibit(quiet=True)
×
1221
        return result
1✔
1222
    return inner
1✔
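# Illustrative usage (hypothetical long-running copy task):
#   @sleepless
#   def copy_large_dataset(src, dst):
#       shutil.copytree(src, dst)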