int-brain-lab / ibllib / 1761696499260742

05 Oct 2023 09:46AM UTC · coverage: 55.27% (-1.4%) from 56.628%
Pull Request #655: add @sleepless decorator (bimac, continuous-integration/UCL)

21 of 21 new or added lines in 1 file covered. (100.0%)
10330 of 18690 relevant lines covered (55.27%)
0.55 hits per line

Source file: /ibllib/pipes/misc.py — 56.6% covered
import ctypes
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import time
import logging
from pathlib import Path
from typing import Union, List, Callable, Any
from inspect import signature
import uuid
import socket
import traceback

import spikeglx
from iblutil.io import hashfile, params
from iblutil.util import range_str
from one.alf.files import get_session_path
from one.alf.spec import is_uuid_string, is_session_path, describe
from one.api import ONE

import ibllib.io.flags as flags
import ibllib.io.raw_data_loaders as raw
from ibllib.io.misc import delete_empty_folders
import ibllib.io.session_params as sess_params

log = logging.getLogger(__name__)

DEVICE_FLAG_MAP = {'neuropixel': 'ephys',
                   'cameras': 'video',
                   'widefield': 'widefield',
                   'sync': 'sync'}

def subjects_data_folder(folder: Path, rglob: bool = False) -> Path:
    """Given a root_data_folder, try to find a 'Subjects' data folder.
    If a Subjects folder is passed, return it directly."""
    if not isinstance(folder, Path):
        folder = Path(folder)
    if rglob:
        func = folder.rglob
    else:
        func = folder.glob

    # Try to find a Subjects folder one level down
    if folder.name.lower() != 'subjects':
        # Search the child folders for a 'Subjects' folder (recursively if rglob=True)
        spath = [x for x in func('*') if x.name.lower() == 'subjects']
        if not spath:
            raise ValueError('No "Subjects" folder in children folders')
        elif len(spath) > 1:
            raise ValueError(f'Multiple "Subjects" folders in children folders: {spath}')
        else:
            folder = folder / spath[0]

    return folder
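
# Illustrative usage (hypothetical paths, not part of the original module): given a rig data root
# such as D:\iblrig_data containing a 'Subjects' directory, the helper resolves it either way:
#     >>> subjects_data_folder(r'D:\iblrig_data')            # doctest: +SKIP
#     WindowsPath('D:/iblrig_data/Subjects')
#     >>> subjects_data_folder(r'D:\iblrig_data\Subjects')   # doctest: +SKIP
#     WindowsPath('D:/iblrig_data/Subjects')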


def cli_ask_default(prompt: str, default: str):
    """
    Prompt the user for input, display the default option and return user input or default
    :param prompt: String to display to user
    :param default: The default value to return if user doesn't enter anything
    :return: User input or default
    """
    return input(f'{prompt} [default: {default}]: ') or default


def cli_ask_options(prompt: str, options: list, default_idx: int = 0) -> str:
    parsed_options = [str(x) for x in options]
    if default_idx is not None:
        parsed_options[default_idx] = f"[{parsed_options[default_idx]}]"
    options_str = " (" + " | ".join(parsed_options) + ")> "
    ans = input(prompt + options_str) or str(options[default_idx])
    if ans not in [str(x) for x in options]:
        return cli_ask_options(prompt, options, default_idx=default_idx)
    return ans


def behavior_exists(session_path: str, include_devices=False) -> bool:
    """
    Returns True if the session has a task behaviour folder
    :param session_path:
    :return:
    """
    session_path = Path(session_path)
    if include_devices and session_path.joinpath("_devices").exists():
        return True
    if session_path.joinpath("raw_behavior_data").exists():
        return True
    return any(session_path.glob('raw_task_data_*'))


def check_transfer(src_session_path, dst_session_path):
    """
    Check all the files in the source directory match those in the destination directory. Function
    will throw assertion errors/exceptions if number of files do not match, file names do not
    match, or if file sizes do not match.

    :param src_session_path: The source directory that was copied
    :param dst_session_path: The copy target directory
    """
    src_files = sorted([x for x in Path(src_session_path).rglob('*') if x.is_file()])
    dst_files = sorted([x for x in Path(dst_session_path).rglob('*') if x.is_file()])
    assert len(src_files) == len(dst_files), 'Not all files transferred'
    for s, d in zip(src_files, dst_files):
        assert s.name == d.name, 'file name mismatch'
        assert s.stat().st_size == d.stat().st_size, 'file size mismatch'


def rename_session(session_path: str, new_subject=None, new_date=None, new_number=None,
                   ask: bool = False) -> Path:
    """Rename a session.  Prompts the user for the new subject name, date and number and then moves
    the session path to the new session path.

    :param session_path: A session path to rename
    :type session_path: str
    :param new_subject: A new subject name, if none provided, the user is prompted for one
    :param new_date: A new session date, if none provided, the user is prompted for one
    :param new_number: A new session number, if none provided, the user is prompted for one
    :param ask: if True, prompt the user for each value regardless of the arguments passed, defaults to False
    :type ask: bool
    :return: The renamed session path
    :rtype: Path
    """
    session_path = get_session_path(session_path)
    if session_path is None:
        raise ValueError('Session path not valid ALF session folder')
    mouse = session_path.parts[-3]
    date = session_path.parts[-2]
    sess = session_path.parts[-1]
    new_mouse = new_subject or mouse
    new_date = new_date or date
    new_sess = new_number or sess
    if ask:
        new_mouse = input(f"Please insert subject NAME [current value: {mouse}]> ")
        new_date = input(f"Please insert new session DATE [current value: {date}]> ")
        new_sess = input(f"Please insert new session NUMBER [current value: {sess}]> ")

    new_session_path = Path(*session_path.parts[:-3]).joinpath(new_mouse, new_date,
                                                               new_sess.zfill(3))
    assert is_session_path(new_session_path), 'invalid subject, date or number'

    if new_session_path.exists():
        ans = input(f'Warning: session path {new_session_path} already exists.\nWould you like to '
                    f'move {new_session_path} to a backup directory? [y/N] ')
        if (ans or 'n').lower() in ['n', 'no']:
            print(f'Manual intervention required, data exists in the following directory: '
                  f'{session_path}')
            return
        if backup_session(new_session_path):
            print(f'Backup was successful, removing directory {new_session_path}...')
            shutil.rmtree(str(new_session_path), ignore_errors=True)
    shutil.move(str(session_path), str(new_session_path))
    print(session_path, "--> renamed to:")
    print(new_session_path)

    return new_session_path
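
# Illustrative usage (hypothetical session path, not part of the original module); the subject,
# date and number keep their current values unless overridden:
#     >>> rename_session('/data/Subjects/SW_023/2023-10-05/001', new_number='2')  # doctest: +SKIP
#     /data/Subjects/SW_023/2023-10-05/001 --> renamed to:
#     /data/Subjects/SW_023/2023-10-05/002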


def backup_session(session_path):
    """Used to move the contents of a session to a backup folder, likely before the folder is
    removed.

    :param session_path: A session path to be backed up
    :return: True if the directory was backed up, False (or an exception) if something went wrong
    :rtype: Bool
    """
    bk_session_path = Path()
    if Path(session_path).exists():
        try:
            bk_session_path = Path(*session_path.parts[:-4]).joinpath(
                "Subjects_backup_renamed_sessions", Path(*session_path.parts[-3:]))
            Path(bk_session_path.parent).mkdir(parents=True)
            print(f"Created path: {bk_session_path.parent}")
            # shutil.copytree(session_path, bk_session_path, dirs_exist_ok=True)
            shutil.copytree(session_path, bk_session_path)  # python 3.7 compatibility
            print(f"Copied contents from {session_path} to {bk_session_path}")
            return True
        except FileExistsError:
            log.error(f"A backup session for the given path already exists: {bk_session_path}, "
                      f"manual intervention is necessary.")
            raise
        except shutil.Error:
            log.error(f'Some kind of copy error occurred when moving files from {session_path} to '
                      f'{bk_session_path}')
            log.error(shutil.Error)
    else:
        log.error(f"The given session path does not exist: {session_path}")
        return False


def copy_with_check(src, dst, **kwargs):
    dst = Path(dst)
    if dst.exists() and Path(src).stat().st_size == dst.stat().st_size:
        return dst
    elif dst.exists():
        dst.unlink()
    return shutil.copy2(src, dst, **kwargs)


def transfer_session_folders(local_sessions: list, remote_subject_folder, subfolder_to_transfer):
    """
    Determine which local session folders should be transferred to which remote session folders,
    prompting the user when necessary.

    Parameters
    ----------
    local_sessions : list
        Required list of local session folder paths to sync to local server.
    remote_subject_folder : str, pathlib.Path
        The remote location of the subject folder (typically pulled from the params).
    subfolder_to_transfer : str
        Which subfolder to sync

    Returns
    -------
    list of tuples
        For each session, a tuple of (source, destination) of attempted file transfers.
    list of bool
        A boolean True/False for success/failure of the transfer.
    """
    transfer_list = []  # list of sessions to transfer
    skip_list = ""  # "list" of sessions to skip and the reason for the skip
    # Iterate through all local sessions in the given list
    for local_session in local_sessions:
        # Set expected remote_session location and perform simple error state checks
        remote_session = remote_subject_folder.joinpath(*local_session.parts[-3:])
        # Skip session if ...
        if subfolder_to_transfer:
            if not local_session.joinpath(subfolder_to_transfer).exists():
                msg = f"{local_session} - skipping session, no '{subfolder_to_transfer}' folder found locally"
                log.warning(msg)
                skip_list += msg + "\n"
                continue
        if not remote_session.parent.exists():
            msg = f"{local_session} - no matching remote session date folder found for the given local session"
            log.info(msg)
            skip_list += msg + "\n"
            continue
        if not behavior_exists(remote_session):
            msg = f"{local_session} - skipping session, no behavior data found in remote folder {remote_session}"
            log.warning(msg)
            skip_list += msg + "\n"
            continue

        # Determine if there are multiple session numbers from the date path
        local_sessions_for_date = get_session_numbers_from_date_path(local_session.parent)
        remote_sessions_for_date = get_session_numbers_from_date_path(remote_session.parent)
        remote_session_pick = None
        if len(local_sessions_for_date) > 1 or len(remote_sessions_for_date) > 1:
            # Format folder size output for end user to review
            local_session_numbers_with_size = remote_session_numbers_with_size = ""
            for lsfd in local_sessions_for_date:
                size_in_gb = round(get_directory_size(local_session.parent / lsfd, in_gb=True), 2)
                local_session_numbers_with_size += lsfd + " (" + str(size_in_gb) + " GB)\n"
            for rsfd in remote_sessions_for_date:
                size_in_gb = round(get_directory_size(remote_session.parent / rsfd, in_gb=True), 2)
                remote_session_numbers_with_size += rsfd + " (" + str(size_in_gb) + " GB)\n"
            log.info(f"\n\nThe following local session folder(s) were found on this acquisition PC:\n\n"
                     f"{''.join(local_session_numbers_with_size)}\nThe following remote session folder(s) were found on the "
                     f"server:\n\n{''.join(remote_session_numbers_with_size)}\n")

            def _remote_session_picker(sessions_for_date):
                resp = "s"
                resp_invalid = True
                while resp_invalid:  # loop until valid user input
                    resp = input(f"\n\n--- USER INPUT NEEDED ---\nWhich REMOTE session number would you like to transfer your "
                                 f"local session to? Options {range_str(map(int, sessions_for_date))} or "
                                 f"[s]kip/[h]elp/[e]xit> ").strip().lower()
                    if resp == "h":
                        print("An example session filepath:\n")
                        describe("number")  # Explain what a session number is
                        input("Press enter to continue")
                    elif resp == "s" or resp == "e":  # exit loop
                        resp_invalid = False
                    elif len(resp) <= 3:
                        resp_invalid = False if [i for i in sessions_for_date if int(resp) == int(i)] else None
                    else:
                        print("Invalid response. Please try again.")
                return resp

            log.info(f"Evaluation for local session "
                     f"{local_session.parts[-3]}/{local_session.parts[-2]}/{local_session.parts[-1]}...")
            user_response = _remote_session_picker(remote_sessions_for_date)
            if user_response == "s":
                msg = f"{local_session} - Local session skipped due to user input"
                log.info(msg)
                skip_list += msg + "\n"
                continue
            elif user_response == "e":
                log.info("Exiting, no files transferred.")
                return
            else:
                remote_session_pick = remote_session.parent / user_response.zfill(3)

        # Append to the transfer_list
        transfer_tuple = (local_session, remote_session_pick) if remote_session_pick else (local_session, remote_session)
        transfer_list.append(transfer_tuple)
        log.info(f"{transfer_tuple[0]}, {transfer_tuple[1]} - Added to the transfer list")

    # Verify that the number of local transfer_list entries match the number of remote transfer_list entries
    if len(transfer_list) != len(set(dst for _, dst in transfer_list)):
        raise RuntimeError(
            "An invalid combination of sessions were picked; the most likely cause of this error is multiple local "
            "sessions being selected for a single remote session. Please rerun the script."
        )

    # Call rsync/rdiff function for every entry in the transfer list
    success = []
    for src, dst in transfer_list:
        if subfolder_to_transfer:
            success.append(rsync_paths(src / subfolder_to_transfer, dst / subfolder_to_transfer))
        else:
            success.append(rsync_paths(src, dst))
        if not success[-1]:
            log.error("File transfer failed, check log for reason.")

    # Notify the user of any transfers that were skipped
    log.warning(f"Video transfers that were not completed:\n\n{skip_list}") if skip_list else log.info("No transfers skipped.")
    return transfer_list, success


def transfer_folder(src: Path, dst: Path, force: bool = False) -> None:
    """Functionality has been replaced by the transfer_session_folders function."""
    print(f"Attempting to copy:\n{src}\n--> {dst}")
    if force:
        print(f"Removing {dst}")
        shutil.rmtree(dst, ignore_errors=True)
    else:
        try:
            check_transfer(src, dst)
            print("All files already copied, use force=True to re-copy")
            return
        except AssertionError:
            pass
    print(f"Copying all files:\n{src}\n--> {dst}")
    # rsync_folder(src, dst, '**transfer_me.flag')
    if sys.version_info.minor < 8:
        # dirs_exist_ok kwarg not supported in < 3.8
        shutil.rmtree(dst, ignore_errors=True)
        shutil.copytree(src, dst, copy_function=copy_with_check)
    else:
        shutil.copytree(src, dst, dirs_exist_ok=True, copy_function=copy_with_check)
    # If folder was created delete the src_flag_file
    if check_transfer(src, dst) is None:
        print("All files copied")
    # rdiff-backup --compare /tmp/tmpw9o1zgn0 /tmp/tmp82gg36rm
    # No changes found.  Directory matches archive data.


def load_params_dict(params_fname: str) -> dict:
    params_fpath = Path(params.getfile(params_fname))
    if not params_fpath.exists():
        return None
    with open(params_fpath, "r") as f:
        out = json.load(f)
    return out


def load_videopc_params():
    if not load_params_dict("videopc_params"):
        create_videopc_params()
    return load_params_dict("videopc_params")


def load_ephyspc_params():
    if not load_params_dict("ephyspc_params"):
        create_ephyspc_params()
    return load_params_dict("ephyspc_params")


def create_basic_transfer_params(param_str='transfer_params', local_data_path=None,
                                 remote_data_path=None, clobber=False, **kwargs):
    """Create some basic parameters common to all acquisition rigs.

    Namely prompt user for the local root data path and the remote (lab server) data path.
    NB: All params stored in uppercase by convention.

    Parameters
    ----------
    param_str : str
        The name of the parameters to load/save.
    local_data_path : str, pathlib.Path
        The local root data path, stored with the DATA_FOLDER_PATH key.  If None, user is prompted.
    remote_data_path : str, pathlib.Path, bool
        The remote root data path, stored with the REMOTE_DATA_FOLDER_PATH key.  If None, user is prompted.
        If False, the REMOTE_DATA_FOLDER_PATH key is not updated, or is set to False if clobber = True.
    clobber : bool
        If True, any parameters in existing parameter file not found as keyword args will be removed,
        otherwise the user is prompted for these also.
    **kwargs
        Extra parameters to set. If value is None, the user is prompted.

    Returns
    -------
    dict
        The parameters written to disc.

    Examples
    --------
    Set up basic transfer parameters for modality acquisition PC

    >>> par = create_basic_transfer_params()

    Set up basic transfer parameters without prompting the user

    >>> par = create_basic_transfer_params(
    ...     local_data_path='/iblrig_data/Subjects',
    ...     remote_data_path='/mnt/iblserver.champalimaud.pt/ibldata/Subjects')

    Prompt user for extra parameter using custom prompt (will call function with current default)

    >>> from functools import partial
    >>> par = create_basic_transfer_params(
    ...     custom_arg=partial(cli_ask_default, 'Please enter custom arg value'))

    Set up with no remote path (NB: if not the first time, use clobber=True to save param key)

    >>> par = create_basic_transfer_params(remote_data_path=False)

    """
    parameters = params.as_dict(params.read(param_str, {})) or {}
    if local_data_path is None:
        local_data_path = parameters.get('DATA_FOLDER_PATH')
        if not local_data_path or clobber:
            local_data_path = cli_ask_default("Where's your LOCAL 'Subjects' data folder?", local_data_path)
    parameters['DATA_FOLDER_PATH'] = local_data_path

    if remote_data_path is None:
        remote_data_path = parameters.get('REMOTE_DATA_FOLDER_PATH')
        if remote_data_path in (None, '') or clobber:
            remote_data_path = cli_ask_default("Where's your REMOTE 'Subjects' data folder?", remote_data_path)
    if remote_data_path is not False:
        parameters['REMOTE_DATA_FOLDER_PATH'] = remote_data_path
    elif 'REMOTE_DATA_FOLDER_PATH' not in parameters or clobber:
        parameters['REMOTE_DATA_FOLDER_PATH'] = False  # Always assume no remote path

    # Deal with extraneous parameters
    for k, v in kwargs.items():
        if callable(v):  # expect function handle with default value as input
            n_pars = len(signature(v).parameters)
            parameters[k.upper()] = v(parameters.get(k.upper())) if n_pars > 0 else v()
        elif v is None:  # generic prompt for key
            parameters[k.upper()] = cli_ask_default(
                f'Enter a value for parameter {k.upper()}', parameters.get(k.upper())
            )
        else:  # assign value to parameter
            parameters[k.upper()] = str(v)

    defined = list(map(str.upper, ('DATA_FOLDER_PATH', 'REMOTE_DATA_FOLDER_PATH', 'TRANSFER_LABEL', *kwargs.keys())))
    if clobber:
        # Delete any parameters in parameter dict that were not passed as keyword args into function
        parameters = {k: v for k, v in parameters.items() if k in defined}
    else:
        # Prompt for any other parameters that weren't passed into function
        for k in filter(lambda x: x not in defined, map(str.upper, parameters.keys())):
            parameters[k] = cli_ask_default(f'Enter a value for parameter {k}', parameters.get(k))

    if 'TRANSFER_LABEL' not in parameters:
        parameters['TRANSFER_LABEL'] = f'{socket.gethostname()}_{uuid.getnode()}'

    # Write parameters
    params.write(param_str, parameters)
    return parameters


def create_videopc_params(force=False, silent=False):
    if Path(params.getfile("videopc_params")).exists() and not force:
        print(f"{params.getfile('videopc_params')} exists already, exiting...")
        print(Path(params.getfile("videopc_params")).exists())
        return
    if silent:
        data_folder_path = r"D:\iblrig_data\Subjects"
        remote_data_folder_path = r"\\iblserver.champalimaud.pt\ibldata\Subjects"
        body_cam_idx = 0
        left_cam_idx = 1
        right_cam_idx = 2
    else:
        data_folder_path = cli_ask_default(
            r"Where's your LOCAL 'Subjects' data folder?", r"D:\iblrig_data\Subjects"
        )
        remote_data_folder_path = cli_ask_default(
            r"Where's your REMOTE 'Subjects' data folder?",
            r"\\iblserver.champalimaud.pt\ibldata\Subjects",
        )
        body_cam_idx = cli_ask_default("Please select the index of the BODY camera", "0")
        left_cam_idx = cli_ask_default("Please select the index of the LEFT camera", "1")
        right_cam_idx = cli_ask_default("Please select the index of the RIGHT camera", "2")

    param_dict = {
        "DATA_FOLDER_PATH": data_folder_path,
        "REMOTE_DATA_FOLDER_PATH": remote_data_folder_path,
        "BODY_CAM_IDX": body_cam_idx,
        "LEFT_CAM_IDX": left_cam_idx,
        "RIGHT_CAM_IDX": right_cam_idx,
    }
    params.write("videopc_params", param_dict)
    print(f"Created {params.getfile('videopc_params')}")
    print(param_dict)
    return param_dict


def create_ephyspc_params(force=False, silent=False):
    if Path(params.getfile("ephyspc_params")).exists() and not force:
        print(f"{params.getfile('ephyspc_params')} exists already, exiting...")
        print(Path(params.getfile("ephyspc_params")).exists())
        return
    if silent:
        data_folder_path = r"D:\iblrig_data\Subjects"
        remote_data_folder_path = r"\\iblserver.champalimaud.pt\ibldata\Subjects"
        probe_types = {"PROBE_TYPE_00": "3A", "PROBE_TYPE_01": "3B"}
    else:
        data_folder_path = cli_ask_default(
            r"Where's your LOCAL 'Subjects' data folder?", r"D:\iblrig_data\Subjects"
        )
        remote_data_folder_path = cli_ask_default(
            r"Where's your REMOTE 'Subjects' data folder?",
            r"\\iblserver.champalimaud.pt\ibldata\Subjects",
        )
        n_probes = int(cli_ask_default("How many probes are you using?", '2'))
        assert 100 > n_probes > 0, 'Please enter a number between 1 and 99 inclusive'
        probe_types = {}
        for i in range(n_probes):
            probe_types[f'PROBE_TYPE_{i:02}'] = cli_ask_options(
                f"What's the type of PROBE {i:02}?", ["3A", "3B"])
    param_dict = {
        "DATA_FOLDER_PATH": data_folder_path,
        "REMOTE_DATA_FOLDER_PATH": remote_data_folder_path,
        **probe_types
    }
    params.write("ephyspc_params", param_dict)
    print(f"Created {params.getfile('ephyspc_params')}")
    print(param_dict)
    return param_dict


def rdiff_install() -> bool:
    """
    For Windows:
    * if the rdiff-backup executable does not already exist on the system
      * downloads rdiff-backup zip file
      * copies the executable to the C:\tools folder

    For Linux/Mac:
    * runs a pip install rdiff-backup

    Returns:
        True when install is successful, False when an error is encountered
    """
    if os.name == "nt":
        # ensure tools folder exists
        tools_folder = "C:\\tools\\"
        os.mkdir(tools_folder) if not Path(tools_folder).exists() else None

        rdiff_cmd_loc = tools_folder + "rdiff-backup.exe"
        if not Path(rdiff_cmd_loc).exists():
            import requests
            import zipfile
            from io import BytesIO

            url = "https://github.com/rdiff-backup/rdiff-backup/releases/download/v2.0.5/rdiff-backup-2.0.5.win32exe.zip"
            log.info("Downloading zip file for rdiff-backup.")
            # Download the file by sending the request to the URL, ensure success by status code
            if requests.get(url).status_code == 200:
                log.info("Download complete for rdiff-backup zip file.")
                # extracting the zip file contents
                zipfile = zipfile.ZipFile(BytesIO(requests.get(url).content))
                zipfile.extractall("C:\\Temp")
                rdiff_folder_name = zipfile.namelist()[0]  # attempting a bit of future-proofing
                # move the executable to the C:\tools folder
                shutil.copy("C:\\Temp\\" + rdiff_folder_name + "rdiff-backup.exe", rdiff_cmd_loc)
                shutil.rmtree("C:\\Temp\\" + rdiff_folder_name)  # cleanup temp folder
                try:  # attempt to call the rdiff command
                    subprocess.run([rdiff_cmd_loc, "--version"], check=True)
                except (FileNotFoundError, subprocess.CalledProcessError) as e:
                    log.error("rdiff-backup installation did not complete.\n", e)
                    return False
                return True
            else:
                log.error("Download request status code not 200, something did not go as expected.")
                return False
    else:  # anything not Windows
        try:  # package should not be installed via the requirements.txt to accommodate windows
            subprocess.run(["pip", "install", "rdiff-backup"], check=True)
        except subprocess.CalledProcessError as e:
            log.error("rdiff-backup pip install did not complete.\n", e)
            return False
        return True


def get_directory_size(dir_path: Path, in_gb=False) -> float:
    """
    Used to determine total size of all files in a given session_path, including all child directories

    Args:
        dir_path (Path): path we want to get the total size of
        in_gb (bool): set to True for returned value to be in gigabytes

    Returns:
        float: sum of all files in the given directory path (in bytes by default, in GB if specified)
    """
    total = 0
    with iter(os.scandir(dir_path)) as it:
        for entry in it:
            if entry.is_file():
                total += entry.stat().st_size
            elif entry.is_dir():
                total += get_directory_size(entry.path)
    if in_gb:
        return total / 1024 / 1024 / 1024  # in GB
    return total  # in bytes


def get_session_numbers_from_date_path(date_path: Path) -> list:
    """
    Retrieves session numbers when given a date path

    Args:
        date_path (Path): path to a date folder, e.g. \\\\server\\some_lab\\Subjects\\subject\\date

    Returns:
        (list): Found sessions as a sorted list
    """
    contents = Path(date_path).glob('*')
    folders = filter(lambda x: x.is_dir() and re.match(r'^\d{3}$', x.name), contents)
    sessions_as_set = set(map(lambda x: x.name, folders))
    sessions_as_sorted_list = sorted(sessions_as_set)
    return sessions_as_sorted_list
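
# Illustrative example (hypothetical directory layout, not part of the original module): for a date
# folder containing '001', '002' and a stray 'snapshots' directory, only the zero-padded
# three-digit session folders are returned:
#     >>> get_session_numbers_from_date_path(Path(r'\\server\lab\Subjects\SW_023\2023-10-05'))  # doctest: +SKIP
#     ['001', '002']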


def rsync_paths(src: Path, dst: Path) -> bool:
    """
    Run the rsync algorithm via an rdiff-backup command on the provided source and destination paths.
    This function relies on the rdiff-backup package and is run from the command line, i.e. subprocess.run(). Full documentation
    can be found here - https://rdiff-backup.net/docs/rdiff-backup.1.html

    Parameters
    ----------
    src : Path
        source path that contains data to be transferred
    dst : Path
        destination path that will receive the transferred data

    Returns
    -------
    bool
        True for success, False for failure

    Raises
    ------
    FileNotFoundError, subprocess.CalledProcessError
    """
    # Set rdiff_cmd_loc based on OS type (assuming C:\tools is not in Windows PATH environ)
    rdiff_cmd_loc = "C:\\tools\\rdiff-backup.exe" if os.name == "nt" else "rdiff-backup"
    try:  # Check if rdiff-backup command is available
        subprocess.run([rdiff_cmd_loc, "--version"], check=True)
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
        if not rdiff_install():  # Attempt to install rdiff
            log.error("rdiff-backup command is unavailable, transfers can not continue.\n", e)
            raise

    log.info("Attempting to transfer data: " + str(src) + " -> " + str(dst))
    WindowsInhibitor().inhibit() if os.name == "nt" else None  # prevent Windows from going to sleep
    try:
        rsync_command = [rdiff_cmd_loc, "--verbosity", str(0),
                         "--create-full-path", "--backup-mode", "--no-acls", "--no-eas",
                         "--no-file-statistics", "--exclude", "**transfer_me.flag",
                         str(src), str(dst)]
        subprocess.run(rsync_command, check=True)
        time.sleep(1)  # give rdiff-backup a second to complete all logging operations
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
        log.error("Transfer failed with code %i.\n", e.returncode)
        if e.stderr:
            log.error(e.stderr)
        return False
    log.info("Validating transfer completed...")
    try:  # Validate the transfers succeeded
        rsync_validate = [rdiff_cmd_loc, "--verify", str(dst)]
        subprocess.run(rsync_validate, check=True)
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
        log.error(f"Validation for destination {dst} failed.\n", e)
        return False
    log.info("Cleaning up rdiff files...")
    shutil.rmtree(dst / "rdiff-backup-data")
    WindowsInhibitor().uninhibit() if os.name == 'nt' else None  # allow Windows to go to sleep
    return True
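
# For reference (illustrative, hypothetical paths): with src pointing at a local
# raw_video_data folder and dst at its server counterpart, the command list built above is
#     ['rdiff-backup', '--verbosity', '0', '--create-full-path', '--backup-mode', '--no-acls',
#      '--no-eas', '--no-file-statistics', '--exclude', '**transfer_me.flag', str(src), str(dst)]
# followed by a validation pass of ['rdiff-backup', '--verify', str(dst)].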


def confirm_ephys_remote_folder(local_folder=False, remote_folder=False, force=False, iblscripts_folder=False,
                                session_path=None):
    """
    :param local_folder: The full path to the local Subjects folder
    :param remote_folder: The full path to the remote Subjects folder
    :param force:
    :param iblscripts_folder:
    :return:
    """
    # FIXME: session_path can be relative
    pars = load_ephyspc_params()
    if not iblscripts_folder:
        import deploy
        iblscripts_folder = Path(deploy.__file__).parent.parent
    if not local_folder:
        local_folder = pars["DATA_FOLDER_PATH"]
    if not remote_folder:
        remote_folder = pars["REMOTE_DATA_FOLDER_PATH"]
    local_folder = Path(local_folder)
    remote_folder = Path(remote_folder)
    # Check for Subjects folder
    local_folder = subjects_data_folder(local_folder, rglob=True)
    remote_folder = subjects_data_folder(remote_folder, rglob=True)

    log.info(f"local folder: {local_folder}")
    log.info(f"remote folder: {remote_folder}")
    if session_path is None:
        src_session_paths = [x.parent for x in local_folder.rglob("transfer_me.flag")]
    else:
        src_session_paths = session_path if isinstance(session_path, list) else [session_path]

    if not src_session_paths:
        log.info("Nothing to transfer, exiting...")
        return
    for session_path in src_session_paths:
        log.info(f"Found : {session_path}")
    log.info(f"Found: {len(src_session_paths)} sessions to transfer, starting transferring now")

    for session_path in src_session_paths:
        log.info(f"Transferring session: {session_path}")
        # Rename ephys files
        # FIXME: if transfer has failed and wiring file is there renaming will fail!
        rename_ephys_files(str(session_path))
        # Move ephys files
        move_ephys_files(str(session_path))
        # Copy wiring files
        copy_wiring_files(str(session_path), iblscripts_folder)
        try:
            create_alyx_probe_insertions(str(session_path))
        except BaseException:
            log.error(traceback.print_exc())
            log.info("Probe creation failed, please create the probe insertions manually. Continuing transfer...")
        msg = f"Transfer {session_path} to {remote_folder} with the same name?"
        resp = input(msg + "\n[y]es/[r]ename/[s]kip/[e]xit\n ^\n> ") or "y"
        resp = resp.lower()
        log.info(resp)
        if resp not in ["y", "r", "s", "e", "yes", "rename", "skip", "exit"]:
            return confirm_ephys_remote_folder(
                local_folder=local_folder,
                remote_folder=remote_folder,
                force=force,
                iblscripts_folder=iblscripts_folder,
            )
        elif resp == "y" or resp == "yes":
            pass
        elif resp == "r" or resp == "rename":
            session_path = rename_session(session_path)
            if not session_path:
                continue
        elif resp == "s" or resp == "skip":
            continue
        elif resp == "e" or resp == "exit":
            return

        remote_session_path = remote_folder / Path(*session_path.parts[-3:])
        if not behavior_exists(remote_session_path, include_devices=True):
            log.error(f"No behavior folder found in {remote_session_path}: skipping session...")
            return
        # TODO: Check flagfiles on src and dst + alf dir in session folder then remove
        # Try catch? where catch condition is force transfer maybe
        transfer_folder(session_path / "raw_ephys_data", remote_session_path / "raw_ephys_data", force=force)
        # if behavior extract_me.flag exists remove it, because of ephys flag
        flag_file = session_path / "transfer_me.flag"
        if flag_file.exists():  # this file only exists for the iblrig v7 and lower
            flag_file.unlink()
            if (remote_session_path / "extract_me.flag").exists():
                (remote_session_path / "extract_me.flag").unlink()
            # Create remote flags
            create_ephys_transfer_done_flag(remote_session_path)
            check_create_raw_session_flag(remote_session_path)


def probe_labels_from_session_path(session_path: Union[str, Path]) -> List[str]:
    """
    Finds ephys probes according to the spikeglx metadata files. Only returns the first subfolder
    name under the raw_ephys_data folder, i.e. raw_ephys_data/probe00/copy_of_probe00 won't be returned.
    If there is a NP2.4 probe with several shanks, several probe labels are created.
    :param session_path:
    :return: list of strings
    """
    plabels = []
    raw_ephys_folder = Path(session_path).joinpath('raw_ephys_data')
    for meta_file in raw_ephys_folder.rglob('*.ap.meta'):
        if meta_file.parents[1] != raw_ephys_folder:
            continue
        meta = spikeglx.read_meta_data(meta_file)
        nshanks = spikeglx._get_nshanks_from_meta(meta)
        if nshanks > 1:
            for i in range(nshanks):
                plabels.append(meta_file.parts[-2] + 'abcdefghij'[i])
        else:
            plabels.append(meta_file.parts[-2])
    plabels.sort()
    return plabels
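
# Illustrative example (hypothetical layout, not part of the original module): for a session with
# raw_ephys_data/probe00 holding a 4-shank NP2.4 recording and raw_ephys_data/probe01 holding a
# single-shank recording, the returned labels would be
#     ['probe00a', 'probe00b', 'probe00c', 'probe00d', 'probe01']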


def create_alyx_probe_insertions(
    session_path: str,
    force: bool = False,
    one: object = None,
    model: str = None,
    labels: list = None,
):
    if one is None:
        one = ONE(cache_rest=None, mode='local')
    eid = session_path if is_uuid_string(session_path) else one.path2eid(session_path)
    if eid is None:
        log.warning("Session not found on Alyx: please create session before creating insertions")
    if model is None:
        probe_model = spikeglx.get_neuropixel_version_from_folder(session_path)
        pmodel = "3B2" if probe_model == "3B" else probe_model
    else:
        pmodel = model
    labels = labels or probe_labels_from_session_path(session_path)
    # create the qc fields in the json field
    qc_dict = {}
    qc_dict.update({"qc": "NOT_SET"})
    qc_dict.update({"extended_qc": {}})

    # create the dictionary
    insertions = []
    for plabel in labels:
        insdict = {"session": eid, "name": plabel, "model": pmodel, "json": qc_dict}
        # search for the corresponding insertion in Alyx
        alyx_insertion = one.alyx.get(f'/insertions?&session={eid}&name={plabel}', clobber=True)
        # if it doesn't exist, create it
        if len(alyx_insertion) == 0:
            alyx_insertion = one.alyx.rest("insertions", "create", data=insdict)
        else:
            iid = alyx_insertion[0]["id"]
            if force:
                alyx_insertion = one.alyx.rest("insertions", "update", id=iid, data=insdict)
            else:
                alyx_insertion = alyx_insertion[0]
        insertions.append(alyx_insertion)
    return insertions


def create_ephys_flags(session_folder: str):
    """
    Create flags for processing an ephys session.  Should be called after move_ephys_files
    :param session_folder: A path to an ephys session
    :return:
    """
    session_path = Path(session_folder)
    flags.write_flag_file(session_path.joinpath("extract_ephys.flag"))
    flags.write_flag_file(session_path.joinpath("raw_ephys_qc.flag"))
    for probe_path in session_path.joinpath('raw_ephys_data').glob('probe*'):
        flags.write_flag_file(probe_path.joinpath("spike_sorting.flag"))


def create_ephys_transfer_done_flag(session_folder: str) -> None:
    session_path = Path(session_folder)
    flags.write_flag_file(session_path.joinpath("ephys_data_transferred.flag"))


def create_video_transfer_done_flag(session_folder: str) -> None:
    session_path = Path(session_folder)
    flags.write_flag_file(session_path.joinpath("video_data_transferred.flag"))


def create_transfer_done_flag(session_folder: str, flag_name: str) -> None:
    session_path = Path(session_folder)
    flags.write_flag_file(session_path.joinpath(f"{flag_name}_data_transferred.flag"))


def check_create_raw_session_flag(session_folder: str) -> None:
    session_path = Path(session_folder)

    # If we have an experiment description file, read from it which devices we expect (video,
    # ephys, widefield, etc.) rather than deciding based on the task protocol alone
    experiment_description = sess_params.read_params(session_path)

    def check_status(expected, flag):
        if expected is not False and flag.exists():
            return True
        if expected is False and not flag.exists():
            return True
        else:
            return False

    if experiment_description is not None:

        if any(session_path.joinpath('_devices').glob('*')):
            return

        # Find the devices in the experiment description file
        devices = list()
        for key in DEVICE_FLAG_MAP.keys():
            if experiment_description.get('devices', {}).get(key, None) is not None:
                devices.append(key)
        # In case of widefield the sync also needs to be in its own folder
        if 'widefield' in devices:
            devices.append('sync')

        expected_flags = [session_path.joinpath(f'{DEVICE_FLAG_MAP[dev]}_data_transferred.flag') for dev in devices]

        expected = []
        flag_files = []
        for dev, fl in zip(devices, expected_flags):
            status = check_status(dev, fl)
            if status:
                flag_files.append(fl)
            expected.append(status)

        # In this case all the copying has completed
        if all(expected):
            # make raw session flag
            flags.write_flag_file(session_path.joinpath("raw_session.flag"))
            # and unlink individual copy flags
            for fl in flag_files:
                fl.unlink()

        return

    ephys = session_path.joinpath("ephys_data_transferred.flag")
    video = session_path.joinpath("video_data_transferred.flag")

    sett = raw.load_settings(session_path)
    if sett is None:
        log.error(f"No flag created for {session_path}")
        return

    is_biased = True if "biased" in sett["PYBPOD_PROTOCOL"] else False
    is_training = True if "training" in sett["PYBPOD_PROTOCOL"] else False
    is_habituation = True if "habituation" in sett["PYBPOD_PROTOCOL"] else False
    if video.exists() and (is_biased or is_training or is_habituation):
        flags.write_flag_file(session_path.joinpath("raw_session.flag"))
        video.unlink()
    if video.exists() and ephys.exists():
        flags.write_flag_file(session_path.joinpath("raw_session.flag"))
        ephys.unlink()
        video.unlink()


def rename_ephys_files(session_folder: str) -> None:
    """rename_ephys_files is system agnostic (3A, 3B1, 3B2).
    Renames all ephys files to Alyx compatible filenames. Uses get_new_filename.

    :param session_folder: Session folder path
    :type session_folder: str
    :return: None - Changes names of files on filesystem
    :rtype: None
    """
    session_path = Path(session_folder)
    ap_files = session_path.rglob("*.ap.*")
    lf_files = session_path.rglob("*.lf.*")
    nidq_files = session_path.rglob("*.nidq.*")

    for apf in ap_files:
        new_filename = get_new_filename(apf.name)
        shutil.move(str(apf), str(apf.parent / new_filename))

    for lff in lf_files:
        new_filename = get_new_filename(lff.name)
        shutil.move(str(lff), str(lff.parent / new_filename))

    for nidqf in nidq_files:
        # Ignore wiring files: these are usually created after the file renaming however this
        # function may be called a second time upon failed transfer.
        if 'wiring' in nidqf.name:
            continue
        new_filename = get_new_filename(nidqf.name)
        shutil.move(str(nidqf), str(nidqf.parent / new_filename))


def get_new_filename(filename: str) -> str:
    """get_new_filename is system agnostic (3A, 3B1, 3B2).
    Gets an alyx compatible filename from any spikeglx ephys file.

    :param filename: Name of an ephys file
    :return: New name for ephys file
    """
    root = "_spikeglx_ephysData"
    parts = filename.split('.')
    if len(parts) < 3:
        raise ValueError(fr'unrecognized filename "{filename}"')
    pattern = r'.*(?P<gt>_g\d+_t\d+)'
    if not (match := re.match(pattern, parts[0])):
        raise ValueError(fr'unrecognized filename "{filename}"')
    return '.'.join([root + match.group(1), *parts[1:]])
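
# Illustrative example (hypothetical file name, not part of the original module):
#     >>> get_new_filename('ephysData_g0_t0.imec0.ap.bin')  # doctest: +SKIP
#     '_spikeglx_ephysData_g0_t0.imec0.ap.bin'
# Names without a '_g<n>_t<n>' part, or with fewer than three dot-separated parts, raise ValueError.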


def move_ephys_files(session_folder: str) -> None:
    """move_ephys_files is system agnostic (3A, 3B1, 3B2).
    Moves all properly named ephys files to appropriate locations for transfer.
    Use rename_ephys_files function before this one.

    :param session_folder: Session folder path
    :type session_folder: str
    :return: None - Moves files on filesystem
    :rtype: None
    """
    session_path = Path(session_folder)
    raw_ephys_data_path = session_path / "raw_ephys_data"

    imec_files = session_path.rglob("*.imec*")
    for imf in imec_files:
        # For 3B system probe0x == imecx
        probe_number = re.match(r'_spikeglx_ephysData_g\d_t\d.imec(\d+).*', imf.name)
        if not probe_number:
            # For 3A system imec files must be in a 'probexx' folder
            probe_label = re.search(r'probe\d+', str(imf))
            assert probe_label, f'Cannot assign probe number to file {imf}'
            probe_label = probe_label.group()
        else:
            probe_number, = probe_number.groups()
            probe_label = f'probe{probe_number.zfill(2)}'
        raw_ephys_data_path.joinpath(probe_label).mkdir(exist_ok=True)
        shutil.move(imf, raw_ephys_data_path.joinpath(probe_label, imf.name))

    # NIDAQ files (3B system only)
    nidq_files = session_path.rglob("*.nidq.*")
    for nidqf in nidq_files:
        shutil.move(str(nidqf), str(raw_ephys_data_path / nidqf.name))
    # Delete all empty folders recursively
    delete_empty_folders(raw_ephys_data_path, dry=False, recursive=True)


def create_custom_ephys_wirings(iblscripts_folder: str):
    iblscripts_path = Path(iblscripts_folder)
    PARAMS = load_ephyspc_params()
    probe_set = set(v for k, v in PARAMS.items() if k.startswith('PROBE_TYPE'))

    params_path = iblscripts_path.parent / "iblscripts_params"
    params_path.mkdir(parents=True, exist_ok=True)
    wirings_path = iblscripts_path / "deploy" / "ephyspc" / "wirings"
    for k, v in PARAMS.items():
        if not k.startswith('PROBE_TYPE_'):
            continue
        probe_label = f'probe{k[-2:]}'
        if v not in ('3A', '3B'):
            raise ValueError(f'Unsupported probe type "{v}"')
        shutil.copy(
            wirings_path / f"{v}.wiring.json", params_path / f"{v}_{probe_label}.wiring.json"
        )
        print(f"Created {v}.wiring.json in {params_path} for {probe_label}")
    if "3B" in probe_set:
        shutil.copy(wirings_path / "nidq.wiring.json", params_path / "nidq.wiring.json")
        print(f"Created nidq.wiring.json in {params_path}")
    print(f"\nYou can now modify your wiring files from folder {params_path}")


def get_iblscripts_folder():
    return str(Path().cwd().parent.parent)


def copy_wiring_files(session_folder, iblscripts_folder):
    """Run after moving files to probe folders"""
    PARAMS = load_ephyspc_params()
    if PARAMS["PROBE_TYPE_00"] != PARAMS["PROBE_TYPE_01"]:
        print("Having different probe types is not supported")
        raise NotImplementedError()
    session_path = Path(session_folder)
    iblscripts_path = Path(iblscripts_folder)
    iblscripts_params_path = iblscripts_path.parent / "iblscripts_params"
    wirings_path = iblscripts_path / "deploy" / "ephyspc" / "wirings"
    termination = '.wiring.json'
    # Determine system
    ephys_system = PARAMS["PROBE_TYPE_00"]
    # Define where to get the files from (determine if custom wiring applies)
    src_wiring_path = iblscripts_params_path if iblscripts_params_path.exists() else wirings_path
    probe_wiring_file_path = src_wiring_path / f"{ephys_system}{termination}"

    if ephys_system == "3B":
        # Copy nidq file
        nidq_files = session_path.rglob("*.nidq.bin")
        for nidqf in nidq_files:
            nidq_wiring_name = ".".join(str(nidqf.name).split(".")[:-1]) + termination
            shutil.copy(
                str(src_wiring_path / f"nidq{termination}"),
                str(session_path / "raw_ephys_data" / nidq_wiring_name),
            )
    # If system is either (3A OR 3B) copy a wiring file for each ap.bin file
    for binf in session_path.rglob("*.ap.bin"):
        probe_label = re.search(r'probe\d+', str(binf))
        if probe_label:
            wiring_name = ".".join(str(binf.name).split(".")[:-2]) + termination
            dst_path = session_path / "raw_ephys_data" / probe_label.group() / wiring_name
            shutil.copy(probe_wiring_file_path, dst_path)


def multi_parts_flags_creation(root_paths: Union[list, str, Path]) -> List[Path]:
    """
    Creates the sequence files to run spike sorting in batches
    A sequence file is a json file with the following fields:
     sha1: a unique hash of the metafiles involved
     probe: a string with the probe name
     index: the index within the sequence
     nrecs: the length of the sequence
     files: a list of files
    :param root_paths:
    :return:
    """
    from one.alf import io as alfio
    # "001/raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.ap.meta",
    if isinstance(root_paths, str) or isinstance(root_paths, Path):
        root_paths = [root_paths]
    recordings = {}
    for root_path in root_paths:
        for meta_file in root_path.rglob("*.ap.meta"):
            # we want to make sure that the file is just under session_path/raw_ephys_data/{probe_label}
            session_path = alfio.files.get_session_path(meta_file)
            raw_ephys_path = session_path.joinpath('raw_ephys_data')
            if meta_file.parents[1] != raw_ephys_path:
                log.warning(f"{meta_file} is not in a probe directory and will be skipped")
                continue
            # stack the meta-file in the probe label key of the recordings dictionary
            plabel = meta_file.parts[-2]
            recordings[plabel] = recordings.get(plabel, []) + [meta_file]
    # once we have all of the files
    for k in recordings:
        nrecs = len(recordings[k])
        recordings[k].sort()
        # the identifier of the overarching recording sequence is the hash of hashes of the files
        m = hashlib.sha1()
        for i, meta_file in enumerate(recordings[k]):
            hash = hashfile.sha1(meta_file)
            m.update(hash.encode())
        # writes the sequence files
        for i, meta_file in enumerate(recordings[k]):
            sequence_file = meta_file.parent.joinpath(meta_file.name.replace('ap.meta', 'sequence.json'))
            with open(sequence_file, 'w+') as fid:
                json.dump(dict(sha1=m.hexdigest(), probe=k, index=i, nrecs=len(recordings[k]),
                               files=list(map(str, recordings[k]))), fid)
            log.info(f"{k}: {i}/{nrecs} written sequence file {recordings}")
    return recordings
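
# Illustrative example (hypothetical paths, not part of the original module): for two sessions of
# the same subject each holding a probe00 recording, the sequence file written next to the second
# meta file would contain something like
#     {"sha1": "<hash of the concatenated meta-file hashes>", "probe": "probe00", "index": 1,
#      "nrecs": 2, "files": [".../001/raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.ap.meta",
#                            ".../002/raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.ap.meta"]}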


class WindowsInhibitor:
    """Prevent OS sleep/hibernate in windows; code from:
    https://github.com/h3llrais3r/Deluge-PreventSuspendPlus/blob/master/preventsuspendplus/core.py
    API documentation:
    https://msdn.microsoft.com/en-us/library/windows/desktop/aa373208(v=vs.85).aspx"""
    ES_CONTINUOUS = 0x80000000
    ES_SYSTEM_REQUIRED = 0x00000001

    @staticmethod
    def inhibit(quiet: bool = False):
        if quiet:
            log.debug("Preventing Windows from going to sleep")
        else:
            print("Preventing Windows from going to sleep")
        ctypes.windll.kernel32.SetThreadExecutionState(WindowsInhibitor.ES_CONTINUOUS | WindowsInhibitor.ES_SYSTEM_REQUIRED)

    @staticmethod
    def uninhibit(quiet: bool = False):
        if quiet:
            log.debug("Allowing Windows to go to sleep")
        else:
            print("Allowing Windows to go to sleep")
        ctypes.windll.kernel32.SetThreadExecutionState(WindowsInhibitor.ES_CONTINUOUS)


def sleepless(func: Callable[..., Any]) -> Callable[..., Any]:
    """
    Decorator to ensure that the system doesn't enter sleep or idle mode during a long-running task.

    This decorator wraps a function and sets the thread execution state to prevent
    the system from entering sleep or idle mode while the decorated function is
    running.

    Parameters
    ----------
    func : callable
        The function to decorate.

    Returns
    -------
    callable
        The decorated function.
    """
    def inner(*args, **kwargs) -> Any:
        if os.name == 'nt':
            WindowsInhibitor().inhibit(quiet=True)
        result = func(*args, **kwargs)
        if os.name == 'nt':
            WindowsInhibitor().uninhibit(quiet=True)
        return result
    return inner
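
# Illustrative usage of the decorator added in this pull request (hypothetical function name):
#     @sleepless
#     def copy_session_to_server(src, dst):
#         ...  # long-running transfer that should not be interrupted by the OS going to sleep
# On Windows the wrapper calls WindowsInhibitor before and after the wrapped call; on other
# platforms the function runs unchanged.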