uc-cdis / fence, build 12951974564 (push via github, k-burt-uch, "Logging")
24 Jan 2025 02:52PM UTC. Coverage: 75.275%, remained the same
7858 of 10439 relevant lines covered (75.28%), 0.75 hits per line

Source file: fence/sync/sync_users.py (82.02% covered)
import backoff
import glob
import jwt
import os
import re
import subprocess as sp
import yaml
import copy
import datetime
import uuid
import collections
import hashlib

from contextlib import contextmanager
from collections import defaultdict
from csv import DictReader
from io import StringIO
from stat import S_ISDIR

import paramiko
from cdislogging import get_logger
from email_validator import validate_email, EmailNotValidError
from gen3authz.client.arborist.errors import ArboristError
from gen3users.validation import validate_user_yaml
from paramiko.proxy import ProxyCommand
from sqlalchemy.exc import IntegrityError
from sqlalchemy import func

from fence.config import config
from fence.models import (
    AccessPrivilege,
    AuthorizationProvider,
    Project,
    Tag,
    User,
    query_for_user,
    Client,
    IdentityProvider,
    get_project_to_authz_mapping,
)
from fence.resources.google.utils import get_or_create_proxy_group_id
from fence.resources.storage import StorageManager
from fence.resources.google.access_utils import update_google_groups_for_users
from fence.resources.google.access_utils import GoogleUpdateException
from fence.sync import utils
from fence.sync.passport_sync.ras_sync import RASVisa
from fence.utils import get_SQLAlchemyDriver, DEFAULT_BACKOFF_SETTINGS

logger = get_logger(__name__)


def _format_policy_id(path, privilege):
    resource = ".".join(name for name in path.split("/") if name)
    return "{}-{}".format(resource, privilege)
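
# Illustrative example (hypothetical values, not part of the original file):
#
#     _format_policy_id("/programs/DEV/projects/test", "read-storage")
#     # -> "programs.DEV.projects.test-read-storage"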


def download_dir(sftp, remote_dir, local_dir):
    """
    Recursively download files from remote_dir to local_dir
    Args:
        remote_dir(str)
        local_dir(str)
    Returns: None
    """
    dir_items = sftp.listdir_attr(remote_dir)

    for item in dir_items:
        remote_path = remote_dir + "/" + item.filename
        local_path = os.path.join(local_dir, item.filename)
        if S_ISDIR(item.st_mode):
            download_dir(sftp, remote_path, local_path)
        else:
            sftp.get(remote_path, local_path)


def arborist_role_for_permission(permission):
    """
    For the programs/projects in the existing fence access control model, in order to
    use arborist for checking permissions we generate a policy for each combination of
    program/project and privilege. The roles involved all contain only one permission,
    for one privilege from the project access model.
    """
    return {
        "id": permission,
        "permissions": [
            {"id": permission, "action": {"service": "*", "method": permission}}
        ],
    }
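
# Illustrative example (hypothetical input, not part of the original file):
#
#     arborist_role_for_permission("read-storage")
#     # -> {
#     #     "id": "read-storage",
#     #     "permissions": [
#     #         {"id": "read-storage", "action": {"service": "*", "method": "read-storage"}}
#     #     ],
#     # }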


@contextmanager
def _read_file(filepath, encrypted=True, key=None, logger=None):
    """
    Context manager for reading and optionally decrypting a file. It only
    decrypts files encrypted by the unix 'crypt' tool, which is used by dbGaP.

    Args:
        filepath (str): path to the file
        encrypted (bool): whether the file is encrypted

    Returns:
        Generator[file-like class]: file like object for the file
    """
    if encrypted:
        has_crypt = sp.call(["which", "mcrypt"])
        if has_crypt != 0:
            if logger:
                logger.error("Need to install mcrypt to decrypt files from dbgap")
            # TODO (rudyardrichter, 2019-01-08): raise error and move exit out to script
            exit(1)
        p = sp.Popen(
            [
                "mcrypt",
                "-a",
                "enigma",
                "-o",
                "scrypt",
                "-m",
                "stream",
                "--bare",
                "--key",
                key,
                "--force",
            ],
            stdin=open(filepath, "r"),
            stdout=sp.PIPE,
            stderr=open(os.devnull, "w"),
            universal_newlines=True,
        )
        try:
            yield StringIO(p.communicate()[0])
        except UnicodeDecodeError:
            logger.error("Could not decode file. Check the decryption key.")
    else:
        f = open(filepath, "r")
        yield f
        f.close()
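
# Minimal usage sketch for _read_file (hypothetical paths and key, not part of the
# original file):
#
#     # plaintext file
#     with _read_file("/tmp/users.csv", encrypted=False, logger=logger) as f:
#         rows = list(DictReader(f))
#
#     # dbGaP-style file encrypted with mcrypt/enigma (requires mcrypt on PATH)
#     with _read_file(
#         "/tmp/authentication_file_phs000123.txt.enc",
#         encrypted=True,
#         key="decrypt-key",
#         logger=logger,
#     ) as f:
#         contents = f.read()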


class UserYAML(object):
    """
    Representation of the information in a YAML file describing user, project, and ABAC
    information for access control.
    """

    def __init__(
        self,
        projects=None,
        user_info=None,
        policies=None,
        clients=None,
        authz=None,
        project_to_resource=None,
        logger=None,
        user_abac=None,
    ):
        self.projects = projects or {}
        self.user_info = user_info or {}
        self.user_abac = user_abac or {}
        self.policies = policies or {}
        self.clients = clients or {}
        self.authz = authz or {}
        self.project_to_resource = project_to_resource or {}
        self.logger = logger

    @classmethod
    def from_file(cls, filepath, encrypted=True, key=None, logger=None):
        """
        Add access by "auth_id" to "self.projects" to update the Fence DB.
        Add access by "resource" to "self.user_abac" to update Arborist.
        """
        data = {}
        if filepath:
            with _read_file(filepath, encrypted=encrypted, key=key, logger=logger) as f:
                file_contents = f.read()
                validate_user_yaml(file_contents)  # run user.yaml validation tests
                data = yaml.safe_load(file_contents)
        else:
            if logger:
                logger.info("Did not sync a user.yaml, no file path provided.")

        projects = dict()
        user_info = dict()
        policies = dict()

        # resources should be the resource tree to construct in arborist
        user_abac = dict()

        # Fall back on rbac block if no authz. Remove when rbac in useryaml fully deprecated.
        if not data.get("authz") and data.get("rbac"):
            if logger:
                logger.info(
                    "No authz block found but rbac block present. Using rbac block"
                )
            data["authz"] = data["rbac"]

        # get user project mapping to arborist resources if it exists
        project_to_resource = data.get("authz", dict()).get(
            "user_project_to_resource", dict()
        )

        # read projects and privileges for each user
        users = data.get("users", {})
        for username, details in users.items():
            # users should occur only once each; skip if already processed
            if username in projects:
                msg = "invalid yaml file: user `{}` occurs multiple times".format(
                    username
                )
                if logger:
                    logger.error(msg)
                raise EnvironmentError(msg)

            privileges = {}
            resource_permissions = dict()
            for project in details.get("projects", {}):
                try:
                    privileges[project["auth_id"]] = set(project["privilege"])
                except KeyError as e:
                    if logger:
                        logger.error("project {} missing field: {}".format(project, e))
                    continue

                # project may not have `resource` field.
                # prefer resource field;
                # if no resource or mapping, assume auth_id is resource.
                resource = project.get("resource", project["auth_id"])

                if project["auth_id"] not in project_to_resource:
                    project_to_resource[project["auth_id"]] = resource
                resource_permissions[resource] = set(project["privilege"])

            user_info[username] = {
                "email": details.get("email", ""),
                "display_name": details.get("display_name", ""),
                "phone_number": details.get("phone_number", ""),
                "tags": details.get("tags", {}),
                "admin": details.get("admin", False),
            }
            if not details.get("email"):
                try:
                    valid = validate_email(
                        username, allow_smtputf8=False, check_deliverability=False
                    )
                    user_info[username]["email"] = valid.email
                except EmailNotValidError:
                    pass
            projects[username] = privileges
            user_abac[username] = resource_permissions

            # list of policies we want to grant to this user, which get sent to arborist
            # to check if they're allowed to do certain things
            policies[username] = details.get("policies", [])

        if logger:
            logger.info(
                "Got user project to arborist resource mapping:\n{}".format(
                    str(project_to_resource)
                )
            )

        authz = data.get("authz", dict())
        if not authz:
            # older version: resources in root, no `authz` section or `rbac` section
            if logger:
                logger.warning(
                    "access control YAML file is using old format (missing `authz`/`rbac`"
                    " section in the root); assuming that if it exists `resources` will"
                    " be on the root level, and continuing"
                )
            # we're going to throw it into the `authz` dictionary anyways, so the rest of
            # the code can pretend it's in the normal place that we expect
            resources = data.get("resources", [])
            # keep authz empty dict if resources is not specified
            if resources:
                authz["resources"] = data.get("resources", [])

        clients = data.get("clients", {})

        return cls(
            projects=projects,
            user_info=user_info,
            user_abac=user_abac,
            policies=policies,
            clients=clients,
            authz=authz,
            project_to_resource=project_to_resource,
            logger=logger,
        )
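
    # Illustrative sketch (hypothetical data, not part of the original file) of how
    # from_file maps one user.yaml entry into the parsed structures, assuming a
    # plaintext file that passes validate_user_yaml:
    #
    #     users:
    #       alice@example.com:
    #         projects:
    #           - auth_id: phs000123
    #             resource: /programs/phs000123
    #             privilege: [read, read-storage]
    #
    #     user_yaml = UserYAML.from_file("user.yaml", encrypted=False, logger=logger)
    #     user_yaml.projects             # {"alice@example.com": {"phs000123": {"read", "read-storage"}}}
    #     user_yaml.user_abac            # {"alice@example.com": {"/programs/phs000123": {"read", "read-storage"}}}
    #     user_yaml.project_to_resource  # {"phs000123": "/programs/phs000123"}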

    def persist_project_to_resource(self, db_session):
        """
        Store the mappings from Project.auth_id to authorization resource (Project.authz)

        The mapping comes from an external source, this function persists what was parsed
        into memory into the database for future use.
        """
        for auth_id, authz_resource in self.project_to_resource.items():
            project = (
                db_session.query(Project).filter(Project.auth_id == auth_id).first()
            )
            if project:
                project.authz = authz_resource
            else:
                project = Project(name=auth_id, auth_id=auth_id, authz=authz_resource)
                db_session.add(project)
        db_session.commit()


class UserSyncer(object):
    def __init__(
        self,
        dbGaP,
        DB,
        project_mapping,
        storage_credentials=None,
        db_session=None,
        is_sync_from_dbgap_server=False,
        sync_from_local_csv_dir=None,
        sync_from_local_yaml_file=None,
        arborist=None,
        folder=None,
    ):
        """
        Syncs ACL files from dbGap to auth database and storage backends
        Args:
            dbGaP: a list of dict containing creds to access dbgap sftp
            DB: database connection string
            project_mapping: a dict containing how dbgap ids map to projects
            storage_credentials: a dict containing creds for storage backends
            sync_from_dir: path to an alternative dir to sync from instead of
                           dbGaP
            arborist:
                ArboristClient instance if the syncer should also create
                resources in arborist
            folder: a local folder where dbgap telemetry files will sync to
        """
        self.sync_from_local_csv_dir = sync_from_local_csv_dir
        self.sync_from_local_yaml_file = sync_from_local_yaml_file
        self.is_sync_from_dbgap_server = is_sync_from_dbgap_server
        self.dbGaP = dbGaP
        self.session = db_session
        self.driver = get_SQLAlchemyDriver(DB)
        self.project_mapping = project_mapping or {}
        self._projects = dict()
        self._created_roles = set()
        self._created_policies = set()
        self._dbgap_study_to_resources = dict()
        self.logger = get_logger(
            "user_syncer", log_level="debug" if config["DEBUG"] is True else "info"
        )
        self.arborist_client = arborist
        self.folder = folder

        self.auth_source = defaultdict(set)
        # auth_source used for logging. username : [source1, source2]
        self.visa_types = config.get("USERSYNC", {}).get("visa_types", {})
        self.parent_to_child_studies_mapping = {}
        for dbgap_config in dbGaP:
            self.parent_to_child_studies_mapping.update(
                dbgap_config.get("parent_to_child_studies_mapping", {})
            )
        if storage_credentials:
            self.storage_manager = StorageManager(
                storage_credentials, logger=self.logger
            )
        self.id_patterns = []

    @staticmethod
    def _match_pattern(filepath, id_patterns, encrypted=True):
        """
        Check if the filename matches dbgap access control file pattern

        Args:
            filepath (str): path to file
            encrypted (bool): whether the file is encrypted

        Returns:
            bool: whether the pattern matches
        """
        id_patterns.append(r"authentication_file_phs(\d{6}).(csv|txt)")
        for pattern in id_patterns:
            if encrypted:
                pattern += r".enc"
            pattern += r"$"
            # when converting the YAML from fence-config,
            # python reads it as Python string literal. So "\" turns into "\\"
            # which messes with the regex match
            pattern.replace("\\\\", "\\")
            if re.match(pattern, os.path.basename(filepath)):
                return True
        return False
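
    # Illustrative behavior of _match_pattern (hypothetical filenames, not part of the
    # original file); with encrypted=True the built-in pattern expects an ".enc" suffix:
    #
    #     UserSyncer._match_pattern("/x/authentication_file_phs000123.txt.enc", [], encrypted=True)   # True
    #     UserSyncer._match_pattern("/x/authentication_file_phs000123.csv", [], encrypted=False)      # True
    #     UserSyncer._match_pattern("/x/random_file.csv", [], encrypted=False)                        # False
    #
    # Note that str.replace returns a new string, so the unassigned
    # pattern.replace("\\\\", "\\") call above does not modify `pattern`.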

    def _get_from_sftp_with_proxy(self, server, path):
        """
        Download all data from sftp server to a local dir

        Args:
            server (dict) : dictionary containing info to access sftp server
            path (str): path to local directory

        Returns:
            None
        """
        proxy = None
        if server.get("proxy", "") != "":
            command = "ssh -i ~/.ssh/id_rsa {user}@{proxy} nc {host} {port}".format(
                user=server.get("proxy_user", ""),
                proxy=server.get("proxy", ""),
                host=server.get("host", ""),
                port=server.get("port", 22),
            )
            self.logger.info("SSH proxy command: {}".format(command))

            proxy = ProxyCommand(command)

        with paramiko.SSHClient() as client:
            client.set_log_channel(self.logger.name)

            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            parameters = {
                "hostname": str(server.get("host", "")),
                "username": str(server.get("username", "")),
                "password": str(server.get("password", "")),
                "port": int(server.get("port", 22)),
            }
            if proxy:
                parameters["sock"] = proxy

            self.logger.info(
                "SSH connection hostname:port {}:{}".format(
                    parameters.get("hostname", "unknown"),
                    parameters.get("port", "unknown"),
                )
            )
            self._connect_with_ssh(ssh_client=client, parameters=parameters)
            with client.open_sftp() as sftp:
                download_dir(sftp, "./", path)

        if proxy:
            proxy.close()

    @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS)
    def _connect_with_ssh(self, ssh_client, parameters):
        ssh_client.connect(**parameters)

    def _get_from_ftp_with_proxy(self, server, path):
        """
        Download data from ftp server to a local dir

        Args:
            server (dict): dictionary containing information for accessing server
            path(str): path to local files

        Returns:
            None
        """
        execstr = (
            'lftp -u {},{}  {} -e "set ftp:proxy http://{}; mirror . {}; exit"'.format(
                server.get("username", ""),
                server.get("password", ""),
                server.get("host", ""),
                server.get("proxy", ""),
                path,
            )
        )
        os.system(execstr)

    def _get_parse_consent_code(self, dbgap_config={}):
        return dbgap_config.get(
            "parse_consent_code", True
        )  # Should this really be true?
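
    # Sketch of a single dbGaP server entry as this class reads it elsewhere in the
    # module (keys mirror the lookups in these methods; all values are hypothetical):
    #
    #     dbgap_config = {
    #         "info": {
    #             "host": "sftp.example.org",
    #             "username": "user",
    #             "password": "****",
    #             "port": 22,
    #             "encrypted": True,
    #             # "proxy": "proxy.example.org", "proxy_user": "user",  # optional
    #         },
    #         "decrypt_key": "****",
    #         "parse_consent_code": True,
    #         "allow_non_dbGaP_whitelist": False,
    #         "allowed_whitelist_patterns": [],
    #         "additional_allowed_project_id_patterns": [],
    #         "enable_common_exchange_area_access": False,
    #         "study_common_exchange_areas": {"phs000123": "test_common_exchange_area"},
    #         "parent_to_child_studies_mapping": {"phs000123": ["phs000124"]},
    #     }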

    def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
        """
        parse csv files to python dict

        Args:
            file_dict: a dictionary with key(file path) and value(privileges)
            sess: sqlalchemy session
            dbgap_config: a dictionary containing information about the dbGaP sftp server
                (comes from fence config)
            encrypted: boolean indicating whether those files are encrypted


        Return:
            Tuple[[dict, dict]]:
                (user_project, user_info) where user_project is a mapping from
                usernames to project permissions and user_info is a mapping
                from usernames to user details, such as email

        Example:

            (
                {
                    username: {
                        'project1': {'read-storage','write-storage'},
                        'project2': {'read-storage'},
                    }
                },
                {
                    username: {
                        'email': 'email@mail.com',
                        'display_name': 'display name',
                        'phone_number': '123-456-789',
                        'tags': {'dbgap_role': 'PI'}
                    }
                },
            )

        """
        user_projects = dict()
        user_info = defaultdict(dict)

        # parse dbGaP sftp server information
        dbgap_key = dbgap_config.get("decrypt_key", None)

        self.id_patterns += (
            [
                item.replace("\\\\", "\\")
                for item in dbgap_config.get("allowed_whitelist_patterns", [])
            ]
            if dbgap_config.get("allow_non_dbGaP_whitelist", False)
            else []
        )

        enable_common_exchange_area_access = dbgap_config.get(
            "enable_common_exchange_area_access", False
        )
        study_common_exchange_areas = dbgap_config.get(
            "study_common_exchange_areas", {}
        )
        parse_consent_code = self._get_parse_consent_code(dbgap_config)

        if parse_consent_code and enable_common_exchange_area_access:
            self.logger.info(
                f"using study to common exchange area mapping: {study_common_exchange_areas}"
            )

        project_id_patterns = [r"phs(\d{6})"]
        if "additional_allowed_project_id_patterns" in dbgap_config:
            patterns = dbgap_config.get("additional_allowed_project_id_patterns")
            patterns = [
                pattern.replace("\\\\", "\\") for pattern in patterns
            ]  # when converting the YAML from fence-config, python reads it as Python string literal. So "\" turns into "\\" which messes with the regex match
            project_id_patterns += patterns

        self.logger.info(f"Using these file paths: {file_dict.items()}")
        for filepath, privileges in file_dict.items():
            self.logger.info("Reading file {}".format(filepath))
            if os.stat(filepath).st_size == 0:
                self.logger.warning("Empty file {}".format(filepath))
                continue
            if not self._match_pattern(
                filepath, id_patterns=self.id_patterns, encrypted=encrypted
            ):
                self.logger.warning(
                    "Filename {} does not match dbgap access control filename pattern;"
                    " this could mean that the filename has an invalid format, or has"
                    " an unexpected .enc extension, or lacks the .enc extension where"
                    " expected. This file is NOT being processed by usersync!".format(
                        filepath
                    )
                )
                continue

            with _read_file(
                filepath, encrypted=encrypted, key=dbgap_key, logger=self.logger
            ) as f:
                csv = DictReader(f, quotechar='"', skipinitialspace=True)
                for row in csv:
                    username = row.get("login") or ""
                    if username == "":
                        continue

                    if dbgap_config.get("allow_non_dbGaP_whitelist", False):
                        phsid = (
                            row.get("phsid") or (row.get("project_id") or "")
                        ).split(".")
                    else:
                        phsid = (row.get("phsid") or "").split(".")

                    dbgap_project = phsid[0]
                    # There are issues where dbgap has a wrong entry in their whitelist. Since we do a bulk arborist request, wrong entries in it invalidate the whole request, causing other correct entries not to be added
                    skip = False
                    for pattern in project_id_patterns:
                        self.logger.debug(
                            "Checking pattern:{} with project_id:{}".format(
                                pattern, dbgap_project
                            )
                        )
                        if re.match(pattern, dbgap_project):
                            skip = False
                            break
                        else:
                            skip = True
                    if skip:
                        self.logger.warning(
                            "Skip processing from file {}, user {} with project {}".format(
                                filepath,
                                username,
                                dbgap_project,
                            )
                        )
                        continue
                    if len(phsid) > 1 and parse_consent_code:
                        consent_code = phsid[-1]

                        # c999 indicates full access to all consents and access
                        # to a study-specific exchange area
                        # access to at least one study-specific exchange area implies access
                        # to the parent study's common exchange area
                        #
                        # NOTE: Handling giving access to all consents is done at
                        #       a later time, when we have full information about possible
                        #       consents
                        self.logger.debug(
                            f"got consent code {consent_code} from dbGaP project "
                            f"{dbgap_project}"
                        )
                        if (
                            consent_code == "c999"
                            and enable_common_exchange_area_access
                            and dbgap_project in study_common_exchange_areas
                        ):
                            self.logger.info(
                                "found study with consent c999 and Fence "
                                "is configured to parse exchange area data. Giving user "
                                f"{username} {privileges} privileges in project: "
                                f"{study_common_exchange_areas[dbgap_project]}."
                            )
                            self._add_dbgap_project_for_user(
                                study_common_exchange_areas[dbgap_project],
                                privileges,
                                username,
                                sess,
                                user_projects,
                                dbgap_config,
                            )

                        dbgap_project += "." + consent_code

                    self._add_children_for_dbgap_project(
                        dbgap_project,
                        privileges,
                        username,
                        sess,
                        user_projects,
                        dbgap_config,
                    )

                    display_name = row.get("user name") or ""
                    tags = {"dbgap_role": row.get("role") or ""}

                    # some dbgap telemetry files have information about a researcher's PI
                    if "downloader for" in row:
                        tags["pi"] = row["downloader for"]

                    # prefer name over previous "downloader for" if it exists
                    if "downloader for names" in row:
                        tags["pi"] = row["downloader for names"]

                    user_info[username] = {
                        "email": row.get("email")
                        or user_info[username].get("email")
                        or "",
                        "display_name": display_name,
                        "phone_number": row.get("phone")
                        or user_info[username].get("phone_number")
                        or "",
                        "tags": tags,
                    }

                    self._process_dbgap_project(
                        dbgap_project,
                        privileges,
                        username,
                        sess,
                        user_projects,
                        dbgap_config,
                    )

        return user_projects, user_info
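
    # Illustrative input/output for _parse_csv (hypothetical file contents; column names
    # match the row lookups above). Given a whitelist row such as:
    #
    #     login,email,phsid,role,user name,phone
    #     alice,alice@example.com,phs000123.c1,PI,Alice A,555-0100
    #
    # with privileges {"read", "read-storage"} and parse_consent_code enabled, user_info
    # gets {"alice": {"email": "alice@example.com", ..., "tags": {"dbgap_role": "PI"}}},
    # and user_projects is expected to end up with
    # {"alice": {"phs000123.c1": {"read", "read-storage"}}} via _process_dbgap_project
    # (defined later in this module).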

    def _get_children(self, dbgap_project):
        return self.parent_to_child_studies_mapping.get(dbgap_project.split(".")[0])

    def _add_children_for_dbgap_project(
        self, dbgap_project, privileges, username, sess, user_projects, dbgap_config
    ):
        """
        Adds the configured child studies for the given dbgap_project, adding them to the provided user_projects. If
        parse_consent_code is true, then the consents granted in the provided dbgap_project will also be granted to the
        child studies.
        """
        parent_phsid = dbgap_project
        parse_consent_code = self._get_parse_consent_code(dbgap_config)
        child_suffix = ""
        if parse_consent_code and re.match(
            config["DBGAP_ACCESSION_WITH_CONSENT_REGEX"], dbgap_project
        ):
            parent_phsid_parts = dbgap_project.split(".")
            parent_phsid = parent_phsid_parts[0]
            child_suffix = "." + parent_phsid_parts[1]

        if parent_phsid not in self.parent_to_child_studies_mapping:
            return

        self.logger.info(
            f"found parent study {parent_phsid} and Fence "
            "is configured to provide additional access to child studies. Giving user "
            f"{username} {privileges} privileges in projects: "
            f"{{k + child_suffix: v + child_suffix for k, v in self.parent_to_child_studies_mapping.items()}}."
        )
        child_studies = self.parent_to_child_studies_mapping.get(parent_phsid, [])
        for child_study in child_studies:
            self._add_dbgap_project_for_user(
                child_study + child_suffix,
                privileges,
                username,
                sess,
                user_projects,
                dbgap_config,
            )
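
    # Illustrative walk-through (hypothetical mapping, not part of the original file):
    # with parent_to_child_studies_mapping == {"phs000123": ["phs000124", "phs000125"]},
    # parse_consent_code enabled, and dbgap_project == "phs000123.c1" (assuming the
    # configured DBGAP_ACCESSION_WITH_CONSENT_REGEX matches it), parent_phsid resolves to
    # "phs000123" and child_suffix to ".c1", so access is added for "phs000124.c1" and
    # "phs000125.c1" with the same privileges.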

    def _add_dbgap_project_for_user(
        self, dbgap_project, privileges, username, sess, user_projects, dbgap_config
    ):
        """
        Helper function for csv parsing that adds a given dbgap project to Fence/Arborist
        and then updates the dictionary containing all user's project access
        """
        if dbgap_project not in self._projects:
            self.logger.debug(
                "creating Project in fence for dbGaP study: {}".format(dbgap_project)
            )

            project = self._get_or_create(sess, Project, auth_id=dbgap_project)

            # need to add dbgap project to arborist
            if self.arborist_client:
                self._determine_arborist_resource(dbgap_project, dbgap_config)

            if project.name is None:
                project.name = dbgap_project
            self._projects[dbgap_project] = project
        phsid_privileges = {dbgap_project: set(privileges)}
        if username in user_projects:
            user_projects[username].update(phsid_privileges)
        else:
            user_projects[username] = phsid_privileges

    @staticmethod
    def sync_two_user_info_dict(user_info1, user_info2):
        """
        Merge user_info1 into user_info2. Values in user_info2 are overridden
        by values in user_info1. user_info2 ends up containing the merged dict.

        Args:
            user_info1 (dict): nested dict
            user_info2 (dict): nested dict

            Example:
            {username: {'email': 'abc@email.com'}}

        Returns:
            None
        """
        user_info2.update(user_info1)

    def sync_two_phsids_dict(
        self,
        phsids1,
        phsids2,
        source1=None,
        source2=None,
        phsids2_overrides_phsids1=True,
    ):
        """
        Merge phsids1 into phsids2. If `phsids2_overrides_phsids1`, values in
        phsids1 are overridden by values in phsids2. phsids2 ends up containing
        the merged dict (see explanation below).
        `source1` and `source2`: for logging.

        Args:
            phsids1, phsids2: nested dicts mapping phsids to sets of permissions

            source1, source2: source of authz information (eg. dbgap, user_yaml, visas)

            Example:
            {
                username: {
                    phsid1: {'read-storage','write-storage'},
                    phsid2: {'read-storage'},
                }
            }

        Return:
            None

        Explanation:
            Consider merging projects of the same user:

                {user1: {phsid1: privilege1}}

                {user1: {phsid2: privilege2}}

            case 1: phsid1 != phsid2. Output:

                {user1: {phsid1: privilege1, phsid2: privilege2}}

            case 2: phsid1 == phsid2 and privilege1 != privilege2. Output:

                {user1: {phsid1: union(privilege1, privilege2)}}

            For the other cases, just simple addition
        """

        for user, projects1 in phsids1.items():
            if not phsids2.get(user):
                if source1:
                    self.auth_source[user].add(source1)
                phsids2[user] = projects1
            elif phsids2_overrides_phsids1:
                if source1:
                    self.auth_source[user].add(source1)
                if source2:
                    self.auth_source[user].add(source2)
                for phsid1, privilege1 in projects1.items():
                    if phsid1 not in phsids2[user]:
                        phsids2[user][phsid1] = set()
                    phsids2[user][phsid1].update(privilege1)
            elif source2:
                self.auth_source[user].add(source2)
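
    # Concrete merge example (hypothetical data, `syncer` being a UserSyncer instance)
    # with the default phsids2_overrides_phsids1=True:
    #
    #     phsids1 = {"alice": {"phs000123": {"read"}}}
    #     phsids2 = {"alice": {"phs000123": {"write"}, "phs000124": {"read"}}}
    #     syncer.sync_two_phsids_dict(phsids1, phsids2, source1="dbgap", source2="user_yaml")
    #     phsids2  # {"alice": {"phs000123": {"read", "write"}, "phs000124": {"read"}}}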

    def sync_to_db_and_storage_backend(
        self,
        user_project,
        user_info,
        sess,
        do_not_revoke_from_db_and_storage=False,
        expires=None,
    ):
        """
        sync user access control to database and storage backend

        Args:
            user_project (dict): a dictionary of

                {
                    username: {
                        'project1': {'read-storage','write-storage'},
                        'project2': {'read-storage'}
                    }
                }

            user_info (dict): a dictionary of {username: user_info{}}
            sess: a sqlalchemy session

        Return:
            None
        """
        google_bulk_mapping = None
        if config["GOOGLE_BULK_UPDATES"]:
            google_bulk_mapping = {}

        self._init_projects(user_project, sess)

        auth_provider_list = [
            self._get_or_create(sess, AuthorizationProvider, name="dbGaP"),
            self._get_or_create(sess, AuthorizationProvider, name="fence"),
        ]

        cur_db_user_project_list = {
            (ua.user.username.lower(), ua.project.auth_id)
            for ua in sess.query(AccessPrivilege).all()
        }

        # we need to compare db -> whitelist case-insensitively for username.
        # db stores case-sensitively, but we need to query case-insensitively
        user_project_lowercase = {}
        syncing_user_project_list = set()
        for username, projects in user_project.items():
            user_project_lowercase[username.lower()] = projects
            for project, _ in projects.items():
                syncing_user_project_list.add((username.lower(), project))

        user_info_lowercase = {
            username.lower(): info for username, info in user_info.items()
        }

        to_delete = set.difference(cur_db_user_project_list, syncing_user_project_list)
        to_add = set.difference(syncing_user_project_list, cur_db_user_project_list)
        to_update = set.intersection(
            cur_db_user_project_list, syncing_user_project_list
        )

        # when updating users we want to maintain case sensitivity in the username so
        # pass the original, non-lowered user_info dict
        self._upsert_userinfo(sess, user_info)

        if not do_not_revoke_from_db_and_storage:
            self._revoke_from_storage(
                to_delete, sess, google_bulk_mapping=google_bulk_mapping
            )
            self._revoke_from_db(sess, to_delete)

        self._grant_from_storage(
            to_add,
            user_project_lowercase,
            sess,
            google_bulk_mapping=google_bulk_mapping,
            expires=expires,
        )

        self._grant_from_db(
            sess,
            to_add,
            user_info_lowercase,
            user_project_lowercase,
            auth_provider_list,
        )

        # re-grant
        self._grant_from_storage(
            to_update,
            user_project_lowercase,
            sess,
            google_bulk_mapping=google_bulk_mapping,
            expires=expires,
        )
        self._update_from_db(sess, to_update, user_project_lowercase)

        if not do_not_revoke_from_db_and_storage:
            self._validate_and_update_user_admin(sess, user_info_lowercase)

        sess.commit()

        if config["GOOGLE_BULK_UPDATES"]:
            self.logger.info("Doing bulk Google update...")
            update_google_groups_for_users(google_bulk_mapping)
            self.logger.info("Bulk Google update done!")

        sess.commit()
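
    # Sketch of the set arithmetic above (hypothetical data): with
    #
    #     cur_db_user_project_list  == {("alice", "phs000123"), ("bob", "phs000124")}
    #     syncing_user_project_list == {("alice", "phs000123"), ("carol", "phs000125")}
    #
    # the sync computes
    #
    #     to_delete == {("bob", "phs000124")}    # in the DB but not in the new whitelist
    #     to_add    == {("carol", "phs000125")}  # in the new whitelist but not in the DB
    #     to_update == {("alice", "phs000123")}  # present in both; privileges refreshed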

    def sync_to_storage_backend(
        self, user_project, user_info, sess, expires, skip_google_updates=False
    ):
        """
        sync user access control to storage backend with given expiration

        Args:
            user_project (dict): a dictionary of

                {
                    username: {
                        'project1': {'read-storage','write-storage'},
                        'project2': {'read-storage'}
                    }
                }

            user_info (dict): a dictionary of attributes for a user.
            sess: a sqlalchemy session
            expires (int): time at which synced Arborist policies and
                   inclusion in any GBAG are set to expire
            skip_google_updates (bool): True if google group updates should be skipped. False if otherwise.
        Return:
            None
        """
        if not expires:
            raise Exception(
                f"sync to storage backend requires an expiration. you provided: {expires}"
            )

        google_group_user_mapping = None
        if config["GOOGLE_BULK_UPDATES"]:
            google_group_user_mapping = {}
            get_or_create_proxy_group_id(
                expires=expires,
                user_id=user_info["user_id"],
                username=user_info["username"],
                session=sess,
                storage_manager=self.storage_manager,
            )

        # TODO: eventually it'd be nice to remove this step but it's required
        #       so that grant_from_storage can determine what storage backends
        #       are needed for a project.
        self._init_projects(user_project, sess)

        # we need to compare db -> whitelist case-insensitively for username.
        # db stores case-sensitively, but we need to query case-insensitively
        user_project_lowercase = {}
        syncing_user_project_list = set()
        for username, projects in user_project.items():
            user_project_lowercase[username.lower()] = projects
            for project, _ in projects.items():
                syncing_user_project_list.add((username.lower(), project))

        to_add = set(syncing_user_project_list)

        # when updating users we want to maintain case sensitivity in the username so
        # pass the original, non-lowered user_info dict
        self._upsert_userinfo(sess, {user_info["username"].lower(): user_info})

        self._grant_from_storage(
            to_add,
            user_project_lowercase,
            sess,
            google_bulk_mapping=google_group_user_mapping,
            expires=expires,
        )

        if config["GOOGLE_BULK_UPDATES"] and not skip_google_updates:
            self.logger.info("Updating user's google groups ...")
            update_google_groups_for_users(google_group_user_mapping)
            self.logger.info("Google groups update done!!")

        sess.commit()

    def _revoke_from_db(self, sess, to_delete):
        """
        Revoke user access to projects in the auth database

        Args:
            sess: sqlalchemy session
            to_delete: a set of (username, project.auth_id) to be revoked from db
        Return:
            None
        """
        for username, project_auth_id in to_delete:
            q = (
                sess.query(AccessPrivilege)
                .filter(AccessPrivilege.project.has(auth_id=project_auth_id))
                .join(AccessPrivilege.user)
                .filter(func.lower(User.username) == username)
                .all()
            )
            for access in q:
                self.logger.info(
                    "revoke {} access to {} in db".format(username, project_auth_id)
                )
                sess.delete(access)

    def _validate_and_update_user_admin(self, sess, user_info):
        """
        Make sure there is no admin user that is not in yaml/csv files

        Args:
            sess: sqlalchemy session
            user_info: a dict of
            {
                username: {
                    'email': email,
                    'display_name': display_name,
                    'phone_number': phonenum,
                    'tags': {'k1':'v1', 'k2': 'v2'}
                    'admin': is_admin
                }
            }
        Returns:
            None
        """
        for admin_user in sess.query(User).filter_by(is_admin=True).all():
            if admin_user.username.lower() not in user_info:
                admin_user.is_admin = False
                sess.add(admin_user)
                self.logger.info(
                    "remove admin access from {} in db".format(
                        admin_user.username.lower()
                    )
                )

    def _update_from_db(self, sess, to_update, user_project):
        """
        Update user access to projects in the auth database

        Args:
            sess: sqlalchemy session
            to_update:
                a set of (username, project.auth_id) to be updated from db

        Return:
            None
        """

        for username, project_auth_id in to_update:
            q = (
                sess.query(AccessPrivilege)
                .filter(AccessPrivilege.project.has(auth_id=project_auth_id))
                .join(AccessPrivilege.user)
                .filter(func.lower(User.username) == username)
                .all()
            )
            for access in q:
                access.privilege = user_project[username][project_auth_id]
                self.logger.info(
                    "update {} with {} access to {} in db".format(
                        username, access.privilege, project_auth_id
                    )
                )
1102

1103
    def _grant_from_db(self, sess, to_add, user_info, user_project, auth_provider_list):
1✔
1104
        """
1105
        Grant user access to projects in the auth database
1106
        Args:
1107
            sess: sqlalchemy session
1108
            to_add: a set of (username, project.auth_id) to be granted
1109
            user_project:
1110
                a dictionary of {username: {project: {'read','write'}}
1111
        Return:
1112
            None
1113
        """
1114
        for username, project_auth_id in to_add:
1✔
1115
            u = query_for_user(session=sess, username=username)
1✔
1116

1117
            auth_provider = auth_provider_list[0]
1✔
1118
            if "dbgap_role" not in user_info[username]["tags"]:
1✔
1119
                auth_provider = auth_provider_list[1]
1✔
1120
            user_access = AccessPrivilege(
1✔
1121
                user=u,
1122
                project=self._projects[project_auth_id],
1123
                privilege=list(user_project[username][project_auth_id]),
1124
                auth_provider=auth_provider,
1125
            )
1126
            self.logger.info(
1✔
1127
                "grant user {} to {} with access {}".format(
1128
                    username, user_access.project, user_access.privilege
1129
                )
1130
            )
1131
            sess.add(user_access)
1✔
1132

1133
    def _upsert_userinfo(self, sess, user_info):
1✔
1134
        """
1135
        update user info to database.
1136

1137
        Args:
1138
            sess: sqlalchemy session
1139
            user_info:
1140
                a dict of {username: {display_name, phone_number, tags, admin}
1141

1142
        Return:
1143
            None
1144
        """
1145

1146
        for username in user_info:
1✔
1147
            u = query_for_user(session=sess, username=username)
1✔
1148

1149
            if u is None:
1✔
1150
                self.logger.info("create user {}".format(username))
1✔
1151
                u = User(username=username)
1✔
1152
                sess.add(u)
1✔
1153

1154
            if self.arborist_client:
1✔
1155
                self.arborist_client.create_user({"name": username})
1✔
1156

1157
            u.email = user_info[username].get("email", "")
1✔
1158
            u.display_name = user_info[username].get("display_name", "")
1✔
1159
            u.phone_number = user_info[username].get("phone_number", "")
1✔
1160
            u.is_admin = user_info[username].get("admin", False)
1✔
1161

1162
            idp_name = user_info[username].get("idp_name", "")
1✔
1163
            if idp_name and not u.identity_provider:
1✔
1164
                idp = (
×
1165
                    sess.query(IdentityProvider)
1166
                    .filter(IdentityProvider.name == idp_name)
1167
                    .first()
1168
                )
1169
                if not idp:
×
1170
                    idp = IdentityProvider(name=idp_name)
×
1171
                u.identity_provider = idp
×
1172

1173
            # do not update if there is no tag
1174
            if not user_info[username].get("tags"):
1✔
1175
                continue
1✔
1176

1177
            # remove user db tags if they are not shown in new tags
1178
            for tag in u.tags:
1✔
1179
                if tag.key not in user_info[username]["tags"]:
1✔
1180
                    u.tags.remove(tag)
1✔
1181

1182
            # sync
1183
            for k, v in user_info[username]["tags"].items():
1✔
1184
                found = False
1✔
1185
                for tag in u.tags:
1✔
1186
                    if tag.key == k:
1✔
1187
                        found = True
1✔
1188
                        tag.value = v
1✔
1189
                # create new tag if not found
1190
                if not found:
1✔
1191
                    tag = Tag(key=k, value=v)
1✔
1192
                    u.tags.append(tag)
1✔
1193

1194
    def _revoke_from_storage(self, to_delete, sess, google_bulk_mapping=None):
1✔
1195
        """
1196
        If a project have storage backend, revoke user's access to buckets in
1197
        the storage backend.
1198

1199
        Args:
1200
            to_delete: a set of (username, project.auth_id) to be revoked
1201

1202
        Return:
1203
            None
1204
        """
1205
        for username, project_auth_id in to_delete:
1✔
1206
            project = (
1✔
1207
                sess.query(Project).filter(Project.auth_id == project_auth_id).first()
1208
            )
1209
            for sa in project.storage_access:
1✔
1210
                if not hasattr(self, "storage_manager"):
1✔
1211
                    self.logger.error(
×
1212
                        (
1213
                            "CANNOT revoke {} access to {} in {} because there is NO "
1214
                            "configured storage accesses at all. See configuration. "
1215
                            "Continuing anyway..."
1216
                        ).format(username, project_auth_id, sa.provider.name)
1217
                    )
1218
                    continue
×
1219

1220
                self.logger.info(
1✔
1221
                    "revoke {} access to {} in {}".format(
1222
                        username, project_auth_id, sa.provider.name
1223
                    )
1224
                )
1225
                self.storage_manager.revoke_access(
1✔
1226
                    provider=sa.provider.name,
1227
                    username=username,
1228
                    project=project,
1229
                    session=sess,
1230
                    google_bulk_mapping=google_bulk_mapping,
1231
                )
1232

1233
    def _grant_from_storage(
1✔
1234
        self, to_add, user_project, sess, google_bulk_mapping=None, expires=None
1235
    ):
1236
        """
1237
        If a project have storage backend, grant user's access to buckets in
1238
        the storage backend.
1239

1240
        Args:
1241
            to_add: a set of (username, project.auth_id)  to be granted
1242
            user_project: a dictionary like:
1243

1244
                    {username: {phsid: {'read-storage','write-storage'}}}
1245

1246
        Return:
1247
            dict of the users' storage usernames to their user_projects and the respective storage access.
1248
        """
1249
        storage_user_to_sa_and_user_project = defaultdict()
1✔
1250
        for username, project_auth_id in to_add:
1✔
1251
            project = self._projects[project_auth_id]
1✔
1252
            for sa in project.storage_access:
1✔
1253
                access = list(user_project[username][project_auth_id])
1✔
1254
                if not hasattr(self, "storage_manager"):
1✔
1255
                    self.logger.error(
×
1256
                        (
1257
                            "CANNOT grant {} access {} to {} in {} because there is NO "
1258
                            "configured storage accesses at all. See configuration. "
1259
                            "Continuing anyway..."
1260
                        ).format(username, access, project_auth_id, sa.provider.name)
1261
                    )
1262
                    continue
×
1263

1264
                self.logger.info(
1✔
1265
                    "grant {} access {} to {} in {}".format(
1266
                        username, access, project_auth_id, sa.provider.name
1267
                    )
1268
                )
1269
                storage_username = self.storage_manager.grant_access(
1✔
1270
                    provider=sa.provider.name,
1271
                    username=username,
1272
                    project=project,
1273
                    access=access,
1274
                    session=sess,
1275
                    google_bulk_mapping=google_bulk_mapping,
1276
                    expires=expires,
1277
                )
1278

1279
                storage_user_to_sa_and_user_project[storage_username] = (sa, project)
1✔
1280
        return storage_user_to_sa_and_user_project
1✔
1281

1282
    def _init_projects(self, user_project, sess):
1✔
1283
        """
1284
        initialize projects
1285
        """
1286
        if self.project_mapping:
1✔
1287
            for projects in list(self.project_mapping.values()):
1✔
1288
                for p in projects:
1✔
1289
                    self.logger.debug(
1✔
1290
                        "creating Project with info from project_mapping: {}".format(p)
1291
                    )
1292
                    project = self._get_or_create(sess, Project, **p)
1✔
1293
                    self._projects[p["auth_id"]] = project
1✔
1294
        for _, projects in user_project.items():
1✔
1295
            for auth_id in list(projects.keys()):
1✔
1296
                project = sess.query(Project).filter(Project.auth_id == auth_id).first()
1✔
1297
                if not project:
1✔
1298
                    data = {"name": auth_id, "auth_id": auth_id}
1✔
1299
                    try:
1✔
1300
                        project = self._get_or_create(sess, Project, **data)
1✔
1301
                    except IntegrityError as e:
×
1302
                        sess.rollback()
×
1303
                        self.logger.error(
×
1304
                            f"Project {auth_id} already exists. Detail {str(e)}"
1305
                        )
1306
                        raise Exception(
×
1307
                            "Project {} already exists. Detail {}. Please contact your system administrator.".format(
1308
                                auth_id, str(e)
1309
                            )
1310
                        )
1311
                if auth_id not in self._projects:
1✔
1312
                    self._projects[auth_id] = project
1✔
1313

1314
    @staticmethod
1✔
1315
    def _get_or_create(sess, model, **kwargs):
1✔
1316
        instance = sess.query(model).filter_by(**kwargs).first()
1✔
1317
        if not instance:
1✔
1318
            instance = model(**kwargs)
1✔
1319
            sess.add(instance)
1✔
1320
        return instance
1✔
1321

1322
    def _process_dbgap_files(self, dbgap_config, sess):
1✔
1323
        """
1324
        Args:
1325
            dbgap_config : a dictionary containing information about a single
1326
                           dbgap sftp server (from fence config)
1327
            sess: database session
1328

1329
        Return:
1330
            user_projects (dict)
1331
            user_info (dict)
1332
        """
1333
        dbgap_file_list = []
1✔
1334
        hostname = dbgap_config["info"]["host"]
1✔
1335
        username = dbgap_config["info"]["username"]
1✔
1336
        encrypted = dbgap_config["info"].get("encrypted", True)
1✔
1337
        folderdir = os.path.join(str(self.folder), str(hostname), str(username))
1✔
1338

1339
        try:
1✔
1340
            if os.path.exists(folderdir):
1✔
1341
                dbgap_file_list = glob.glob(
×
1342
                    os.path.join(folderdir, "*")
1343
                )  # get lists of file from folder
1344
            else:
1345
                self.logger.info("Downloading files from: {}".format(hostname))
1✔
1346
                dbgap_file_list = self._download(dbgap_config)
1✔
1347
        except Exception as e:
1✔
1348
            self.logger.error(e)
1✔
1349
            exit(1)
1✔
1350
        self.logger.info("dbgap files: {}".format(dbgap_file_list))
×
1351
        user_projects, user_info = self._get_user_permissions_from_csv_list(
×
1352
            dbgap_file_list,
1353
            encrypted=encrypted,
1354
            session=sess,
1355
            dbgap_config=dbgap_config,
1356
        )
1357

1358
        user_projects = self.parse_projects(user_projects)
×
1359
        return user_projects, user_info
×
1360

1361
    def _get_user_permissions_from_csv_list(
1✔
1362
        self, file_list, encrypted, session, dbgap_config={}
1363
    ):
1364
        """
1365
        Args:
1366
            file_list: list of files (represented as strings)
1367
            encrypted: boolean indicating whether those files are encrypted
1368
            session: sqlalchemy session
1369
            dbgap_config: a dictionary containing information about the dbGaP sftp server
1370
                    (comes from fence config)
1371

1372
        Return:
1373
            user_projects (dict)
1374
            user_info (dict)
1375
        """
1376
        permissions = [{"read-storage", "read"} for _ in file_list]
1✔
1377
        user_projects, user_info = self._parse_csv(
1✔
1378
            dict(list(zip(file_list, permissions))),
1379
            sess=session,
1380
            dbgap_config=dbgap_config,
1381
            encrypted=encrypted,
1382
        )
1383
        return user_projects, user_info
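    # Illustrative sketch (not part of the original module): every telemetry file
    # is paired with the same {"read-storage", "read"} privilege set, so the dict
    # handed to self._parse_csv looks roughly like this (the file path is a
    # hypothetical example):
    #
    #   dict(zip(["/tmp/authentication_file_phs000123.txt"], [{"read-storage", "read"}]))
    #   == {"/tmp/authentication_file_phs000123.txt": {"read-storage", "read"}}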

    def _merge_multiple_local_csv_files(
        self, dbgap_file_list, encrypted, dbgap_configs, session
    ):
        """
        Args:
            dbgap_file_list (list): a list of whitelist file locations stored locally
            encrypted (bool): whether the file is encrypted (comes from fence config)
            dbgap_configs (list): list of dictionaries containing information about the dbgap server (comes from fence config)
            session (sqlalchemy.Session): database session

        Return:
            merged_user_projects (dict)
            merged_user_info (dict)
        """
        merged_user_projects = {}
        merged_user_info = {}

        for dbgap_config in dbgap_configs:
            user_projects, user_info = self._get_user_permissions_from_csv_list(
                dbgap_file_list,
                encrypted,
                session=session,
                dbgap_config=dbgap_config,
            )
            self.sync_two_user_info_dict(user_info, merged_user_info)
            self.sync_two_phsids_dict(user_projects, merged_user_projects)
        return merged_user_projects, merged_user_info

    def _merge_multiple_dbgap_sftp(self, dbgap_servers, sess):
        """
        Args:
            dbgap_servers : a list of dictionaries, each containing config for a
                            dbgap sftp server (comes from fence config)
            sess: database session

        Return:
            merged_user_projects (dict)
            merged_user_info (dict)
        """
        merged_user_projects = {}
        merged_user_info = {}
        for dbgap in dbgap_servers:
            user_projects, user_info = self._process_dbgap_files(dbgap, sess)
            # merge into merged_user_info
            # user_info overrides original info in merged_user_info
            self.sync_two_user_info_dict(user_info, merged_user_info)

            # merge all access info dicts into "merged_user_projects".
            # the access info is combined - if the user_projects access is
            # ["read"] and the merged_user_projects is ["read-storage"], the
            # resulting access is ["read", "read-storage"].
            self.sync_two_phsids_dict(user_projects, merged_user_projects)
        return merged_user_projects, merged_user_info
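    # Illustrative sketch (not part of the original module): per the comment above,
    # sync_two_phsids_dict combines access rather than overwriting it. With
    # hypothetical inputs
    #
    #   user_projects        = {"userA": {"phs000123.c1": {"read"}}}
    #   merged_user_projects = {"userA": {"phs000123.c1": {"read-storage"}}}
    #
    # the merged access for "phs000123.c1" would be {"read", "read-storage"}.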

    def parse_projects(self, user_projects):
        """
        helper function for parsing projects: lower-cases the project keys
        """
        return {key.lower(): value for key, value in user_projects.items()}

    def _process_dbgap_project(
        self, dbgap_project, privileges, username, sess, user_projects, dbgap_config
    ):
        if dbgap_project not in self.project_mapping:
            self._add_dbgap_project_for_user(
                dbgap_project,
                privileges,
                username,
                sess,
                user_projects,
                dbgap_config,
            )

        for element_dict in self.project_mapping.get(dbgap_project, []):
            try:
                phsid_privileges = {element_dict["auth_id"]: set(privileges)}

                # need to add dbgap project to arborist
                if self.arborist_client:
                    self._determine_arborist_resource(
                        element_dict["auth_id"], dbgap_config
                    )

                if username not in user_projects:
                    user_projects[username] = {}
                user_projects[username].update(phsid_privileges)

            except ValueError as e:
                self.logger.info(e)
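    # Illustrative sketch (an assumption about shape, based only on how this method
    # reads it): self.project_mapping maps a dbGaP accession to a list of dicts that
    # each carry at least an "auth_id", e.g.
    #
    #   {"phs000123.c1": [{"auth_id": "phs000123.c1"}]}
    #
    # Only "auth_id" is used here; any additional keys are hypothetical.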

    def _process_user_projects(
        self,
        user_projects,
        enable_common_exchange_area_access,
        study_common_exchange_areas,
        dbgap_config,
        sess,
    ):
        logger.info(user_projects)
        user_projects_to_modify = copy.deepcopy(user_projects)
        for username in user_projects.keys():
            for project in user_projects[username].keys():
                phsid = project.split(".")
                dbgap_project = phsid[0]
                privileges = user_projects[username][project]
                if len(phsid) > 1 and self._get_parse_consent_code(dbgap_config):
                    consent_code = phsid[-1]

                    # c999 indicates full access to all consents and access
                    # to a study-specific exchange area
                    # access to at least one study-specific exchange area implies access
                    # to the parent study's common exchange area
                    #
                    # NOTE: Handling giving access to all consents is done at
                    #       a later time, when we have full information about possible
                    #       consents
                    self.logger.debug(
                        f"got consent code {consent_code} from dbGaP project "
                        f"{dbgap_project}"
                    )
                    if (
                        consent_code == "c999"
                        and enable_common_exchange_area_access
                        and dbgap_project in study_common_exchange_areas
                    ):
                        self.logger.info(
                            "found study with consent c999 and Fence "
                            "is configured to parse exchange area data. Giving user "
                            f"{username} {privileges} privileges in project: "
                            f"{study_common_exchange_areas[dbgap_project]}."
                        )
                        self._add_dbgap_project_for_user(
                            study_common_exchange_areas[dbgap_project],
                            privileges,
                            username,
                            sess,
                            user_projects_to_modify,
                            dbgap_config,
                        )

                    dbgap_project += "." + consent_code

                self._process_dbgap_project(
                    dbgap_project,
                    privileges,
                    username,
                    sess,
                    user_projects_to_modify,
                    dbgap_config,
                )
        # for user in user_projects_to_modify.keys():
        #     user_projects[user] = user_projects_to_modify[user]
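    # Illustrative sketch (not part of the original module): a project key like
    # "phs000123.c2" splits into dbgap_project "phs000123" and consent_code "c2",
    # and access is then recorded for "phs000123.c2". A key like "phs000123.c999"
    # additionally grants the study's common exchange area when
    # enable_common_exchange_area_access is on and "phs000123" appears in
    # study_common_exchange_areas.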

    def sync(self):
        if self.session:
            self._sync(self.session)
        else:
            with self.driver.session as s:
                self._sync(s)

    def download(self):
        for dbgap_server in self.dbGaP:
            self._download(dbgap_server)

    def _download(self, dbgap_config):
        """
        Download files from dbgap server.
        """
        server = dbgap_config["info"]
        protocol = dbgap_config["protocol"]
        hostname = server["host"]
        username = server["username"]
        folderdir = os.path.join(str(self.folder), str(hostname), str(username))

        if not os.path.exists(folderdir):
            os.makedirs(folderdir)

        self.logger.info("Download from server")
        try:
            if protocol == "sftp":
                self._get_from_sftp_with_proxy(server, folderdir)
            else:
                self._get_from_ftp_with_proxy(server, folderdir)
            dbgap_files = glob.glob(os.path.join(folderdir, "*"))
            return dbgap_files
        except Exception as e:
            self.logger.error(e)
            raise
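    # Illustrative sketch (hypothetical values; only the keys read by the download
    # and file-processing code above are shown, and a real fence config entry may
    # carry more fields):
    #
    #   dbgap_config = {
    #       "info": {
    #           "host": "ftp.example.org",
    #           "username": "dbgap-user",
    #           "encrypted": True,
    #       },
    #       "protocol": "sftp",
    #   }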

    def _sync(self, sess):
        """
        Collect files from dbgap server(s), sync csv and yaml files to storage
        backend and fence DB
        """

        # get all dbgap files
        user_projects = {}
        user_info = {}
        if self.is_sync_from_dbgap_server:
            self.logger.debug(
                "Pulling telemetry files from {} dbgap sftp servers".format(
                    len(self.dbGaP)
                )
            )
            user_projects, user_info = self._merge_multiple_dbgap_sftp(self.dbGaP, sess)

        local_csv_file_list = []
        if self.sync_from_local_csv_dir:
            local_csv_file_list = glob.glob(
                os.path.join(self.sync_from_local_csv_dir, "*")
            )
            # Sort the list so the order of files is consistent across platforms
            local_csv_file_list.sort()

        user_projects_csv, user_info_csv = self._merge_multiple_local_csv_files(
            local_csv_file_list,
            encrypted=False,
            session=sess,
            dbgap_configs=self.dbGaP,
        )

        try:
            user_yaml = UserYAML.from_file(
                self.sync_from_local_yaml_file, encrypted=False, logger=self.logger
            )
        except (EnvironmentError, AssertionError) as e:
            self.logger.error(str(e))
            self.logger.error("aborting early")
            raise

        # parse all projects
        user_projects_csv = self.parse_projects(user_projects_csv)
        user_projects = self.parse_projects(user_projects)
        user_yaml.projects = self.parse_projects(user_yaml.projects)

        # merge all user info dicts into "user_info".
        # the user info (such as email) in the user.yaml files
        # overrides the user info from the CSV files.
        self.sync_two_user_info_dict(user_info_csv, user_info)
        self.sync_two_user_info_dict(user_yaml.user_info, user_info)

        # merge all access info dicts into "user_projects".
        # the access info is combined - if the user.yaml access is
        # ["read"] and the CSV file access is ["read-storage"], the
        # resulting access is ["read", "read-storage"].
        self.sync_two_phsids_dict(
            user_projects_csv, user_projects, source1="local_csv", source2="dbgap"
        )
        self.sync_two_phsids_dict(
            user_yaml.projects, user_projects, source1="user_yaml", source2="dbgap"
        )

        # Note: if there are multiple dbgap sftp servers configured,
        # this parameter is always from the config for the first dbgap sftp server,
        # not any additional ones
        for dbgap_config in self.dbGaP:
            if self._get_parse_consent_code(dbgap_config):
                self._grant_all_consents_to_c999_users(
                    user_projects, user_yaml.project_to_resource
                )

        google_update_ex = None

        try:
            # update the Fence DB
            if user_projects:
                self.logger.info("Sync to db and storage backend")
                self.sync_to_db_and_storage_backend(user_projects, user_info, sess)
                self.logger.info("Finish syncing to db and storage backend")
            else:
                self.logger.info("No users for syncing")
        except GoogleUpdateException as ex:
            # save this to reraise later after all non-Google syncing has finished
            # this way, any issues with Google only affect Google data access and don't
            # cascade problems into non-Google AWS or Azure access
            google_update_ex = ex

        # update the Arborist DB (resources, roles, policies, groups)
        if user_yaml.authz:
            if not self.arborist_client:
                raise EnvironmentError(
                    "yaml file contains authz section but sync is not configured with"
                    " arborist client--did you run sync with --arborist <arborist client> arg?"
                )
            self.logger.info("Synchronizing arborist...")
            success = self._update_arborist(sess, user_yaml)
            if success:
                self.logger.info("Finished synchronizing arborist")
            else:
                self.logger.error("Could not synchronize successfully")
                exit(1)
        else:
            self.logger.info("No `authz` section; skipping arborist sync")

        # update the Arborist DB (user access)
        if self.arborist_client:
            self.logger.info("Synchronizing arborist with authorization info...")
            success = self._update_authz_in_arborist(sess, user_projects, user_yaml)
            if success:
                self.logger.info(
                    "Finished synchronizing authorization info to arborist"
                )
            else:
                self.logger.error(
                    "Could not synchronize authorization info successfully to arborist"
                )
                exit(1)
        else:
            self.logger.error("No arborist client set; skipping arborist sync")

        # Logging authz source
        for u, s in self.auth_source.items():
            self.logger.info("Access for user {} from {}".format(u, s))

        self.logger.info(
            f"Persisting authz mapping to database: {user_yaml.project_to_resource}"
        )
        user_yaml.persist_project_to_resource(db_session=sess)
        if google_update_ex is not None:
            raise google_update_ex

    def _grant_all_consents_to_c999_users(
        self, user_projects, user_yaml_project_to_resources
    ):
        access_number_matcher = re.compile(config["DBGAP_ACCESSION_WITH_CONSENT_REGEX"])
        # combine dbgap/user.yaml projects into one big list (in case not all consents
        # are in either)
        all_projects = set(
            list(self._projects.keys()) + list(user_yaml_project_to_resources.keys())
        )

        self.logger.debug(f"all projects: {all_projects}")

        # construct a mapping from phsid (without consent) to all accessions with consent
        consent_mapping = {}
        for project in all_projects:
            phs_match = access_number_matcher.match(project)
            if phs_match:
                accession_number = phs_match.groupdict()

                # TODO: This is not handling the .v1.p1 at all
                consent_mapping.setdefault(accession_number["phsid"], set()).add(
                    ".".join([accession_number["phsid"], accession_number["consent"]])
                )
                children = self._get_children(accession_number["phsid"])
                if children:
                    for child_phs in children:
                        consent_mapping.setdefault(child_phs, set()).add(
                            ".".join(
                                [child_phs, accession_number["consent"]]
                            )  # Assign parent consent to child study
                        )

        self.logger.debug(f"consent mapping: {consent_mapping}")

        # go through existing access and find any c999's and make sure to give access to
        # all accessions with consent for that phsid
        for username, user_project_info in copy.deepcopy(user_projects).items():
            for project, _ in user_project_info.items():
                phs_match = access_number_matcher.match(project)
                if phs_match and phs_match.groupdict()["consent"] == "c999":
                    # give access to all consents
                    all_phsids_with_consent = consent_mapping.get(
                        phs_match.groupdict()["phsid"], []
                    )
                    self.logger.info(
                        f"user {username} has c999 consent group for: {project}. "
                        f"Granting access to all consents: {all_phsids_with_consent}"
                    )
                    # NOTE: Only giving read-storage at the moment (this is the same
                    #       permission we give for other dbgap projects)
                    for phsid_with_consent in all_phsids_with_consent:
                        user_projects[username].update(
                            {phsid_with_consent: {"read-storage", "read"}}
                        )
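    # Illustrative sketch (not part of the original module): with hypothetical
    # projects {"phs000123.c1", "phs000123.c2", "phs000123.c999"}, consent_mapping
    # maps "phs000123" to all three accessions, so a user holding "phs000123.c999"
    # ends up with {"read", "read-storage"} on every known "phs000123.cN" accession.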

    def _update_arborist(self, session, user_yaml):
        """
        Create roles, resources, policies, groups in arborist from the information in
        ``user_yaml``.

        The projects are sent to arborist as resources with paths like
        ``/projects/{project}``. Roles are created with just the original names
        for the privileges like ``"read-storage", "read"`` etc.

        Args:
            session (sqlalchemy.Session)
            user_yaml (UserYAML)

        Return:
            bool: success
        """
        healthy = self._is_arborist_healthy()
        if not healthy:
            return False

        # Set up the resource tree in arborist by combining provided resources with any
        # dbgap resources that were created before this.
        #
        # Why add dbgap resources if they've already been created?
        #   B/C Arborist's PUT update will override existing subresources. So if a dbgap
        #   resource was created under `/programs/phs000178`, anything provided in
        #   user.yaml under `/programs` would completely wipe it out.
        resources = user_yaml.authz.get("resources", [])

        dbgap_resource_paths = []
        for path_list in self._dbgap_study_to_resources.values():
            dbgap_resource_paths.extend(path_list)

        self.logger.debug("user_yaml resources: {}".format(resources))
        self.logger.debug("dbgap resource paths: {}".format(dbgap_resource_paths))

        combined_resources = utils.combine_provided_and_dbgap_resources(
            resources, dbgap_resource_paths
        )

        for resource in combined_resources:
            try:
                self.logger.debug(
                    "attempting to update arborist resource: {}".format(resource)
                )
                self.arborist_client.update_resource("/", resource, merge=True)
            except ArboristError as e:
                self.logger.error(e)
                # keep going; maybe just some conflicts from things existing already

        # update roles
        roles = user_yaml.authz.get("roles", [])
        for role in roles:
            try:
                response = self.arborist_client.update_role(role["id"], role)
                if response:
                    self._created_roles.add(role["id"])
            except ArboristError as e:
                self.logger.info(
                    "couldn't update role '{}', creating instead".format(str(e))
                )
                try:
                    response = self.arborist_client.create_role(role)
                    if response:
                        self._created_roles.add(role["id"])
                except ArboristError as e:
                    self.logger.error(e)
                    # keep going; maybe just some conflicts from things existing already

        # update policies
        policies = user_yaml.authz.get("policies", [])
        for policy in policies:
            policy_id = policy.pop("id")
            try:
                self.logger.debug(
                    "Trying to upsert policy with id {}".format(policy_id)
                )
                response = self.arborist_client.update_policy(
                    policy_id, policy, create_if_not_exist=True
                )
            except ArboristError as e:
                self.logger.error(e)
                # keep going; maybe just some conflicts from things existing already
            else:
                if response:
                    self.logger.debug("Upserted policy with id {}".format(policy_id))
                    self._created_policies.add(policy_id)

        # update groups
        groups = user_yaml.authz.get("groups", [])

        # delete from arborist the groups that have been deleted
        # from the user.yaml
        arborist_groups = set(
            g["name"] for g in self.arborist_client.list_groups().get("groups", [])
        )
        useryaml_groups = set(g["name"] for g in groups)
        for deleted_group in arborist_groups.difference(useryaml_groups):
            # do not try to delete built in groups
            if deleted_group not in ["anonymous", "logged-in"]:
                self.arborist_client.delete_group(deleted_group)

        # create/update the groups defined in the user.yaml
        for group in groups:
            missing = {"name", "users", "policies"}.difference(set(group.keys()))
            if missing:
                name = group.get("name", "{MISSING NAME}")
                self.logger.error(
                    "group {} missing required field(s): {}".format(name, list(missing))
                )
                continue
            try:
                response = self.arborist_client.put_group(
                    group["name"],
                    # Arborist doesn't handle group descriptions yet
                    # description=group.get("description", ""),
                    users=group["users"],
                    policies=group["policies"],
                )
            except ArboristError as e:
                self.logger.info("couldn't put group: {}".format(str(e)))

        # Update policies for built-in (`anonymous` and `logged-in`) groups

        # First recreate these groups in order to clear out old, possibly deleted policies
        for builtin_group in ["anonymous", "logged-in"]:
            try:
                response = self.arborist_client.put_group(builtin_group)
            except ArboristError as e:
                self.logger.info("couldn't put group: {}".format(str(e)))

        # Now add back policies that are in the user.yaml
        for policy in user_yaml.authz.get("anonymous_policies", []):
            self.arborist_client.grant_group_policy("anonymous", policy)

        for policy in user_yaml.authz.get("all_users_policies", []):
            self.arborist_client.grant_group_policy("logged-in", policy)

        return True
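    # Illustrative sketch (hypothetical user.yaml fragment; only the authz keys
    # consumed above are shown, and the exact shape of each entry may differ in a
    # real user.yaml):
    #
    #   authz:
    #     resources:
    #       - name: programs
    #         subresources:
    #           - name: DEV
    #     roles:
    #       - id: read
    #         # a real role also carries its "permissions" list, passed through
    #         # to Arborist unchanged
    #     policies:
    #       - id: dev_reader
    #         role_ids: [read]
    #         resource_paths: [/programs/DEV]
    #     groups:
    #       - name: dev_readers
    #         users: [someuser@example.com]
    #         policies: [dev_reader]
    #     anonymous_policies: []
    #     all_users_policies: []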

    def _revoke_all_policies_preserve_mfa(self, username, idp=None):
        """
        If MFA is enabled for the user's idp, check if they have the /multifactor_auth resource and restore the
        mfa_policy after revoking all policies.
        """
        user_data_from_arborist = None
        try:
            user_data_from_arborist = self.arborist_client.get_user(username)
        except ArboristError:
            # user doesn't exist in Arborist, nothing to revoke
            return

        is_mfa_enabled = "multifactor_auth_claim_info" in config["OPENID_CONNECT"].get(
            idp, {}
        )
        if not is_mfa_enabled:
            # TODO This should be a diff, not a revocation of all policies.
            self.arborist_client.revoke_all_policies_for_user(username)
            return

        policies = []
        try:
            policies = user_data_from_arborist["policies"]
        except Exception as e:
            self.logger.error(
                f"Could not retrieve user's policies, revoking all policies anyway. {e}"
            )
        finally:
            # TODO This should be a diff, not a revocation of all policies.
            self.arborist_client.revoke_all_policies_for_user(username)

        if "mfa_policy" in policies:
            status_code = self.arborist_client.grant_user_policy(username, "mfa_policy")
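    # Illustrative sketch (hypothetical fence-config fragment): the MFA check above
    # only tests for the presence of "multifactor_auth_claim_info" under the user's
    # idp in OPENID_CONNECT, e.g.
    #
    #   OPENID_CONNECT:
    #     some_idp:
    #       multifactor_auth_claim_info:
    #         claim: acr
    #         values: ["mfa"]
    #
    # Everything below "multifactor_auth_claim_info" is an assumption; this method
    # only checks that the key exists.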

    def _update_authz_in_arborist(
        self,
        session,
        user_projects,
        user_yaml=None,
        single_user_sync=False,
        expires=None,
    ):
        """
        Assign users policies in arborist from the information in
        ``user_projects`` and optionally a ``user_yaml``.

        The projects are sent to arborist as resources with paths like
        ``/projects/{project}``. Roles are created with just the original names
        for the privileges like ``"read-storage", "read"`` etc.

        Args:
            user_projects (dict)
            user_yaml (UserYAML) optional, if there are policies for users in a user.yaml
            single_user_sync (bool) whether authz update is for a single user
            expires (int) time at which authz info in Arborist should expire

        Return:
            bool: success
        """
        healthy = self._is_arborist_healthy()
        if not healthy:
            return False

        self.logger.debug("user_projects: {}".format(user_projects))

        if user_yaml:
            self.logger.debug(
                "useryaml abac before lowering usernames: {}".format(
                    user_yaml.user_abac
                )
            )
            user_yaml.user_abac = {
                key.lower(): value for key, value in user_yaml.user_abac.items()
            }
            # update the project info with `projects` specified in user.yaml
            self.sync_two_phsids_dict(user_yaml.user_abac, user_projects)

        # get list of users from arborist to make sure users that are completely removed
        # from authorization sources get policies revoked
        arborist_user_projects = {}
        if not single_user_sync:
            try:
                arborist_users = self.arborist_client.get_users().json["users"]

                # construct user information, NOTE the lowering of the username. when adding/
                # removing access, the case in the Fence db is used. For combining access, it is
                # case-insensitive, so we lower
                arborist_user_projects = {
                    user["name"].lower(): {} for user in arborist_users
                }
            except (ArboristError, KeyError, AttributeError) as error:
                # TODO usersync should probably exit with non-zero exit code at the end,
                #      but sync should continue from this point so there are no partial
                #      updates
                self.logger.warning(
                    "Could not get list of users in Arborist, continuing anyway. "
                    "WARNING: this sync will NOT remove access for users no longer in "
                    f"authorization sources. Error: {error}"
                )

            # update the project info with users from arborist
            self.sync_two_phsids_dict(arborist_user_projects, user_projects)

        policy_id_list = []
        policies = []

        # prefer in-memory if available from user_yaml, if not, get from database
        if user_yaml and user_yaml.project_to_resource:
            project_to_authz_mapping = user_yaml.project_to_resource
            self.logger.debug(
                f"using in-memory project to authz resource mapping from "
                f"user.yaml (instead of database): {project_to_authz_mapping}"
            )
        else:
            project_to_authz_mapping = get_project_to_authz_mapping(session)
            self.logger.debug(
                f"using persisted project to authz resource mapping from database "
                f"(instead of user.yaml - as it may not be available): {project_to_authz_mapping}"
            )

        self.logger.debug(
            f"_dbgap_study_to_resources: {self._dbgap_study_to_resources}"
        )
        all_resources = [
            r
            for resources in self._dbgap_study_to_resources.values()
            for r in resources
        ]
        all_resources.extend(r for r in project_to_authz_mapping.values())
        self._create_arborist_resources(all_resources)

        for username, user_project_info in user_projects.items():
            self.logger.info("processing user `{}`".format(username))
            user = query_for_user(session=session, username=username)
            idp = None
            if user:
                username = user.username
                idp = user.identity_provider.name if user.identity_provider else None

            self.arborist_client.create_user_if_not_exist(username)
            if not single_user_sync:
                self._revoke_all_policies_preserve_mfa(username, idp)

            # as of 2/11/2022, for single_user_sync, as RAS visa parsing has
            # previously mapped each project to the same set of privileges
            # (i.e. {'read', 'read-storage'}), unique_policies will just be a
            # single policy with ('read', 'read-storage') being the single
            # key
            unique_policies = self._determine_unique_policies(
                user_project_info, project_to_authz_mapping
            )

            for roles in unique_policies.keys():
                for role in roles:
                    self._create_arborist_role(role)

            if single_user_sync:
                for ordered_roles, ordered_resources in unique_policies.items():
                    policy_hash = self._hash_policy_contents(
                        ordered_roles, ordered_resources
                    )
                    self._create_arborist_policy(
                        policy_hash,
                        ordered_roles,
                        ordered_resources,
                        skip_if_exists=True,
                    )
                    # return here as it is not expected single_user_sync
                    # will need any of the remaining user_yaml operations
                    # left in _update_authz_in_arborist
                    return self._grant_arborist_policy(
                        username, policy_hash, expires=expires
                    )
            else:
                for roles, resources in unique_policies.items():
                    for role in roles:
                        for resource in resources:
                            # grant a policy to this user which is a single
                            # role on a single resource

                            # format project '/x/y/z' -> 'x.y.z'
                            # so the policy id will be something like 'x.y.z-create'
                            policy_id = _format_policy_id(resource, role)
                            if policy_id not in self._created_policies:
                                try:
                                    self.arborist_client.update_policy(
                                        policy_id,
                                        {
                                            "description": "policy created by fence sync",
                                            "role_ids": [role],
                                            "resource_paths": [resource],
                                        },
                                        create_if_not_exist=True,
                                    )
                                except ArboristError as e:
                                    self.logger.info(
                                        "not creating policy in arborist; {}".format(
                                            str(e)
                                        )
                                    )
                                self._created_policies.add(policy_id)

                            self._grant_arborist_policy(
                                username, policy_id, expires=expires
                            )

            if user_yaml:
                for policy in user_yaml.policies.get(username, []):
                    self.arborist_client.grant_user_policy(
                        username,
                        policy,
                        expires_at=expires,
                    )

        if user_yaml:
            for client_name, client_details in user_yaml.clients.items():
                client_policies = client_details.get("policies", [])
                clients = session.query(Client).filter_by(name=client_name).all()
                # update existing clients, do not create new ones
                if not clients:
                    self.logger.warning(
                        "client to update (`{}`) does not exist in fence: skipping".format(
                            client_name
                        )
                    )
                    continue
                self.logger.debug(
                    "updating client `{}` (found {} client IDs)".format(
                        client_name, len(clients)
                    )
                )
                # there may be more than 1 client with this name if credentials are being rotated,
                # so we grant access to each client ID
                for client in clients:
                    try:
                        self.arborist_client.update_client(
                            client.client_id, client_policies
                        )
                    except ArboristError as e:
                        self.logger.info(
                            "not granting policies {} to client `{}` (`{}`); {}".format(
                                client_policies, client_name, client.client_id, str(e)
                            )
                        )

        return True

    def _determine_unique_policies(self, user_project_info, project_to_authz_mapping):
        """
        Determine and return a dictionary of unique policies.

        Args (examples):
            user_project_info (dict):
            {
                'phs000002.c1': { 'read-storage', 'read' },
                'phs000001.c1': { 'read', 'read-storage' },
                'phs000004.c1': { 'write', 'read' },
                'phs000003.c1': { 'read', 'write' },
                'phs000006.c1': { 'write-storage', 'write', 'read-storage', 'read' },
                'phs000005.c1': { 'read', 'read-storage', 'write', 'write-storage' },
            }
            project_to_authz_mapping (dict):
            {
                'phs000001.c1': '/programs/DEV/projects/phs000001.c1'
            }

        Return (for examples):
            dict:
            {
                ('read', 'read-storage'): ('phs000001.c1', 'phs000002.c1'),
                ('read', 'write'): ('phs000003.c1', 'phs000004.c1'),
                ('read', 'read-storage', 'write', 'write-storage'): ('phs000005.c1', 'phs000006.c1'),
            }
        """
        roles_to_resources = collections.defaultdict(list)
        for study, roles in user_project_info.items():
            ordered_roles = tuple(sorted(roles))
            study_authz_paths = self._dbgap_study_to_resources.get(study, [study])
            if study in project_to_authz_mapping:
                study_authz_paths = [project_to_authz_mapping[study]]
            roles_to_resources[ordered_roles].extend(study_authz_paths)

        policies = {}
        for ordered_roles, unordered_resources in roles_to_resources.items():
            policies[ordered_roles] = tuple(sorted(unordered_resources))
        return policies
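    # Illustrative sketch (not part of the original module): in the non-single-user
    # path of _update_authz_in_arborist, each (role, resource) pair from these
    # policies becomes its own Arborist policy id via _format_policy_id, e.g.
    #
    #   _format_policy_id("/programs/DEV/projects/phs000001.c1", "read")
    #   == "programs.DEV.projects.phs000001.c1-read"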

    def _create_arborist_role(self, role):
        """
        Wrapper around gen3authz's create_role with additional logging

        Args:
            role (str): what the Arborist identity should be of the created role

        Return:
            bool: True if the role was created successfully or it already
                  exists. False otherwise
        """
        if role in self._created_roles:
            return True
        try:
            response_json = self.arborist_client.create_role(
                arborist_role_for_permission(role)
            )
        except ArboristError as e:
            self.logger.error(
                "could not create `{}` role in Arborist: {}".format(role, e)
            )
            return False
        self._created_roles.add(role)

        if response_json is None:
            self.logger.info("role `{}` already exists in Arborist".format(role))
        else:
            self.logger.info("created role `{}` in Arborist".format(role))
        return True

    def _create_arborist_resources(self, resources):
        """
        Create resources in Arborist

        Args:
            resources (list): a list of full Arborist resource paths to create
            [
                "/programs/DEV/projects/phs000001.c1",
                "/programs/DEV/projects/phs000002.c1",
                "/programs/DEV/projects/phs000003.c1"
            ]

        Return:
            bool: True if the resources were successfully created, False otherwise


        As of 2/11/2022, for resources above,
        utils.combine_provided_and_dbgap_resources({}, resources) returns:
        [
            { 'name': 'programs', 'subresources': [
                { 'name': 'DEV', 'subresources': [
                    { 'name': 'projects', 'subresources': [
                        { 'name': 'phs000001.c1', 'subresources': []},
                        { 'name': 'phs000002.c1', 'subresources': []},
                        { 'name': 'phs000003.c1', 'subresources': []}
                    ]}
                ]}
            ]}
        ]
        Because this list has a single object, only a single network request gets
        sent to Arborist.

        However, for resources = ["/phs000001.c1", "/phs000002.c1", "/phs000003.c1"],
        utils.combine_provided_and_dbgap_resources({}, resources) returns:
        [
            {'name': 'phs000001.c1', 'subresources': []},
            {'name': 'phs000002.c1', 'subresources': []},
            {'name': 'phs000003.c1', 'subresources': []}
        ]
        Because this list has 3 objects, 3 network requests get sent to Arborist.

        As a practical matter, for sync_single_user_visas, studies
        should be nested under the `/programs` resource as in the former
        example (i.e. only one network request gets made).

        TODO for the sake of simplicity, it would be nice if only one network
        request was made no matter the input.
        """
        for request_body in utils.combine_provided_and_dbgap_resources({}, resources):
            try:
                response_json = self.arborist_client.update_resource(
                    "/", request_body, merge=True
                )
            except ArboristError as e:
                self.logger.error(
                    "could not create Arborist resources using request body `{}`. error: {}".format(
                        request_body, e
                    )
                )
                return False

        self.logger.debug(
            "created {} resource(s) in Arborist: `{}`".format(len(resources), resources)
        )
        return True

    def _create_arborist_policy(
        self, policy_id, roles, resources, skip_if_exists=False
    ):
        """
        Wrapper around gen3authz's create_policy with additional logging

        Args:
            policy_id (str): what the Arborist identity should be of the created policy
            roles (iterable): what roles the created policy should have
            resources (iterable): what resources the created policy should have
            skip_if_exists (bool): if True, this function will not treat an already
                                   existent policy as an error

        Return:
            bool: True if policy creation was successful. False otherwise
        """
        try:
            response_json = self.arborist_client.create_policy(
                {
                    "id": policy_id,
                    "role_ids": roles,
                    "resource_paths": resources,
                },
                skip_if_exists=skip_if_exists,
            )
        except ArboristError as e:
            self.logger.error(
                "could not create policy `{}` in Arborist: {}".format(policy_id, e)
            )
            return False

        if response_json is None:
            self.logger.info("policy `{}` already exists in Arborist".format(policy_id))
        else:
            self.logger.info("created policy `{}` in Arborist".format(policy_id))
        return True

    def _hash_policy_contents(self, ordered_roles, ordered_resources):
        """
        Generate a sha256 hexdigest representing ordered_roles and ordered_resources.

        Args:
            ordered_roles (iterable): policy roles in sorted order
            ordered_resources (iterable): policy resources in sorted order

        Return:
            str: SHA256 hex digest
        """

        def escape(s):
            return s.replace(",", "\\,")

        canonical_roles = ",".join(escape(r) for r in ordered_roles)
        canonical_resources = ",".join(escape(r) for r in ordered_resources)
        canonical_policy = f"{canonical_roles},,f{canonical_resources}"
        policy_hash = hashlib.sha256(canonical_policy.encode("utf-8")).hexdigest()

        return policy_hash
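    # Illustrative sketch (not part of the original module): for ordered_roles
    # ("read", "read-storage") and ordered_resources
    # ("/programs/DEV/projects/phs000001.c1",), the canonical string built by the
    # code as written is
    #
    #   "read,read-storage,,f/programs/DEV/projects/phs000001.c1"
    #
    # (note the literal "f" contributed by the f-string above); the policy id is
    # the SHA256 hex digest of that string.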

    def _grant_arborist_policy(self, username, policy_id, expires=None):
        """
        Wrapper around gen3authz's grant_user_policy with additional logging

        Args:
            username (str): username of the user in Arborist to whom the policy
                            should be granted
            policy_id (str): Arborist policy id
            expires (int): POSIX timestamp for when policy should expire

        Return:
            bool: True if granting of policy was successful, False otherwise
        """
        try:
            response_json = self.arborist_client.grant_user_policy(
                username,
                policy_id,
                expires_at=expires,
            )
        except ArboristError as e:
            self.logger.error(
                "could not grant policy `{}` to user `{}`: {}".format(
                    policy_id, username, e
                )
            )
            return False

        self.logger.debug(
            "granted policy `{}` to user `{}`".format(policy_id, username)
        )
        return True

    def _determine_arborist_resource(self, dbgap_study, dbgap_config):
        """
        Determine the arborist resource path(s) and add them to
        self._dbgap_study_to_resources

        Args:
            dbgap_study (str): study phs identifier
            dbgap_config (dict): dictionary of config for dbgap server

        """
        default_namespaces = dbgap_config.get("study_to_resource_namespaces", {}).get(
            "_default", ["/"]
        )
        namespaces = dbgap_config.get("study_to_resource_namespaces", {}).get(
            dbgap_study, default_namespaces
        )

        self.logger.debug(f"dbgap study namespaces: {namespaces}")

        arborist_resource_namespaces = [
            namespace.rstrip("/") + "/programs/" for namespace in namespaces
        ]

        for resource_namespace in arborist_resource_namespaces:
            full_resource_path = resource_namespace + dbgap_study
            if dbgap_study not in self._dbgap_study_to_resources:
                self._dbgap_study_to_resources[dbgap_study] = []
            self._dbgap_study_to_resources[dbgap_study].append(full_resource_path)
        return arborist_resource_namespaces
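    # Illustrative sketch (hypothetical dbgap_config fragment):
    #
    #   study_to_resource_namespaces:
    #     _default: ["/"]
    #     phs000123: ["/orgA/"]
    #
    # With that config, _determine_arborist_resource("phs000123", dbgap_config)
    # records "/orgA/programs/phs000123" in self._dbgap_study_to_resources, while
    # an unlisted study falls back to "/programs/<phsid>".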

    def _is_arborist_healthy(self):
        if not self.arborist_client:
            self.logger.warning("no arborist client set; skipping arborist dbgap sync")
            return False
        if not self.arborist_client.healthy():
            # TODO (rudyardrichter, 2019-01-07): add backoff/retry here
            self.logger.error(
                "arborist service is unavailable; skipping main arborist dbgap sync"
            )
            return False
        return True

    def _pick_sync_type(self, visa):
        """
        Pick type of visa to parse according to the visa provider
        """
        sync_client = None
        if visa.type in self.visa_types["ras"]:
            sync_client = self.ras_sync_client
        else:
            raise Exception(
                "Visa type {} not recognized. Configure in fence-config".format(
                    visa.type
                )
            )
        if not sync_client:
            raise Exception("Sync client for {} not configured".format(visa.type))

        return sync_client

    def sync_single_user_visas(
        self, user, ga4gh_visas, sess=None, expires=None, skip_google_updates=False
    ):
        """
        Sync a single user's visas during login or DRS/data access

        IMPORTANT NOTE: THIS DOES NOT VALIDATE THE VISA. ENSURE THIS IS DONE
                        BEFORE THIS.

        Args:
            user (userdatamodel.user.User): Fence user whose visas'
                                            authz info is being synced
            ga4gh_visas (list): a list of fence.models.GA4GHVisaV1 objects
                                that are ALREADY VALIDATED
            sess (sqlalchemy.orm.session.Session): database session
            expires (int): time at which synced Arborist policies and
                           inclusion in any GBAG are set to expire
            skip_google_updates (bool): True if google group updates should be skipped. False otherwise.

        Return:
            list of successfully parsed visas
        """
        self.ras_sync_client = RASVisa(logger=self.logger)
        dbgap_config = self.dbGaP[0]
        parse_consent_code = self._get_parse_consent_code(dbgap_config)
        enable_common_exchange_area_access = dbgap_config.get(
            "enable_common_exchange_area_access", False
        )
        study_common_exchange_areas = dbgap_config.get(
            "study_common_exchange_areas", {}
        )

        try:
            user_yaml = UserYAML.from_file(
                self.sync_from_local_yaml_file, encrypted=False, logger=self.logger
            )
        except (EnvironmentError, AssertionError) as e:
            self.logger.error(str(e))
            self.logger.error("aborting early")
            raise

        user_projects = dict()
        projects = {}
        info = {}
        parsed_visas = []

        for visa in ga4gh_visas:
            project = {}
            visa_type = self._pick_sync_type(visa)
            encoded_visa = visa.ga4gh_visa

            try:
                project, info = visa_type._parse_single_visa(
                    user,
                    encoded_visa,
                    visa.expires,
                    parse_consent_code,
                )
            except Exception:
                self.logger.warning(
                    f"ignoring unsuccessfully parsed or expired visa: {encoded_visa}"
                )
                continue

            projects = {**projects, **project}
            parsed_visas.append(visa)

        info["user_id"] = user.id
        info["username"] = user.username
        user_projects[user.username] = projects

        user_projects = self.parse_projects(user_projects)

        if parse_consent_code and enable_common_exchange_area_access:
            self.logger.info(
                f"using study to common exchange area mapping: {study_common_exchange_areas}"
            )

        self._process_user_projects(
            user_projects,
            enable_common_exchange_area_access,
            study_common_exchange_areas,
            dbgap_config,
            sess,
        )

        if parse_consent_code:
            self._grant_all_consents_to_c999_users(
                user_projects, user_yaml.project_to_resource
            )

        if user_projects:
            self.logger.info("Sync to storage backend [sync_single_user_visas]")
            self.sync_to_storage_backend(
                user_projects,
                info,
                sess,
                expires=expires,
                skip_google_updates=skip_google_updates,
            )
        else:
            self.logger.info("No users for syncing")

        # update arborist db (user access)
        if self.arborist_client:
            self.logger.info("Synchronizing arborist with authorization info...")
            success = self._update_authz_in_arborist(
                sess,
                user_projects,
                user_yaml=user_yaml,
                single_user_sync=True,
                expires=expires,
            )
            if success:
                self.logger.info(
                    "Finished synchronizing authorization info to arborist"
                )
            else:
                self.logger.error(
                    "Could not synchronize authorization info successfully to arborist"
                )
        else:
            self.logger.error("No arborist client set; skipping arborist sync")

        return parsed_visas