SwissDataScienceCenter / renku-python, build 6875247711 (15 Nov 2023 09:16AM UTC)
Pull Request #3300: chore: do not always retry load tests requests (merge e2d3269e8 into 4726f660e)
Coverage: 82.786% (-0.05% from 82.831%); 25441 of 30731 relevant lines covered, 3.12 hits per line

Source file: /renku/core/dataset/providers/dataverse.py (88.86% covered)

# Copyright Swiss Data Science Center (SDSC). A partnership between
# École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataverse API integration."""

import json
import posixpath
import re
import urllib
from pathlib import Path
from string import Template
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from urllib import parse as urlparse

from renku.core import errors
from renku.core.config import get_value, set_value
from renku.core.dataset.providers.api import (
    ExporterApi,
    ExportProviderInterface,
    ImportProviderInterface,
    ProviderApi,
    ProviderPriority,
)
from renku.core.dataset.providers.dataverse_metadata_templates import (
    AUTHOR_METADATA_TEMPLATE,
    CONTACT_METADATA_TEMPLATE,
    DATASET_METADATA_TEMPLATE,
    KEYWORDS_METADATA_TEMPLATE,
)
from renku.core.dataset.providers.doi import DOIProvider
from renku.core.dataset.providers.repository import RepositoryImporter, make_request
from renku.core.util import communication
from renku.core.util.datetime8601 import fix_datetime
from renku.core.util.doi import extract_doi, get_doi_url, is_doi
from renku.core.util.urls import remove_credentials
from renku.domain_model.project_context import project_context

if TYPE_CHECKING:
    from renku.core.dataset.providers.models import ProviderDataset, ProviderParameter
    from renku.domain_model.dataset import Dataset, DatasetTag

DATAVERSE_API_PATH = "api/v1"

DATAVERSE_VERSION_API = "info/version"
DATAVERSE_METADATA_API = "datasets/export"
DATAVERSE_VERSIONS_API = "datasets/:persistentId/versions"
DATAVERSE_FILE_API = "access/datafile/:persistentId/"
DATAVERSE_EXPORTER = "schema.org"

DATAVERSE_SUBJECTS = [
    "Agricultural Sciences",
    "Arts and Humanities",
    "Astronomy and Astrophysics",
    "Business and Management",
    "Chemistry",
    "Computer and Information Science",
    "Earth and Environmental Sciences",
    "Engineering",
    "Law",
    "Mathematical Sciences",
    "Medicine, Health and Life Sciences",
    "Physics",
    "Social Sciences",
    "Other",
]


class DataverseProvider(ProviderApi, ExportProviderInterface, ImportProviderInterface):
    """Dataverse API provider."""

    priority = ProviderPriority.HIGH
    name = "Dataverse"
    is_remote = True

    def __init__(self, uri: str, is_doi: bool = False):
        super().__init__(uri=uri)

        self.is_doi = is_doi
        self._server_url = None
        self._dataverse_name = None
        self._publish: bool = False

    @staticmethod
    def supports(uri):
        """Check if provider supports a given URI."""
        is_doi_ = is_doi(uri)

        is_dataverse_uri = is_doi_ is None and check_dataverse_uri(uri)
        is_dataverse_doi = is_doi_ and check_dataverse_doi(is_doi_.group(0))

        return is_dataverse_uri or is_dataverse_doi

    @staticmethod
    def get_export_parameters() -> List["ProviderParameter"]:
        """Returns parameters that can be set for export."""
        from renku.core.dataset.providers.models import ProviderParameter

        return [
            ProviderParameter("dataverse-server", help="Dataverse server URL.", type=str),
            ProviderParameter("dataverse-name", help="Dataverse name to export to.", type=str),
            ProviderParameter("publish", help="Publish the exported dataset.", is_flag=True),
        ]

    @staticmethod
    def record_id(uri):
        """Extract record id from URI."""
        parsed = urlparse.urlparse(uri)
        return urlparse.parse_qs(parsed.query)["persistentId"][0]

    def get_importer(self, **kwargs) -> "DataverseImporter":
        """Get importer for a record from Dataverse.

        Returns:
            DataverseImporter: The found record
        """

        def get_export_uri(uri):
            """Gets a dataverse api export URI from a dataverse entry."""
            record_id = DataverseProvider.record_id(uri)
            return make_records_url(record_id, uri)

        uri = self.uri
        if self.is_doi:
            doi = DOIProvider(uri=uri).get_importer()
            uri = doi.uri

        uri = get_export_uri(uri)
        response = make_request(uri)

        return DataverseImporter(json=response.json(), uri=uri, original_uri=self.uri)

    def get_exporter(
        self,
        dataset: "Dataset",
        *,
        tag: Optional["DatasetTag"],
        dataverse_server: Optional[str] = None,
        dataverse_name: Optional[str] = None,
        publish: bool = False,
        **kwargs,
    ) -> "ExporterApi":
        """Create export manager for given dataset."""

        def set_export_parameters():
            """Set and validate required parameters for exporting for a provider."""

            server = dataverse_server
            config_base_url = "server_url"
            if not server:
                server = get_value("dataverse", config_base_url)
            else:
                set_value("dataverse", config_base_url, server, global_only=True)

            if not server:
                raise errors.ParameterError("Dataverse server URL is required.")

            if not dataverse_name:
                raise errors.ParameterError("Dataverse name is required.")

            self._server_url = server  # type: ignore
            self._dataverse_name = dataverse_name  # type: ignore
            self._publish = publish

        set_export_parameters()
        return DataverseExporter(dataset=dataset, server_url=self._server_url, dataverse_name=self._dataverse_name)

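# Illustrative sketch of the import path (hypothetical identifier, not a real
# record): ``DataverseProvider`` above resolves and fetches the record, and the
# ``DataverseImporter`` defined below deserializes it.
#
#     uri = "doi:10.7910/DVN/EXAMPLE"  # hypothetical DOI
#     if DataverseProvider.supports(uri):
#         importer = DataverseProvider(uri=uri, is_doi=True).get_importer()
#         provider_dataset = importer.fetch_provider_dataset()
#
# ``supports`` resolves DOIs through ``DOIProvider`` and probes the target
# server's ``info/version`` endpoint; ``get_importer`` downloads the record's
# schema.org export and wraps it in a ``DataverseImporter``.

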
class DataverseImporter(RepositoryImporter):
    """Dataverse record serializer."""

    def __init__(self, uri: str, original_uri: str, json: Dict[str, Any]):
        super().__init__(uri=uri, original_uri=original_uri)
        self._json: Dict[str, Any] = json

    def is_latest_version(self):
        """Check if record is at last possible version."""
        return True

    @staticmethod
    def _convert_json_property_name(property_name):
        """Removes '@' and converts names to snake_case."""
        property_name = property_name.strip("@")
        property_name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", property_name)
        return re.sub("([a-z0-9])([A-Z])", r"\1_\2", property_name).lower()

    @property
    def version(self):
        """Get the major and minor version of this dataset."""
        uri = make_versions_url(DataverseProvider.record_id(self._uri), self._uri)
        response = make_request(uri).json()
        newest_version = response["data"][0]
        return "{}.{}".format(newest_version["versionNumber"], newest_version["versionMinorNumber"])

    @property
    def latest_uri(self):
        """Get URI of latest version."""
        return self._uri

    def get_files(self):
        """Get Dataverse files metadata as ``DataverseFileSerializer``."""
        files = []

        for f in self._json["distribution"]:
            mapped_file = {self._convert_json_property_name(k): v for k, v in f.items()}
            mapped_file["parent_url"] = self._uri
            files.append(mapped_file)

        if not files:
            raise LookupError("no files have been found - deposit is empty or protected")

        return [DataverseFileSerializer(**file) for file in files]

    def fetch_provider_dataset(self) -> "ProviderDataset":
        """Deserialize a ``Dataset``."""
        from marshmallow import post_load, pre_load

        from renku.command.schema.agent import PersonSchema
        from renku.core.dataset.providers.models import ProviderDataset, ProviderDatasetFile, ProviderDatasetSchema
        from renku.domain_model.dataset import Url, generate_default_slug

        class DataverseDatasetSchema(ProviderDatasetSchema):
            """Schema for Dataverse datasets."""

            @pre_load
            def fix_data(self, data, **kwargs):
                """Fix data that is received from Dataverse."""
                # Fix context
                context = data.get("@context")
                if context and isinstance(context, str):
                    if context == "http://schema.org":
                        context = "http://schema.org/"
                    data["@context"] = {"@base": context, "@vocab": context}

                # Add type to creators
                creators = data.get("creator", [])
                for c in creators:
                    c["@type"] = [str(t) for t in PersonSchema.opts.rdf_type]

                # Fix license to be a string
                license = data.get("license")
                if license and isinstance(license, dict):
                    data["license"] = license.get("url", "")

                return data

            @post_load
            def fix_timezone(self, obj, **kwargs):
                """Add timezone to datetime object."""
                if obj.get("date_modified"):
                    obj["date_modified"] = fix_datetime(obj["date_modified"])
                if obj.get("date_published"):
                    obj["date_published"] = fix_datetime(obj["date_published"])

                return obj

        files = self.get_files()
        dataset = ProviderDataset.from_jsonld(data=self._json, schema_class=DataverseDatasetSchema)
        dataset.version = self.version
        dataset.slug = generate_default_slug(name=dataset.name or "", version=dataset.version)
        dataset.same_as = (
            Url(url_str=get_doi_url(dataset.identifier))
            if is_doi(dataset.identifier)
            else Url(url_id=remove_credentials(self.original_uri))
        )

        if dataset.description and not dataset.description.strip():
            dataset.description = None

        for creator in dataset.creators:
            if creator.affiliation == "":
                creator.affiliation = None

        self._provider_dataset_files = [
            ProviderDatasetFile(
                source=file.remote_url.geturl(),
                filename=Path(file.name).name,
                checksum="",
                filesize=file.content_size,
                filetype=file.file_format,
                path="",
            )
            for file in files
        ]

        self._provider_dataset = dataset
        return self._provider_dataset

class DataverseFileSerializer:
    """Dataverse record file."""

    def __init__(
        self,
        *,
        content_size=None,
        content_url=None,
        description=None,
        file_format=None,
        id=None,
        identifier=None,
        name=None,
        parent_url=None,
        type=None,
        encoding_format=None,
    ):
        self.content_size = content_size
        self.content_url = content_url
        self.description = description
        self.file_format = file_format
        self.id = id
        self.identifier = identifier
        self.name = name
        self.parent_url = parent_url
        self.type = type
        self.encoding_format = encoding_format

    @property
    def remote_url(self):
        """Get remote URL as ``urllib.ParseResult``."""
        if self.content_url is not None:
            return urllib.parse.urlparse(self.content_url)

        if self.identifier is None:
            return None

        doi = extract_doi(self.identifier)

        if doi is None:
            return None

        file_url = make_file_url("doi:" + doi, self.parent_url)

        return urllib.parse.urlparse(file_url)

class DataverseExporter(ExporterApi):
    """Dataverse export manager."""

    def __init__(self, *, dataset, server_url=None, dataverse_name=None, publish=False):
        super().__init__(dataset)
        self._access_token = None
        self._server_url = server_url
        self._dataverse_name = dataverse_name
        self._publish = publish

    def set_access_token(self, access_token):
        """Set access token."""
        self._access_token = access_token

    def get_access_token_url(self):
        """Endpoint for creation of access token."""
        return urllib.parse.urljoin(self._server_url, "/dataverseuser.xhtml?selectTab=apiTokenTab")

    def export(self, **kwargs):
        """Execute export process."""
        from renku.domain_model.dataset import get_file_path_in_dataset

        deposition = _DataverseDeposition(server_url=self._server_url, access_token=self._access_token)
        metadata = self._get_dataset_metadata()
        response = deposition.create_dataset(dataverse_name=self._dataverse_name, metadata=metadata)
        dataset_pid = response.json()["data"]["persistentId"]
        repository = project_context.repository

        with communication.progress("Uploading files ...", total=len(self.dataset.files)) as progressbar:
            for file in self.dataset.files:
                filepath = repository.copy_content_to_file(path=file.entity.path, checksum=file.entity.checksum)
                path_in_dataset = get_file_path_in_dataset(dataset=self.dataset, dataset_file=file)
                deposition.upload_file(full_path=filepath, path_in_dataset=path_in_dataset)
                progressbar.update()

        if self._publish:
            deposition.publish_dataset()

        return dataset_pid

    def _get_dataset_metadata(self):
        authors, contacts = self._get_creators()
        subject = self._get_subject()
        keywords = self._get_keywords()
        metadata_template = Template(DATASET_METADATA_TEMPLATE)
        metadata = metadata_template.substitute(
            name=_escape_json_string(self.dataset.name),
            authors=json.dumps(authors),
            contacts=json.dumps(contacts),
            description=_escape_json_string(self.dataset.description),
            subject=subject,
            keywords=json.dumps(keywords),
        )
        return json.loads(metadata)

    @staticmethod
    def _get_subject():
        text_prompt = "Subject of this dataset: \n\n"
        text_prompt += "\n".join(f"{s}\t[{i}]" for i, s in enumerate(DATAVERSE_SUBJECTS, start=1))
        text_prompt += "\n\nSubject"

        selection = communication.prompt(text_prompt, type=int, default=len(DATAVERSE_SUBJECTS)) or 0

        return DATAVERSE_SUBJECTS[selection - 1]

    def _get_creators(self):
        authors = []
        contacts = []

        for creator in self.dataset.creators:
            name = creator.name or ""
            affiliation = creator.affiliation or ""
            email = creator.email or ""

            author_template = Template(AUTHOR_METADATA_TEMPLATE)
            author = author_template.substitute(
                name=_escape_json_string(name), affiliation=_escape_json_string(affiliation)
            )
            authors.append(json.loads(author))

            contact_template = Template(CONTACT_METADATA_TEMPLATE)
            contact = contact_template.substitute(name=_escape_json_string(name), email=email)
            contacts.append(json.loads(contact))

        return authors, contacts

    def _get_keywords(self):
        keywords = []

        for keyword in self.dataset.keywords:
            keyword_template = Template(KEYWORDS_METADATA_TEMPLATE)
            keyword_str = keyword_template.substitute(keyword=_escape_json_string(keyword))
            keywords.append(json.loads(keyword_str))

        return keywords

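# Illustrative sketch of the export flow (hypothetical server and collection
# names; the real call sites live elsewhere in renku-python):
#
#     exporter = DataverseProvider(uri="https://demo.dataverse.example").get_exporter(
#         dataset,
#         tag=None,
#         dataverse_server="https://demo.dataverse.example",
#         dataverse_name="my-dataverse",
#     )
#     exporter.set_access_token("<api-token>")
#     dataset_pid = exporter.export()
#
# ``export`` creates the dataset through ``_DataverseDeposition`` (below),
# uploads each file of the Renku dataset, and publishes it if requested.

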
class _DataverseDeposition:
    """Dataverse record for deposit."""

    def __init__(self, *, access_token, server_url, dataset_pid=None):
        self.access_token = access_token
        self.server_url = server_url
        self.dataset_pid = dataset_pid

    DATASET_CREATE_PATH = "dataverses/{dataverseName}/datasets"
    FILE_UPLOAD_PATH = "datasets/:persistentId/add"
    DATASET_PUBLISH_PATH = "datasets/:persistentId/actions/:publish"

    def create_dataset(self, dataverse_name, metadata):
        """Create a dataset in a given dataverse."""
        api_path = self.DATASET_CREATE_PATH.format(dataverseName=dataverse_name)
        url = self._make_url(api_path=api_path)

        response = self._post(url=url, json=metadata)
        self._check_response(response)

        self.dataset_pid = response.json()["data"]["persistentId"]

        return response

    def upload_file(self, full_path, path_in_dataset):
        """Upload a file to a previously-created dataset."""
        if self.dataset_pid is None:
            raise errors.ExportError("Dataset not created.")

        url = self._make_url(self.FILE_UPLOAD_PATH, persistentId=self.dataset_pid)

        params = {"directoryLabel": str(path_in_dataset.parent)}
        data = dict(jsonData=json.dumps(params))

        files = {"file": (path_in_dataset.name, open(full_path, "rb"))}

        response = self._post(url=url, data=data, files=files)
        self._check_response(response)

        return response

    def publish_dataset(self):
        """Publish a previously-created dataset."""
        if self.dataset_pid is None:
            raise errors.ExportError("Dataset not created.")

        url = self._make_url(self.DATASET_PUBLISH_PATH, persistentId=self.dataset_pid, type="major")

        response = self._post(url=url)
        self._check_response(response)

        return response

    def _make_url(self, api_path, **query_params):
        """Create URL for creating a dataset."""
        url_parts = urlparse.urlparse(self.server_url)
        path = posixpath.join(DATAVERSE_API_PATH, api_path)

        query_params_str = urllib.parse.urlencode(query_params)
        url_parts = url_parts._replace(path=path, query=query_params_str)
        return urllib.parse.urlunparse(url_parts)

    def _post(self, url, json=None, data=None, files=None):
        from renku.core.util import requests

        headers = {"X-Dataverse-key": self.access_token}
        try:
            return requests.post(url=url, json=json, data=data, files=files, headers=headers)
        except errors.RequestError as e:
            raise errors.ExportError("Cannot POST to remote server.") from e

    @staticmethod
    def _check_response(response):
        from renku.core.util import requests

        try:
            requests.check_response(response=response)
        except errors.RequestError:
            json_res = response.json()
            raise errors.ExportError(
                "HTTP {} - Cannot export dataset: {}".format(
                    response.status_code, json_res["message"] if "message" in json_res else json_res["status"]
                )
            )


def _escape_json_string(value):
    """Create a JSON-safe string."""
    if isinstance(value, str):
        return json.dumps(value)[1:-1]
    return value


def check_dataverse_uri(url):
    """Check if a URL points to a dataverse instance."""
    from renku.core.util import requests

    url_parts = list(urlparse.urlparse(url))
    url_parts[2] = posixpath.join(DATAVERSE_API_PATH, DATAVERSE_VERSION_API)

    url_parts[3:6] = [""] * 3
    version_url = urlparse.urlunparse(url_parts)

    response = requests.get(version_url)

    if response.status_code != 200:
        return False

    version_data = response.json()

    if "status" not in version_data or "data" not in version_data:
        return False

    version_info = version_data["data"]

    if "version" not in version_info or "build" not in version_info:
        return False

    return True

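# For a hypothetical instance at "https://demo.dataverse.example",
# ``check_dataverse_uri`` probes "https://demo.dataverse.example/api/v1/info/version"
# and accepts the server only if the JSON reply has "status" plus a "data"
# object containing "version" and "build". ``check_dataverse_doi`` below first
# resolves the DOI through ``DOIProvider`` and then runs the same probe on the
# resolved URI.

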
def check_dataverse_doi(doi):
    """Check if a DOI points to a dataverse dataset."""
    try:
        doi = DOIProvider(uri=doi).get_importer()
    except LookupError:
        return False

    return check_dataverse_uri(doi.uri)


def make_records_url(record_id, base_url):
    """Create URL to access record by ID."""
    url_parts = list(urlparse.urlparse(base_url))
    url_parts[2] = posixpath.join(DATAVERSE_API_PATH, DATAVERSE_METADATA_API)
    args_dict = {"exporter": DATAVERSE_EXPORTER, "persistentId": record_id}
    url_parts[4] = urllib.parse.urlencode(args_dict)
    return urllib.parse.urlunparse(url_parts)


def make_versions_url(record_id, base_url):
    """Create URL to access the versions of a record."""
    url_parts = list(urlparse.urlparse(base_url))
    url_parts[2] = posixpath.join(DATAVERSE_API_PATH, DATAVERSE_VERSIONS_API)
    args_dict = {"exporter": DATAVERSE_EXPORTER, "persistentId": record_id}
    url_parts[4] = urllib.parse.urlencode(args_dict)
    return urllib.parse.urlunparse(url_parts)


def make_file_url(file_id, base_url):
    """Create URL to access a file by ID."""
    url_parts = list(urlparse.urlparse(base_url))
    url_parts[2] = posixpath.join(DATAVERSE_API_PATH, DATAVERSE_FILE_API)
    args_dict = {"persistentId": file_id}
    url_parts[4] = urllib.parse.urlencode(args_dict)
    return urllib.parse.urlunparse(url_parts)

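# For a hypothetical base URL "https://demo.dataverse.example" and record id
# "doi:10.7910/DVN/ABC123", ``make_records_url`` yields
# "https://demo.dataverse.example/api/v1/datasets/export?exporter=schema.org&persistentId=doi%3A10.7910%2FDVN%2FABC123";
# ``make_versions_url`` and ``make_file_url`` follow the same pattern with
# their respective API paths.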