SwissDataScienceCenter / renku-python · build 6875247711 · 15 Nov 2023 09:16AM UTC
Coverage: 82.786% (-0.05%, from 82.831%); 25441 of 30731 relevant lines covered (82.79%), 3.12 hits per line
Pull Request #3300: chore: do not always retry load tests requests (merge e2d3269e8 into 4726f660e)

Source file: /renku/core/dataset/providers/renku.py (86.69% covered)
# Copyright Swiss Data Science Center (SDSC). A partnership between
# École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Renku dataset provider."""

import re
import shutil
import urllib
from collections import defaultdict
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional

from renku.core import errors
from renku.core.dataset.datasets_provenance import DatasetsProvenance
from renku.core.dataset.providers.api import ImporterApi, ImportProviderInterface, ProviderApi, ProviderPriority
from renku.core.login import read_renku_token
from renku.core.storage import pull_paths_from_storage
from renku.core.util import communication
from renku.core.util.git import clone_renku_repository, get_cache_directory_for_repository, get_file_size
from renku.core.util.metadata import is_external_file, make_project_temp_dir
from renku.core.util.urls import remove_credentials
from renku.domain_model.project_context import project_context

if TYPE_CHECKING:
    from renku.core.dataset.providers.models import DatasetAddMetadata, ProviderDataset, ProviderParameter
    from renku.domain_model.dataset import Dataset


class RenkuProvider(ProviderApi, ImportProviderInterface):
    """Renku API provider."""

    priority = ProviderPriority.HIGH
    name = "Renku"
    is_remote = True

    def __init__(self, uri: str, **_):
        super().__init__(uri=uri)

        self._accept = "application/json"
        self._authorization_header: Optional[Dict[str, str]] = None
        self._gitlab_token: Optional[str] = None
        self._renku_token: Optional[str] = None
        self._tag: Optional[str] = None

    @staticmethod
    def supports(uri):
        """Whether this provider supports a given URI."""
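        # NOTE: Accepted URI shapes (see _extract_project_and_dataset_ids below):
        #   https://<host>/projects/<namespace>/<project>/datasets/<slug-or-id>
        #   https://<host>/datasets/<id>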
        parsed_url = urllib.parse.urlparse(uri)

        if not parsed_url.netloc:
            return False

        _, dataset_id = RenkuProvider._extract_project_and_dataset_ids(parsed_url)
        return dataset_id is not None

    @staticmethod
    def get_import_parameters() -> List["ProviderParameter"]:
        """Returns parameters that can be set for import."""
        from renku.core.dataset.providers.models import ProviderParameter

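        # NOTE: Presumably exposed as a ``--tag`` option of ``renku dataset import``
        # (an assumption; the CLI wiring lives outside this module).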
        return [ProviderParameter("tag", help="Import a specific tag instead of the latest version.", type=str)]

    def get_importer(self, tag: Optional[str] = None, gitlab_token: Optional[str] = None, **kwargs):
        """Retrieves a dataset import manager from Renku.

        Args:
            tag(Optional[str]): Dataset version to import.
            gitlab_token(Optional[str]): Gitlab access token.

        Returns:
            RenkuImporter: A Renku import manager.
        """
        self._tag = tag
        self._gitlab_token = gitlab_token

        self._prepare_auth(self.uri)

        slug, identifier, latest_version_uri, kg_url = self._fetch_dataset_info(self.uri)

        project_url_ssh, project_url_http = self._get_project_urls(kg_url)

        return RenkuImporter(
            uri=self.uri,
            slug=slug,
            identifier=identifier,
            tag=self._tag,
            latest_version_uri=latest_version_uri,
            project_url_ssh=project_url_ssh,
            project_url_http=project_url_http,
            gitlab_token=self._gitlab_token,
            renku_token=self._renku_token,
        )

    def _fetch_dataset_info(self, uri):
        """Return the dataset's slug, initial identifier, latest version URI, and the project's KG URL."""
        parsed_url = urllib.parse.urlparse(uri)

        project_id, dataset_slug_or_id = RenkuProvider._extract_project_and_dataset_ids(parsed_url)
        if not project_id and not dataset_slug_or_id:
            raise errors.ParameterError("Invalid URI", param_hint=uri)

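        # NOTE: For example, a URI like https://<host>/projects/<group>/<project>/datasets/my-data
        # is first looked up as https://<host>/knowledge-graph/datasets/my-data.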
        kg_path = f"/knowledge-graph/datasets/{dataset_slug_or_id}"
2✔
115
        dataset_kg_url = parsed_url._replace(path=kg_path).geturl()
2✔
116

117
        try:
2✔
118
            dataset_info = self._query_knowledge_graph(dataset_kg_url)
2✔
119
        except errors.NotFound:
2✔
120
            # NOTE: If URI is not found we assume that it contains dataset's slug instead of its id
121
            dataset_slug = dataset_slug_or_id
2✔
122
            identifier = None
2✔
123
            dataset_info = None
2✔
124
        else:
125
            # name was renamed to slug, name kept for backwards compatibility
126
            dataset_slug = dataset_info.get("slug", dataset_info.get("name"))
1✔
127
            identifier = dataset_info["identifier"]
1✔
128

129
        if project_id:
2✔
130
            kg_path = f"/knowledge-graph/projects/{project_id}"
2✔
131
            project_kg_url = parsed_url._replace(path=kg_path).geturl()
2✔
132
        elif not dataset_info:
1✔
133
            raise errors.NotFound(
1✔
134
                f"Resource not found in knowledge graph: {uri}\n"
135
                f"Hint: If the project is private you need to 'renku login {parsed_url.netloc}'"
136
            )
137
        else:
138
            project = dataset_info.get("project", {})
1✔
139
            links = project.get("_links", [])
1✔
140
            project_kg_url = next((link["href"] for link in links if link["rel"] == "project-details"), None)
1✔
141

142
            if not project_kg_url:
1✔
143
                raise errors.ParameterError("Cannot find project's KG URL from URI", param_hint=uri)
×
144

145
        latest_identifier, latest_version_uri = self._fetch_dataset_info_from_project(project_kg_url, dataset_slug)
2✔
146
        identifier = identifier or latest_identifier
1✔
147

148
        return dataset_slug, identifier, latest_version_uri, project_kg_url
1✔
149

150
    def _fetch_dataset_info_from_project(self, project_kg_url, dataset_slug):
7✔
151
        datasets_kg_url = f"{project_kg_url}/datasets"
2✔
152
        try:
2✔
153
            response = self._query_knowledge_graph(datasets_kg_url)
2✔
154
        except errors.NotFound:
2✔
155
            raise errors.NotFound(f"Cannot find project in the knowledge graph: {project_kg_url}")
2✔
156

157
        dataset = next((d for d in response if d.get("slug") == dataset_slug), None)
1✔
158
        if not dataset:
1✔
159
            raise errors.OperationError(f"Cannot fetch dataset with slug '{dataset_slug}' from '{project_kg_url}'")
1✔
160

161
        links = dataset.get("_links", [])
1✔
162
        latest_version_uri = next((link["href"] for link in links if link["rel"] == "details"), None)
1✔
163
        if latest_version_uri:
1✔
164
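            # NOTE: Dropping the "/knowledge-graph" prefix presumably yields the
            # dataset's plain URL on the deployment (assumption based on the URL shape).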
            latest_version_uri = latest_version_uri.replace("/knowledge-graph", "")

        return dataset["identifier"], latest_version_uri

    @staticmethod
    def _extract_project_and_dataset_ids(parsed_url):
        # https://<host>/projects/:namespace/:0-or-more-subgroups/:name/datasets/:dataset-slug
        # https://<host>/projects/:namespace/:0-or-more-subgroups/:name/datasets/:id
        # https://<host>/datasets/:id
        match = re.match(r"(?:/projects/((?:[^/]+/)+[^/]+))?/datasets/([^/]+)/?$", parsed_url.path)
        project_id, dataset_slug_or_id = match.groups() if match else (None, None)
        return project_id, dataset_slug_or_id

    def _query_knowledge_graph(self, url):
        from renku.core.util import requests

        if self._renku_token and not self._gitlab_token:
            # NOTE: Authorization with renku token requires going through the gateway route
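            # e.g. https://<host>/knowledge-graph/datasets/<id> -> https://<host>/api/kg/datasets/<id>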
            url = url.replace("/knowledge-graph/", "/api/kg/")

        try:
            response = requests.get(url, headers=self._authorization_header)
        except errors.RequestError as e:
            raise errors.OperationError(f"Cannot access knowledge graph: {url}") from e

        parsed_uri = urllib.parse.urlparse(self.uri)
        if response.status_code == 404:
            raise errors.NotFound(
                f"Resource not found in knowledge graph: {url}\n"
                f"Hint: If the project is private you need to 'renku login {parsed_uri.netloc}'"
            )
        elif response.status_code in [401, 403]:
            raise errors.OperationError(
                f"Unauthorized access to knowledge graph: Run 'renku login {parsed_uri.netloc}'"
            )
        elif response.status_code != 200:
            raise errors.OperationError(f"Cannot access knowledge graph: {url}\nResponse code: {response.status_code}")

        return response.json()

    def _get_project_urls(self, project_kg_url):
        json = self._query_knowledge_graph(project_kg_url)
        urls = json.get("urls", {})

        return urls.get("ssh"), urls.get("http")

    def _prepare_auth(self, uri):
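        # NOTE: An explicitly provided GitLab token takes precedence over the
        # stored Renku session token.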
        if self._gitlab_token:
            token = self._gitlab_token
        else:
            self._renku_token = read_renku_token(endpoint=uri)
            token = self._renku_token

        self._authorization_header = {"Authorization": f"Bearer {token}"} if token else {}


class RenkuImporter(ImporterApi):
    """Renku dataset importer."""

    def __init__(
        self,
        uri,
        slug,
        identifier,
        tag,
        latest_version_uri,
        project_url_ssh,
        project_url_http,
        gitlab_token,
        renku_token,
    ):
        """Create a RenkuImporter from a Dataset."""
        super().__init__(uri=uri, original_uri=uri)

        self._slug = slug
        self._identifier = identifier
        self._tag = tag
        self._latest_version_uri = latest_version_uri
        self._project_url_ssh = project_url_ssh
        self._project_url_http = project_url_http
        self._gitlab_token = gitlab_token
        self._renku_token = renku_token

        self._project_url = None
        self._remote_repository = None
        self._remote_path: Optional[Path] = None

    def fetch_provider_dataset(self) -> "ProviderDataset":
        """Return encapsulated dataset instance."""
        self._provider_dataset = self._fetch_dataset()
        assert self._provider_dataset is not None, "Dataset wasn't fetched."
        return self._provider_dataset

    def download_files(self, destination: Path, extract: bool) -> List["DatasetAddMetadata"]:
        """Download dataset files from the remote provider."""
        from renku.core.dataset.providers.models import DatasetAddAction, DatasetAddMetadata
        from renku.domain_model.dataset import RemoteEntity

        url = remove_credentials(self.project_url)

        dataset_datadir = self.provider_dataset.get_datadir()
        remote_repository = self.repository

        if self.provider_dataset.version:  # NOTE: A tag was specified for import
            sources, checksums = zip(*[(f.path, f.checksum) for f in self.provider_dataset_files])  # type: ignore
        else:
            sources = [f.path for f in self.provider_dataset_files]  # type: ignore
            checksums = None

        assert destination.exists() and destination.is_dir(), "Destination dir must exist when importing a dataset"
        if checksums is not None:
            assert len(checksums) == len(sources), "Each source must have a corresponding checksum"  # type: ignore

        def add_file(src_entity_path: str, content_path: Path, checksum) -> None:
            """
            Create a DatasetAddMetadata.

            Args:
                src_entity_path: Entity path from the source dataset which is a relative path.
                content_path: Absolute path of the file content when copied with a checksum.
                checksum: Entity checksum.
            """
            try:
                relative_path = Path(src_entity_path).relative_to(dataset_datadir)
            except ValueError:  # Files that are not in dataset's data directory
                relative_path = Path(src_entity_path)

            dst = destination / relative_path
            path_in_dst_repo = dst.relative_to(project_context.path)

            already_copied = path_in_dst_repo in new_files  # A path with the same destination is already copied
            new_files[path_in_dst_repo].append(src_entity_path)
            if already_copied:
                return

            if is_external_file(path=src_entity_path, project_path=remote_repository.path):
                source = (remote_repository.path / src_entity_path).resolve()
                action = DatasetAddAction.SYMLINK
            else:
                source = content_path
                action = DatasetAddAction.MOVE

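            # NOTE: Without a recorded checksum (i.e. no tag was given), fall back
            # to the object hash at HEAD of the cloned remote repository.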
            checksum = checksum or remote_repository.get_object_hash(  # type: ignore
                revision="HEAD", path=src_entity_path
            )
            if not checksum:
                raise errors.FileNotFound(f"Cannot find '{src_entity_path}' in the remote project")

            new_file = DatasetAddMetadata(
                entity_path=path_in_dst_repo,
                url=url,
                based_on=RemoteEntity(checksum=checksum, path=src_entity_path, url=url),
                action=action,
                source=source,
                destination=dst,
            )
            results.append(new_file)

        results: List["DatasetAddMetadata"] = []
        new_files: Dict[Path, List[str]] = defaultdict(list)

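        # NOTE: Three cases follow: datasets with a backend storage get metadata-only
        # entries; a tag-less import moves files straight from the cloned repository;
        # a tagged import first restores each file's content at its recorded checksum.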
        if self.provider_dataset.storage:  # NOTE: Dataset with a backend storage
            results = [
                DatasetAddMetadata(
                    entity_path=Path(f.path),
                    url=url,
                    based_on=RemoteEntity(checksum=f.checksum, path=f.path, url=url),
                    action=DatasetAddAction.METADATA_ONLY,
                    source=remote_repository.path / f.path,
                    destination=destination / f.path,
                )
                for f in self.provider_dataset_files
            ]
        elif checksums is None:
            with project_context.with_path(remote_repository.path):
                pull_paths_from_storage(
                    project_context.repository, *(remote_repository.path / p for p in sources)  # type: ignore
                )

            for file in sources:
                add_file(file, content_path=remote_repository.path / file, checksum=None)  # type: ignore
        else:  # NOTE: Renku dataset import with a tag
            content_path_root = make_project_temp_dir(project_context.path)
            content_path_root.mkdir(parents=True, exist_ok=True)
            filename = 1

            for file, checksum in zip(sources, checksums):  # type: ignore
                content_path = content_path_root / str(filename)
                filename += 1

                try:
                    remote_repository.copy_content_to_file(  # type: ignore
                        path=file, checksum=checksum, output_path=content_path
                    )
                except errors.FileNotFound:
                    raise errors.FileNotFound(f"Cannot find '{file}' with hash '{checksum}' in the remote project")

                add_file(file, content_path=content_path, checksum=checksum)

        duplicates = [v for v in new_files.values() if len(v) > 1]
        if duplicates:
            files = {str(p) for paths in duplicates for p in paths}
            files_str = "\n\t".join(sorted(files))
            communication.warn(f"The following files overwrite each other in the destination project:\n\t{files_str}")

        return results

    def tag_dataset(self, slug: str) -> None:
        """Create a tag for the dataset ``slug`` if the remote dataset has a tag/version."""
        from renku.core.dataset.tag import add_dataset_tag

        if self.provider_dataset.tag:
            add_dataset_tag(
                dataset_slug=slug,
                tag=self.provider_dataset.tag.name,
                description=self.provider_dataset.tag.description,
            )
        elif self.provider_dataset.version:
            add_dataset_tag(
                dataset_slug=slug,
                tag=self.provider_dataset.version,
                description=f"Tag {self.provider_dataset.version} created by renku import",
            )

    def copy_extra_metadata(self, new_dataset: "Dataset") -> None:
        """Copy provider specific metadata once the dataset is created."""
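        # NOTE: Only images stored inside the remote repository (relative content_url)
        # are copied; images with absolute URLs are kept as-is.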
        if not self.provider_dataset.images:
            return

        for image in self.provider_dataset.images:
            if image.is_absolute:
                continue

            assert isinstance(self._remote_path, Path)  # TODO: Replace with proper typing

            remote_image_path = self._remote_path / image.content_url
            local_image_path = project_context.path / image.content_url
            local_image_path.parent.mkdir(exist_ok=True, parents=True)

            shutil.copy(remote_image_path, local_image_path)

        new_dataset.images = self.provider_dataset.images or []

    def is_latest_version(self):
        """Check if the dataset is at the latest possible version."""
        return self.latest_uri.endswith(self._identifier)

    def is_version_equal_to(self, dataset: Any) -> bool:
        """Check if a dataset has the same identifier as this record."""
        same_as = getattr(dataset, "same_as", None)
        return same_as is not None and same_as.value.endswith(self._identifier)

    @property
    def project_url(self):
        """URL of the Renku project in Gitlab."""
        return self._project_url

    @property
    def repository(self):
        """The cloned repository that contains the dataset."""
        return self._remote_repository

    @staticmethod
    def _extract_dataset_id(uri):
        """Extract dataset id from uri."""
        u = urllib.parse.urlparse(uri)
        return Path(u.path).name

    @property
    def version(self):
        """Get record version."""
        return self._provider_dataset.version if self._provider_dataset else None

    @property
    def latest_uri(self):
        """Get URI of the latest version."""
        return self._latest_version_uri

    @property
    def datadir_exists(self):
        """Whether the dataset data directory exists (might be missing in git if empty)."""
        if self._remote_path is None:
            raise errors.DatasetImportError("Dataset not fetched.")
        return (self._remote_path / self.provider_dataset.get_datadir()).exists()

    def _fetch_dataset(self):
        from renku.core.dataset.providers.models import ProviderDataset, ProviderDatasetFile
        from renku.domain_model.dataset import Url

        remote_repository = None

        parsed_uri = urllib.parse.urlparse(self.uri)

        urls = (self._project_url_ssh, self._project_url_http)
        # Clone the project
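        # NOTE: URLs are tried in order, SSH first, falling back to HTTP; the first
        # successful clone wins.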
        communication.echo(msg="Cloning remote repository...")
        for url in urls:
            try:
                remote_repository = clone_renku_repository(
                    url=url,
                    path=get_cache_directory_for_repository(url=url),
                    gitlab_token=self._gitlab_token,
                    deployment_hostname=parsed_uri.netloc,
                    depth=None,
                    reuse_existing_repository=True,
                    use_renku_credentials=True,
                )
            except errors.GitError:
                pass
            else:
                self._project_url = url
                break

        if self._project_url is None or remote_repository is None:
            raise errors.ParameterError("Cannot clone remote projects:\n\t" + "\n\t".join(urls), param_hint=self.uri)

        with project_context.with_path(remote_repository.path):
            self._remote_path = project_context.path

            self._migrate_project()
            self._remote_repository = remote_repository

            datasets_provenance = DatasetsProvenance()

            dataset = datasets_provenance.get_by_slug(self._slug)
            if not dataset:
                raise errors.ParameterError(f"Cannot find dataset '{self._slug}' in project '{self._project_url}'")

            if self._tag:
                tags = datasets_provenance.get_all_tags(dataset=dataset)
                tag = next((t for t in tags if t.name == self._tag), None)

                if tag is None:
                    raise errors.ParameterError(f"Cannot find tag '{self._tag}' for dataset '{self._slug}'")

                dataset = datasets_provenance.get_by_id(tag.dataset_id.value)
            else:
                tag = None

            assert dataset is not None
            provider_dataset = ProviderDataset.from_dataset(dataset)

            # NOTE: Set the dataset version to the given tag (to reset the version if no tag was provided)
            provider_dataset.version = self._tag
            # NOTE: Store the tag so that it can be checked later to see if a tag was specified for import
            provider_dataset.tag = tag

        provider_dataset.derived_from = None
        provider_dataset.same_as = Url(url_id=remove_credentials(self.latest_uri))

        self._provider_dataset_files = [
            ProviderDatasetFile(
                path=file.entity.path,
                checksum=file.entity.checksum,
                filename=Path(file.entity.path).name,
                filetype=Path(file.entity.path).suffix.replace(".", ""),
                filesize=get_file_size(self._remote_path, file.entity.path),
                source=file.source,
            )
            for file in dataset.files
        ]

        return provider_dataset

    @staticmethod
    def _migrate_project():
        from renku.core.migration.migrate import is_project_unsupported, migrate_project  # Slow import
        from renku.core.migration.models.migration import MigrationType

        if is_project_unsupported():
            return

        try:
            communication.disable()
            # NOTE: We are not interested in migrating workflows when importing datasets
            migrate_project(
                skip_template_update=True, skip_docker_update=True, migration_type=~MigrationType.WORKFLOWS, strict=True
            )
        finally:
            communication.enable()