VolumeGraphics / havocompare / build 6683250950

29 Oct 2023 12:12 PM UTC coverage: 85.175% (-0.08%) from 85.25%

Push via GitHub (web-flow): Merge pull request #41 from ChrisRega/0.5.0-dev
"Update json-diff library with array support, bump version for release…"

3 of 3 new or added lines in 3 files covered (100.0%)
2953 of 3467 relevant lines covered (85.17%)
2682.64 hits per line

Source File

/src/csv/tokenizer/mod.rs (96.13% of lines covered)
//! Hand-written CSV tokenizer: splits raw input into field and line-break
//! tokens and assembles them into rows of `Value`s.
use super::Error;
use crate::csv::tokenizer::guess_format::guess_format_from_reader;
use crate::csv::value::Value;
use crate::csv::Delimiters;
use std::cmp::Ordering;
use std::io::{Read, Seek};
use tracing::debug;

mod guess_format;
const BOM: char = '\u{feff}';
const DEFAULT_FIELD_SEPARATOR: char = ',';
const ESCAPE_BYTE: u8 = b'\\';
const ESCAPE_QUOTE_BYTE: u8 = b'"';
const QUOTE: char = '\"';
const NEW_LINE: char = '\n';
const CARRIAGE_RETURN: char = '\r';

/// A token produced by `tokenize`: either a field slice borrowed from the input or a line break.
#[derive(PartialEq, Eq, Debug)]
pub enum Token<'a> {
    Field(&'a str),
    LineBreak,
}

/// The character that opens and closes a quoted literal (currently only the double quote).
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
enum LiteralTerminator {
    Quote,
}

impl LiteralTerminator {
    pub fn get_char(&self) -> char {
        match self {
            LiteralTerminator::Quote => QUOTE,
        }
    }
}

/// A structurally significant character found while scanning, together with its byte position.
#[derive(PartialEq, Eq, Debug)]
enum SpecialCharacter {
    NewLine(usize),
    LiteralMarker(usize, LiteralTerminator),
    FieldStop(usize, char),
}

impl SpecialCharacter {
    pub fn get_position(&self) -> usize {
        match self {
            SpecialCharacter::NewLine(pos) => *pos,
            SpecialCharacter::LiteralMarker(pos, _) => *pos,
            SpecialCharacter::FieldStop(pos, _) => *pos,
        }
    }

    pub fn len(&self) -> usize {
        match self {
            SpecialCharacter::NewLine(_) => NEW_LINE.len_utf8(),
            SpecialCharacter::FieldStop(_, pat) => pat.len_utf8(),
            SpecialCharacter::LiteralMarker(_, marker) => marker.get_char().len_utf8(),
        }
    }

    #[cfg(test)]
    pub fn quote(pos: usize) -> SpecialCharacter {
        SpecialCharacter::LiteralMarker(pos, LiteralTerminator::Quote)
    }
}

impl PartialOrd<Self> for SpecialCharacter {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for SpecialCharacter {
    fn cmp(&self, other: &Self) -> Ordering {
        self.get_position().cmp(&other.get_position())
    }
}

/// Returns the byte position of the next occurrence of `pat` that is neither
/// backslash-escaped nor, for quotes, part of a doubled `""` pair.
fn find_next_unescaped(string: &str, pat: char) -> Option<usize> {
    let pos = string.find(pat);
    if let Some(pos) = pos {
        if pos > 0 {
            if let Some(byte_before) = string.as_bytes().get(pos - 1) {
                if *byte_before == ESCAPE_BYTE {
                    let remainder = &string[pos + pat.len_utf8()..];
                    return find_next_unescaped(remainder, pat)
                        .map(|ipos| ipos + pos + pat.len_utf8());
                }
            }
        }
        if pos < string.len() - 1 && pat == QUOTE {
            if let Some(byte_after) = string.as_bytes().get(pos + 1) {
                if *byte_after == ESCAPE_QUOTE_BYTE {
                    let new_start_offset = pos + pat.len_utf8() + 1;
                    let remainder = &string[new_start_offset..];
                    return find_next_unescaped(remainder, pat).map(|ipos| ipos + new_start_offset);
                }
            }
        }
        Some(pos)
    } else {
        None
    }
}

fn find_literal(string: &str, terminator: LiteralTerminator) -> Option<SpecialCharacter> {
    find_next_unescaped(string, terminator.get_char())
        .map(|p| SpecialCharacter::LiteralMarker(p, terminator))
}

fn find_new_line(string: &str) -> Option<SpecialCharacter> {
    find_next_unescaped(string, NEW_LINE).map(SpecialCharacter::NewLine)
}

fn find_field_stop(string: &str, field_sep: char) -> Option<SpecialCharacter> {
    find_next_unescaped(string, field_sep).map(|p| SpecialCharacter::FieldStop(p, field_sep))
}

/// Scans for the first unescaped quote, newline, or field separator in `string`.
fn find_special_char(string: &str, field_sep: char) -> Option<SpecialCharacter> {
    for (pos, chr) in string.char_indices() {
        if pos > 0 && string.as_bytes()[pos - 1] == ESCAPE_BYTE {
            continue;
        }
        match chr {
            QUOTE => {
                return Some(SpecialCharacter::LiteralMarker(
                    pos,
                    LiteralTerminator::Quote,
                ));
            }
            NEW_LINE => {
                return Some(SpecialCharacter::NewLine(pos));
            }
            other if other == field_sep => {
                return Some(SpecialCharacter::FieldStop(pos, field_sep));
            }
            _ => continue,
        }
    }
    None
}

/// Collects parsed values into rows; trailing empty rows are trimmed when
/// converting the buffer into an iterator.
struct RowBuffer(Vec<Vec<Value>>);
impl RowBuffer {
    pub fn new() -> RowBuffer {
        RowBuffer(vec![Vec::new()])
    }

    pub fn push_field(&mut self, value: Value) {
        if let Some(current_row) = self.0.last_mut() {
            current_row.push(value);
        }
    }

    pub fn new_row(&mut self) {
        self.0.push(Vec::new());
    }

    pub fn into_iter(mut self) -> std::vec::IntoIter<Vec<Value>> {
        self.trim_end();
        self.0.into_iter()
    }

    fn trim_end(&mut self) {
        'PopEmpty: loop {
            if let Some(back) = self.0.last() {
                if back.len() <= 1 {
                    if let Some(first) = back.first() {
                        if first.as_str().is_empty() {
                            self.0.pop();
                        } else {
                            break 'PopEmpty;
                        }
                    } else {
                        self.0.pop();
                    }
                } else {
                    break 'PopEmpty;
                }
            }
        }
    }
}

/// CSV parser over a seekable reader, with explicitly given or guessed delimiters.
pub(crate) struct Parser<R: Read + Seek> {
    reader: R,
    delimiters: Delimiters,
}

/// Splits `input` into `Token::Field` and `Token::LineBreak` tokens, honoring quoted literals.
fn tokenize(input: &str, field_sep: char) -> Result<Vec<Token>, Error> {
    let mut tokens = Vec::new();
    let mut pos = 0;
    while let Some(remainder) = &input.get(pos..) {
        if let Some(special_char) = find_special_char(remainder, field_sep) {
            let mut end_pos = special_char.get_position();
            match &special_char {
                SpecialCharacter::FieldStop(_, _) => {
                    tokens.push(Token::Field(&remainder[..end_pos]));
                }
                SpecialCharacter::NewLine(_) => {
                    let field_value = &remainder[..end_pos].trim();
                    tokens.push(Token::Field(field_value));
                    tokens.push(Token::LineBreak);
                }
                SpecialCharacter::LiteralMarker(_, terminator) => {
                    let (literal_end_pos, token, break_line) =
                        parse_literal(field_sep, remainder, *terminator)?;
                    tokens.push(token);
                    if break_line {
                        tokens.push(Token::LineBreak);
                    }
                    end_pos += literal_end_pos;
                }
            };
            pos += end_pos + special_char.len();
        } else {
            break;
        }
    }

    if pos <= input.len() {
        tokens.push(Token::Field(&input[pos..]));
    }
    Ok(tokens)
}

/// Parses a quoted literal at the start of `remainder`; returns the offset of the field's end,
/// the field token (quotes included), and whether the field is terminated by a line break.
fn parse_literal(
    field_sep: char,
    remainder: &str,
    literal_type: LiteralTerminator,
) -> Result<(usize, Token, bool), Error> {
    let terminator_len = literal_type.get_char().len_utf8();
    let after_first_quote = &remainder[terminator_len..];
    let quote_end =
        find_literal(after_first_quote, literal_type).ok_or(Error::UnterminatedLiteral)?;
    let after_second_quote_in_remainder = quote_end.get_position() + 2 * terminator_len;
    let inner_remainder = &remainder[after_second_quote_in_remainder..];
    let field_end = find_field_stop(inner_remainder, field_sep)
        .map(|sc| sc.get_position())
        .unwrap_or(inner_remainder.len());
    let line_end = find_new_line(inner_remainder)
        .map(|sc| sc.get_position())
        .unwrap_or(inner_remainder.len());
    if line_end < field_end {
        let token = Token::Field(&remainder[..after_second_quote_in_remainder]);
        Ok((after_second_quote_in_remainder, token, true))
    } else {
        let token = Token::Field(&remainder[..after_second_quote_in_remainder + field_end]);
        Ok((after_second_quote_in_remainder + field_end, token, false))
    }
}

impl<R: Read + Seek> Parser<R> {
    pub fn new_guess_format(mut reader: R) -> Result<Self, Error> {
        guess_format_from_reader(&mut reader).map(|delimiters| Parser { reader, delimiters })
    }

    pub fn new(reader: R, delimiters: Delimiters) -> Option<Self> {
        delimiters.field_delimiter?;
        Some(Parser { reader, delimiters })
    }

    pub(crate) fn parse_to_rows(&mut self) -> Result<std::vec::IntoIter<Vec<Value>>, Error> {
        debug!(
            "Generating tokens with field delimiter: {:?}",
            self.delimiters.field_delimiter
        );

        let mut string_buffer = String::new();
        self.reader.read_to_string(&mut string_buffer)?;
        // Remove the BOM and carriage returns (normalizes Windows line endings to Unix).
        string_buffer.retain(|c| ![BOM, CARRIAGE_RETURN].contains(&c));
        let field_sep = self
            .delimiters
            .field_delimiter
            .unwrap_or(DEFAULT_FIELD_SEPARATOR);

        let mut buffer = RowBuffer::new();

        tokenize(string_buffer.as_str(), field_sep)?
            .into_iter()
            .for_each(|t| match t {
                Token::Field(input_str) => {
                    buffer.push_field(Value::from_str(
                        input_str,
                        &self.delimiters.decimal_separator,
                    ));
                }
                Token::LineBreak => buffer.new_row(),
            });

        Ok(buffer.into_iter())
    }
}

#[cfg(test)]
mod tokenizer_tests {
    use super::*;
    use std::fs::File;
    use std::io::Cursor;

    #[test]
    fn unescaped() {
        let str = "...\\,...,";
        let next = find_next_unescaped(str, ',').unwrap();
        assert_eq!(next, 8);
    }

    #[test]
    fn next_special_char_finds_first_quote() {
        let str = ".....\"..',.";
        let next = find_special_char(str, ',').unwrap();
        assert_eq!(next, SpecialCharacter::quote(5));
    }

    #[test]
    fn next_special_char_finds_first_unescaped_quote() {
        let str = r#"..\"."..',."#;
        let next = find_special_char(str, ',').unwrap();
        assert_eq!(next, SpecialCharacter::quote(5));
    }

    #[test]
    fn tokenization_simple() {
        let str = "bla,blubb,2.0";
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens.pop().unwrap(), Token::Field("2.0"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("blubb"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenization_simple_last_field_empty() {
        let str = "bla,\nblubb,";
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 5);
        assert_eq!(tokens.pop().unwrap(), Token::Field(""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("blubb"));
        assert_eq!(tokens.pop().unwrap(), Token::LineBreak);
        assert_eq!(tokens.pop().unwrap(), Token::Field(""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenization_with_literals() {
        let str = r#"bla,"bla,bla",2.0"#;
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens.pop().unwrap(), Token::Field("2.0"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("\"bla,bla\""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenization_of_unterminated_literal_errors() {
        let str = r#"bla,"There is no termination"#;
        let tokens = tokenize(str, ',');
        assert!(matches!(tokens.unwrap_err(), Error::UnterminatedLiteral));
    }

    #[test]
    fn tokenization_of_literals_and_spaces() {
        let str = r#"bla, "literally""#;
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 2);
        assert_eq!(tokens.pop().unwrap(), Token::Field(" \"literally\""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenization_literals_at_line_end() {
        let str = r#"bla,"bla,bla"
bla,bla"#;
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
        assert_eq!(tokens.pop().unwrap(), Token::LineBreak);
        assert_eq!(tokens.pop().unwrap(), Token::Field("\"bla,bla\""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenization_with_multi_line_literals() {
        let str = "bla,\"bla\nbla\",2.0";
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens.pop().unwrap(), Token::Field("2.0"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("\"bla\nbla\""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenize_to_values_cuts_last_nl() {
        let str = "bla\n2.0\n\n";
        let mut parser = Parser::new_guess_format(Cursor::new(str)).unwrap();
        let lines = parser.parse_to_rows().unwrap();
        assert_eq!(lines.len(), 2);
    }

    #[test]
    fn tokenization_with_multi_line_with_escape_break_literals() {
        let str = "\\\"bla,\"'bla\\\"\nbla'\",2.0";
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens.pop().unwrap(), Token::Field("2.0"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("\"'bla\\\"\nbla'\""));
        assert_eq!(tokens.pop().unwrap(), Token::Field("\\\"bla"));
    }

    #[test]
    fn find_chars_unicode_with_utf8() {
        let str = r#"mm²,Area"#;
        let pos = find_next_unescaped(str, ',').unwrap();
        assert_eq!(pos, 4);
    }

    #[test]
    fn find_next_unescaped_field_after_utf8_multibyte_char() {
        let str = r#"mm²,Area"#;
        let pos = find_next_unescaped(str, ',').unwrap();
        assert_eq!(pos, 4);
    }

    #[test]
    fn tokenization_windows_newlines() {
        let str = "bla\n\rbla";
        let mut tokens = Parser::new(
            Cursor::new(str),
            Delimiters {
                field_delimiter: Some(','),
                decimal_separator: None,
            },
        )
        .unwrap()
        .parse_to_rows()
        .unwrap();
        assert_eq!(tokens.len(), 2);
        assert_eq!(
            *tokens.next().unwrap().first().unwrap(),
            Value::from_str("bla", &None)
        );
        assert_eq!(
            *tokens.next().unwrap().first().unwrap(),
            Value::from_str("bla", &None)
        );
    }

    #[test]
    fn tokenization_new_lines() {
        let str = "bla,bla\nbla,bla";
        let mut tokens = tokenize(str, ',').unwrap();
        assert_eq!(tokens.len(), 5);
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
        assert_eq!(tokens.pop().unwrap(), Token::LineBreak);
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
        assert_eq!(tokens.pop().unwrap(), Token::Field("bla"));
    }

    #[test]
    fn tokenizer_smoke() {
        let actual = File::open(
            "tests/integ/data/display_of_status_message_in_cm_tables/actual/Volume1.csv",
        )
        .unwrap();
        let mut parser = Parser::new_guess_format(actual).unwrap();
        parser.parse_to_rows().unwrap();
        let nominal = File::open(
            "tests/integ/data/display_of_status_message_in_cm_tables/expected/Volume1.csv",
        )
        .unwrap();
        let mut parser = Parser::new_guess_format(nominal).unwrap();
        parser.parse_to_rows().unwrap();
    }

    #[test]
    fn tokenizer_semicolon_test() {
        let nominal =
            File::open("tests/csv/data/easy_pore_export_annoration_table_result.csv").unwrap();
        let mut parser = Parser::new_guess_format(nominal).unwrap();
        for line in parser.parse_to_rows().unwrap() {
            assert_eq!(line.len(), 5);
        }
    }

    #[test]
    fn special_quote_escape_works() {
        let str = r#"""..""#;
        let quote = find_next_unescaped(str, '\"').unwrap();
        assert_eq!(quote, 4);
    }

    #[test]
    fn special_quote_escape_works_complicated() {
        let str = r#"""Scene""=>""Mesh 1""""#;
        let (pos, _, _) = parse_literal(',', str, LiteralTerminator::Quote).unwrap();
        assert_eq!(pos, 22);
    }

    #[test]
    fn tokenize_complicated_literal_smoke() {
        let str = r#"Deviation interval A [mm],-0.6
Deviation interval B [mm],0.6
Actual object,"""Scene""=>""Volume 1""=>""Merged region"""
Nominal object,"""Scene""=>""Mesh 1""""#;
        let lines = Parser::new_guess_format(Cursor::new(str))
            .unwrap()
            .parse_to_rows()
            .unwrap();
        for line in lines {
            assert_eq!(line.len(), 2);
        }
    }
}
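For orientation, here is a minimal end-to-end sketch of how the pieces above fit together, written as if it were one more case inside tokenizer_tests (so it relies only on the crate-internal items the existing tests already import via `use super::*;`). The input string and assertions are illustrative and not part of the module; the sketch pushes a semicolon-delimited document containing a quoted, multi-line literal through Parser::new and parse_to_rows.

#[test]
fn parse_semicolon_document_sketch() {
    // Header row plus one data row; the quoted literal keeps its embedded
    // newline and semicolon instead of splitting the field.
    let input = "name;comment\nprobe;\"first line\nstill field two; honestly\"\n";
    let mut parser = Parser::new(
        Cursor::new(input),
        Delimiters {
            field_delimiter: Some(';'),
            decimal_separator: None,
        },
    )
    .unwrap();
    let rows: Vec<Vec<Value>> = parser.parse_to_rows().unwrap().collect();
    // RowBuffer::trim_end drops the empty row created by the trailing newline,
    // leaving two rows of two fields each.
    assert_eq!(rows.len(), 2);
    assert!(rows.iter().all(|row| row.len() == 2));
}

As the tokenization tests above show, the tokenizer keeps the surrounding quotes in the field slice; the semicolon inside the literal does not split the field because find_special_char reports the opening quote before the separator and parse_literal then consumes everything up to the matching closing quote.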