• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

OCHA-DAP / hdx-scraper-ipc / 18664145215

20 Oct 2025 08:32PM UTC coverage: 88.18% (-0.8%) from 88.994%
18664145215

push

github

mcarans
Fix IPC projection loading

470 of 533 relevant lines covered (88.18%)

0.88 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

92.04
/src/hdx/scraper/ipc/ipc.py
1
#!/usr/bin/python
2
"""
3
IPC:
4
----
5

6
Reads IPC data and creates datasets.
7

8
"""
9

10
import logging
1✔
11
from copy import deepcopy
1✔
12
from datetime import datetime, timezone
1✔
13
from typing import Dict, List
1✔
14

15
from dateutil.relativedelta import relativedelta
1✔
16
from slugify import slugify
1✔
17

18
from hdx.api.configuration import Configuration
1✔
19
from hdx.data.dataset import Dataset
1✔
20
from hdx.data.resource import Resource
1✔
21
from hdx.data.showcase import Showcase
1✔
22
from hdx.location.country import Country
1✔
23
from hdx.utilities.dateparse import (
1✔
24
    default_date,
25
    default_enddate,
26
)
27
from hdx.utilities.retriever import Retrieve
1✔
28

29
# Module-level logger, following the standard getLogger(__name__) pattern.
logger = logging.getLogger(__name__)

31

32
class IPC:
    """Reads IPC acute food insecurity data and accumulates rows for HDX datasets."""

    def __init__(
        self,
        configuration: Configuration,
        retriever: Retrieve,
        state: Dict,
        ch_countries: List,
    ):
        """Set up scraper configuration and empty output accumulators.

        Args:
            configuration: HDX configuration; must contain "base_url" (and,
                for later methods, the HXL tag mappings and showcase URLs).
            retriever: Download helper used for all IPC API requests.
            state: Per-country last-analysis-date state; the "DEFAULT" key
                is the fallback date for countries not yet in the state.
            ch_countries: ISO3 codes of Cadre Harmonisé countries (used to
                choose the showcase when generating datasets).
        """
        self._configuration = configuration
        self._retriever = retriever
        self._state = state
        # Fallback analysis date for countries with no entry in state.
        self._default_start_date = state["DEFAULT"]
        self._base_url = configuration["base_url"]
        # The three lists below must stay index-aligned: the i-th display
        # name, column suffix and API projection key all describe the same
        # IPC validity period.
        self._projection_names = [
            "Current",
            "First projection",
            "Second projection",
        ]
        self._projection_suffixes = ["", "_projected", "_second_projected"]
        self._projections = ["current", "projected", "second_projected"]
        # Maps IPC API field prefixes to the "Phase" value written to rows.
        self._phasemapping = {
            "estimated": "all",
            "p3plus": "3+",
            "phase1": "1",
            "phase2": "2",
            "phase3": "3",
            "phase4": "4",
            "phase5": "5",
        }
        # NOTE(review): not referenced anywhere in this file's visible code
        # — confirm whether it is still needed.
        self._colnamemapping = {"estimated": "analyzed"}
        # Rows accumulated across all countries, plus the overall time
        # period. Start/end are initialised "inside out" (start at the
        # latest possible date, end at the earliest) so that any real
        # parsed dates widen the period.
        self._output = {
            "country_rows_latest": [],
            "country_rows_wide_latest": [],
            "group_rows_latest": [],
            "group_rows_wide_latest": [],
            "area_rows_latest": [],
            "area_rows_wide_latest": [],
            "country_rows": [],
            "country_rows_wide": [],
            "group_rows": [],
            "group_rows_wide": [],
            "area_rows": [],
            "area_rows_wide": [],
            "start_date": state.get("START_DATE", default_enddate),
            "end_date": state.get("END_DATE", default_date),
        }
        # Precompute the global dataset's HDX URL so country dataset notes
        # can link to it (a temporary Dataset is used only for the URL).
        name, title = self.get_dataset_title_name("Global")
        temp_dataset = Dataset({"name": name, "title": title})
        self._global_dataset_url = temp_dataset.get_hdx_url()
        self._ch_countries = ch_countries
        # IDs of acute ("type=A") analyses, populated by get_countries().
        self._acute_analysis_ids = []
    def get_dataset_title_name(self, countryname):
        """Build the HDX dataset name/title pair for a location.

        Args:
            countryname: Country name (or "Global") to embed in the title.

        Returns:
            Tuple of (slugified dataset name, human-readable title).
        """
        dataset_title = f"{countryname}: Acute Food Insecurity Country Data"
        dataset_name = slugify(dataset_title).lower()
        return dataset_name, dataset_title
    def get_countries(self):
        """Fetch acute analyses from the IPC API and derive the country list.

        Side effect: records the id of every acute ("type=A") analysis in
        self._acute_analysis_ids for later filtering.

        Returns:
            List of {"iso3": code} dicts sorted by ISO3 code.
        """
        isos_found = set()
        analyses = self._retriever.download_json(f"{self._base_url}/analyses?type=A")

        for analysis in analyses:
            countryiso2 = analysis["country"]
            iso3 = Country.get_iso3_from_iso2(countryiso2)
            if iso3 is None:
                # Unmappable ISO2 codes are reported but skipped.
                logger.error(
                    f"Could not find country ISO 3 code matching ISO 2 code {countryiso2}!"
                )
                continue
            isos_found.add(iso3)
            self._acute_analysis_ids.append(analysis["id"])
        return [{"iso3": iso3} for iso3 in sorted(isos_found)]
    @staticmethod
1✔
106
    def parse_date(datestring):
1✔
107
        date = datetime.strptime(datestring, "%b %Y")
1✔
108
        return date.replace(tzinfo=timezone.utc)
1✔
109

110
    @classmethod
    def parse_date_range(cls, date_range, time_period):
        """Parse a "Mon YYYY - Mon YYYY" range, widening time_period in place.

        Args:
            date_range: String of the form "Jan 2020 - Jun 2020".
            time_period: Dict with "start_date"/"end_date" datetimes that
                are widened (never narrowed) to cover the parsed range.

        Returns:
            Tuple of ISO-format (start, end) date strings; the end is the
            last day of the range's final month.
        """
        start_part, end_part = date_range.split(" - ")
        startdate = cls.parse_date(start_part)
        # Extend the end to the last day of its month: step forward one
        # month, then back one day.
        enddate = cls.parse_date(end_part) + relativedelta(months=1, days=-1)
        time_period["start_date"] = min(startdate, time_period["start_date"])
        time_period["end_date"] = max(enddate, time_period["end_date"])
        return startdate.date().isoformat(), enddate.date().isoformat()
    def add_country_subnational_rows(
1✔
125
        self,
126
        base_row,
127
        time_period,
128
        location,
129
        rows,
130
        rows_wide,
131
        analysis=None,
132
    ):
133
        if analysis is None:
1✔
134
            analysis = location
1✔
135
        country_subnational_row = deepcopy(base_row)
1✔
136
        row_wide = deepcopy(country_subnational_row)
1✔
137
        have_data = False
1✔
138
        for i, projection in enumerate(self._projections):
1✔
139
            projection_row = deepcopy(country_subnational_row)
1✔
140
            period_date = analysis.get(f"{projection}_period_dates")
1✔
141
            if period_date:
1✔
142
                period_start, period_end = self.parse_date_range(
1✔
143
                    period_date, time_period
144
                )
145
                have_data = True
1✔
146
            else:
147
                period_start = period_end = None
1✔
148
            projection_name = self._projection_names[i]
1✔
149
            projection_name_l = projection_name.lower()
1✔
150
            projection_row["Validity period"] = projection_name_l
1✔
151
            projection_row["From"] = period_start
1✔
152
            projection_row["To"] = period_end
1✔
153
            row_wide[f"{projection_name} from"] = period_start
1✔
154
            row_wide[f"{projection_name} to"] = period_end
1✔
155
            projection_suffix = self._projection_suffixes[i]
1✔
156
            location[f"estimated_percentage{projection_suffix}"] = 1.0
1✔
157
            for prefix, phase in self._phasemapping.items():
1✔
158
                row = deepcopy(projection_row)
1✔
159
                if phase == "3+":
1✔
160
                    key = f"p3plus{projection_suffix}"
1✔
161
                else:
162
                    key = f"{prefix}_population{projection_suffix}"
1✔
163
                affected = location.get(key)
1✔
164
                row["Phase"] = phase
1✔
165
                row["Number"] = affected
1✔
166
                projection_name_l = projection_name.lower()
1✔
167
                if phase == "all":
1✔
168
                    if period_start is None:
1✔
169
                        affected = None
1✔
170
                    colname = f"Population analyzed {projection_name_l}"
1✔
171
                else:
172
                    colname = f"Phase {phase} number {projection_name_l}"
1✔
173
                row_wide[colname] = affected
1✔
174
                percentage = location.get(f"{prefix}_percentage{projection_suffix}")
1✔
175
                row["Percentage"] = percentage
1✔
176
                if prefix != "estimated":
1✔
177
                    row_wide[f"Phase {phase} percentage {projection_name_l}"] = (
1✔
178
                        percentage
179
                    )
180
                if affected is not None and period_date:
1✔
181
                    rows.append(row)
1✔
182
        if have_data:
1✔
183
            rows_wide.append(row_wide)
1✔
184

185
    @staticmethod
1✔
186
    def get_base_row(analysis, countryiso3):
1✔
187
        return {
1✔
188
            "Date of analysis": analysis["analysis_date"],
189
            "Country": countryiso3,
190
            "Total country population": analysis.get("population"),
191
        }
192

193
    def add_country_rows(self, analysis, countryiso3, time_period, rows, rows_wide):
1✔
194
        base_row = self.get_base_row(analysis, countryiso3)
1✔
195
        self.add_country_subnational_rows(
1✔
196
            base_row,
197
            time_period,
198
            analysis,
199
            rows=rows,
200
            rows_wide=rows_wide,
201
        )
202

203
    def add_subnational_rows(
1✔
204
        self,
205
        analysis,
206
        countryiso3,
207
        time_period,
208
        group_rows,
209
        group_rows_wide,
210
        area_rows,
211
        area_rows_wide,
212
    ):
213
        def process_areas(adm_row, adm):
1✔
214
            if adm["areas"] is None:
1✔
215
                logger.error(
×
216
                    f'{countryiso3}: {analysis["title"]} has blank "areas" field!'
217
                )
218
                return
×
219
            for area in adm["areas"]:
1✔
220
                area_row = deepcopy(adm_row)
1✔
221
                if "Level 1" not in area_row:
1✔
222
                    area_row["Level 1"] = None
1✔
223
                area_row["Area"] = area["name"]
1✔
224
                self.add_country_subnational_rows(
1✔
225
                    area_row,
226
                    time_period,
227
                    area,
228
                    rows=area_rows,
229
                    rows_wide=area_rows_wide,
230
                    analysis=analysis,
231
                )
232

233
        base_row = self.get_base_row(analysis, countryiso3)
1✔
234
        groups = analysis.get("groups")
1✔
235
        if groups:
1✔
236
            for group in analysis["groups"]:
1✔
237
                group_row = deepcopy(base_row)
1✔
238
                group_row["Level 1"] = group["name"]
1✔
239
                self.add_country_subnational_rows(
1✔
240
                    group_row,
241
                    time_period,
242
                    group,
243
                    rows=group_rows,
244
                    rows_wide=group_rows_wide,
245
                    analysis=analysis,
246
                )
247
                if "areas" in group:
1✔
248
                    process_areas(group_row, group)
1✔
249
        else:
250
            process_areas(base_row, analysis)
1✔
251

252
    def get_country_data(self, countryiso3):
        """Download and process one country's analyses into output rows.

        Always accumulates the country's rows into the global self._output
        (so the global dataset stays complete) and refreshes the per-country
        state date, but returns None when the country's most recent analysis
        is not newer than the stored state (no country dataset update
        needed).

        Args:
            countryiso3: ISO3 code of the country to process.

        Returns:
            Output dict with the country's rows, geojson path and time
            period, or None when there is no data or no update is needed.
        """
        countryiso2 = Country.get_iso2_from_iso3(countryiso3)
        url = f"{self._base_url}/population?country={countryiso2}"
        country_data = self._retriever.download_json(url)
        if not country_data:
            return None
        # First acute analysis in the response is treated as most recent
        # — assumes the API returns analyses newest-first; TODO confirm.
        most_recent_analysis = None
        for analysis in country_data:
            if analysis["id"] in self._acute_analysis_ids:
                most_recent_analysis = analysis
                break
        if not most_recent_analysis:
            return None
        analysis_date = self.parse_date(most_recent_analysis["analysis_date"])
        # Only countries whose newest analysis postdates the stored state
        # get an update; the state is refreshed either way.
        if analysis_date <= self._state.get(countryiso3, self._default_start_date):
            update = False
        else:
            update = True
        self._state[countryiso3] = analysis_date
        # Inside-out initialisation so parsed dates widen the period.
        time_period = {"start_date": default_enddate, "end_date": default_date}

        output = {"countryiso3": countryiso3}

        # Find the newest acute analysis that has any period dates and note
        # which period to request the map for — presumably C=current,
        # P=first projection, A=second projection; confirm against the IPC
        # API documentation.
        most_recent_current_analysis = None
        projection_letter = None
        for analysis in country_data:
            if analysis["id"] in self._acute_analysis_ids:
                if analysis["current_period_dates"]:
                    most_recent_current_analysis = analysis
                    projection_letter = "C"
                    break
                elif analysis["projected_period_dates"]:
                    most_recent_current_analysis = analysis
                    projection_letter = "P"
                    break
                elif analysis["second_projected_period_dates"]:
                    most_recent_current_analysis = analysis
                    projection_letter = "A"
                    break
        if most_recent_current_analysis:
            # Download the GeoJSON map for the latest analysis and build the
            # "latest" row sets from that analysis only.
            analysis_id = most_recent_current_analysis["id"]
            url = f"{self._base_url}/areas/{analysis_id}/{projection_letter}?country={countryiso2}&type=A&format=geojson"
            filename = f"ipc_{countryiso3.lower()}.geojson"
            path = self._retriever.download_file(url, filename=filename)
            output["geojson"] = path
            country_rows = output["country_rows_latest"] = []
            country_rows_wide = output["country_rows_wide_latest"] = []
            group_rows = output["group_rows_latest"] = []
            group_rows_wide = output["group_rows_wide_latest"] = []
            area_rows = output["area_rows_latest"] = []
            area_rows_wide = output["area_rows_wide_latest"] = []
            self.add_country_rows(
                most_recent_current_analysis,
                countryiso3,
                time_period,
                country_rows,
                country_rows_wide,
            )
            self.add_subnational_rows(
                most_recent_current_analysis,
                countryiso3,
                time_period,
                group_rows,
                group_rows_wide,
                area_rows,
                area_rows_wide,
            )
            # Mirror the latest rows into the global accumulators.
            self._output["country_rows_latest"].extend(country_rows)
            self._output["country_rows_wide_latest"].extend(country_rows_wide)
            self._output["group_rows_latest"].extend(group_rows)
            self._output["group_rows_wide_latest"].extend(group_rows_wide)
            self._output["area_rows_latest"].extend(area_rows)
            self._output["area_rows_wide_latest"].extend(area_rows_wide)
        else:
            output["geojson"] = None

        # Build the "all analyses" row sets from every acute analysis.
        country_rows = output["country_rows"] = []
        country_rows_wide = output["country_rows_wide"] = []
        group_rows = output["group_rows"] = []
        group_rows_wide = output["group_rows_wide"] = []
        area_rows = output["area_rows"] = []
        area_rows_wide = output["area_rows_wide"] = []
        for analysis in country_data:
            if analysis["id"] not in self._acute_analysis_ids:
                continue
            self.add_country_rows(
                analysis,
                countryiso3,
                time_period,
                country_rows,
                country_rows_wide,
            )
            self.add_subnational_rows(
                analysis,
                countryiso3,
                time_period,
                group_rows,
                group_rows_wide,
                area_rows,
                area_rows_wide,
            )
        self._output["country_rows"].extend(country_rows)
        self._output["country_rows_wide"].extend(country_rows_wide)
        self._output["group_rows"].extend(group_rows)
        self._output["group_rows_wide"].extend(group_rows_wide)
        self._output["area_rows"].extend(area_rows)
        self._output["area_rows_wide"].extend(area_rows_wide)

        # Record the country's time period and widen the global one (also
        # persisting any widening into the state).
        start_date = time_period["start_date"]
        end_date = time_period["end_date"]
        output["start_date"] = start_date
        output["end_date"] = end_date
        if start_date < self._output["start_date"]:
            self._output["start_date"] = start_date
            self._state["START_DATE"] = start_date
        if end_date > self._output["end_date"]:
            self._output["end_date"] = end_date
            self._state["END_DATE"] = end_date
        if not update:
            return None
        return output
    def get_all_data(self):
1✔
375
        return self._output
1✔
376

377
    def generate_dataset_and_showcase(self, folder, output):
        """Create the HDX dataset and showcase for one country or globally.

        Writes "latest" resources (long/wide, national/level 1/area, plus a
        GeoJSON for countries), then "all analyses" resources unless the
        country has only a single analysis.

        Args:
            folder: Folder in which to write the generated CSV resources.
            output: Output dict from get_country_data (country) or
                get_all_data (global); falsy means nothing to generate.

        Returns:
            Tuple of (dataset, showcase); (None, None) when there is no
            data to publish.
        """
        if not output:
            return None, None
        # The global output dict has no "countryiso3" key.
        countryiso3 = output.get("countryiso3")
        if countryiso3:
            countryname = Country.get_country_name_from_iso3(countryiso3)
            notes = f"There is also a [global dataset]({self._global_dataset_url})."
        else:
            if not output["country_rows_latest"]:
                return None, None
            countryname = "Global"
            notes = (
                f"There are also [country datasets]({self._configuration.get_hdx_site_url()}/"
                f"organization/da501ffc-aadb-43f5-9d28-8fa572fd9ce0)"
            )
        name, title = self.get_dataset_title_name(countryname)
        logger.info(f"Creating dataset: {title}")
        dataset = Dataset(
            {
                "name": name,
                "title": title,
                "notes": notes,
            }
        )
        dataset.set_maintainer("196196be-6037-4488-8b71-d786adf4c081")
        dataset.set_organization("da501ffc-aadb-43f5-9d28-8fa572fd9ce0")
        dataset.set_expected_update_frequency("As needed")
        dataset.set_subnational(True)
        if countryiso3:
            dataset.add_country_location(countryiso3)
            countryiso3lower = countryiso3.lower()
        else:
            dataset.add_other_location("world")
            countryiso3lower = "global"
        tags = (
            "hxl",
            "food security",
            "integrated food security phase classification-ipc",
        )
        dataset.add_tags(tags)
        dataset.set_time_period(output["start_date"], output["end_date"])

        # Countries get the GeoJSON map of the latest analysis as the first
        # resource; the global dataset has no single map.
        if countryiso3:
            filename = f"ipc_{countryiso3lower}.geojson"
            resourcedata = {
                "name": filename,
                "description": "IPC GeoJSON for latest analysis",
            }
            resource = Resource(resourcedata)
            resource.set_file_to_upload(output["geojson"])
            resource.set_format("geojson")
            dataset.add_update_resource(resource)

        filename = f"ipc_{countryiso3lower}_national_long_latest.csv"
        resourcedata = {
            "name": filename,
            "description": "Latest IPC national data in long form with HXL tags",
        }
        country_rows = output["country_rows_latest"]
        if not country_rows:
            # NOTE(review): placeholder-free f-string; "(unknown)" looks
            # like it was meant to name the country — confirm intent.
            logger.warning(f"(unknown) has no data!")
            return None, None
        success, results = dataset.generate_resource_from_iterable(
            list(country_rows[0].keys()),
            country_rows,
            self._configuration["long_hxltags"],
            folder,
            filename,
            resourcedata,
        )
        if success is False:
            # NOTE(review): same placeholder message as above — confirm.
            logger.warning(f"(unknown) has no data!")
            return None, None

        country_rows_wide = output["country_rows_wide_latest"]
        # Won't do wide latest for country as just one row, but do it for global
        if len(country_rows_wide) > 1:
            filename = f"ipc_{countryiso3lower}_national_wide_latest.csv"
            resourcedata = {
                "name": filename,
                "description": "Latest IPC national data in wide form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(country_rows_wide[0].keys()),
                country_rows_wide,
                self._configuration["wide_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        # Pick the showcase: IPC-CH dashboard for global, the CH regional
        # page for Cadre Harmonisé countries, the country's IPC page
        # otherwise.
        if countryiso3lower == "global":
            showcase_description = "IPC-CH Dashboard"
            showcase_url = "https://www.ipcinfo.org/ipcinfo-website/ipc-dashboard/en/"
        elif countryiso3 in self._ch_countries:
            showcase_description = (
                "CH regional page on IPC website with map and reports"
            )
            showcase_url = self._configuration["ch_showcase_url"]
        else:
            showcase_description = f"Access all of IPC’s analyses for {countryname}"
            showcase_url = self._configuration["showcase_url"]
            showcase_url = f"{showcase_url}{countryiso3}"
        showcase = Showcase(
            {
                "name": f"{name}-showcase",
                "title": f"{title} showcase",
                "notes": showcase_description,
                "url": showcase_url,
                "image_url": "https://www.ipcinfo.org/fileadmin/user_upload/ipcinfo/img/dashboard_thumbnail.jpg",
            }
        )
        showcase.add_tags(tags)
        group_rows = output["group_rows_latest"]
        if group_rows:
            filename = f"ipc_{countryiso3lower}_level1_long_latest.csv"
            resourcedata = {
                "name": filename,
                "description": "Latest IPC level 1 data in long form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(group_rows[0].keys()),
                group_rows,
                self._configuration["long_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        group_rows_wide = output["group_rows_wide_latest"]
        if group_rows_wide:
            filename = f"ipc_{countryiso3lower}_level1_wide_latest.csv"
            resourcedata = {
                "name": filename,
                "description": "Latest IPC level 1 data in wide form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(group_rows_wide[0].keys()),
                group_rows_wide,
                self._configuration["wide_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        area_rows = output["area_rows_latest"]
        if area_rows:
            filename = f"ipc_{countryiso3lower}_area_long_latest.csv"
            resourcedata = {
                "name": filename,
                "description": "Latest IPC area data in long form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(area_rows[0].keys()),
                area_rows,
                self._configuration["long_hxltags"],
                folder,
                filename,
                resourcedata,
            )
        elif not group_rows:
            # No level 1 and no area rows at all for the latest analysis.
            logger.error(f"{countryiso3} has no latest subnational data!")

        area_rows_wide = output["area_rows_wide_latest"]
        if area_rows_wide:
            filename = f"ipc_{countryiso3lower}_area_wide_latest.csv"
            resourcedata = {
                "name": filename,
                "description": "Latest IPC area data in wide form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(area_rows_wide[0].keys()),
                area_rows_wide,
                self._configuration["wide_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        # With only one analysis, the "all analyses" files would duplicate
        # the "latest" ones, so stop here.
        country_rows_wide = output["country_rows_wide"]
        if len(country_rows_wide) == 1:
            return dataset, showcase

        country_rows = output["country_rows"]
        filename = f"ipc_{countryiso3lower}_national_long.csv"
        resourcedata = {
            "name": filename,
            "description": "All IPC national data in long form with HXL tags",
        }
        success, results = dataset.generate_resource_from_iterable(
            list(country_rows[0].keys()),
            country_rows,
            self._configuration["long_hxltags"],
            folder,
            filename,
            resourcedata,
        )

        filename = f"ipc_{countryiso3lower}_national_wide.csv"
        resourcedata = {
            "name": filename,
            "description": "All IPC national data in wide form with HXL tags",
        }
        success, results = dataset.generate_resource_from_iterable(
            list(country_rows_wide[0].keys()),
            country_rows_wide,
            self._configuration["wide_hxltags"],
            folder,
            filename,
            resourcedata,
        )

        group_rows = output["group_rows"]
        if group_rows:
            filename = f"ipc_{countryiso3lower}_level1_long.csv"
            resourcedata = {
                "name": filename,
                "description": "All IPC level 1 data in long form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(group_rows[0].keys()),
                group_rows,
                self._configuration["long_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        group_rows_wide = output["group_rows_wide"]
        if group_rows_wide:
            filename = f"ipc_{countryiso3lower}_level1_wide.csv"
            resourcedata = {
                "name": filename,
                "description": "All IPC level 1 data in wide form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(group_rows_wide[0].keys()),
                group_rows_wide,
                self._configuration["wide_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        area_rows = output["area_rows"]
        if area_rows:
            filename = f"ipc_{countryiso3lower}_area_long.csv"
            resourcedata = {
                "name": filename,
                "description": "All IPC area data in long form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(area_rows[0].keys()),
                area_rows,
                self._configuration["long_hxltags"],
                folder,
                filename,
                resourcedata,
            )
        elif not group_rows:
            logger.error(f"{countryiso3} has no subnational data!")

        area_rows_wide = output["area_rows_wide"]
        if area_rows_wide:
            filename = f"ipc_{countryiso3lower}_area_wide.csv"
            resourcedata = {
                "name": filename,
                "description": "All IPC area data in wide form with HXL tags",
            }
            success, results = dataset.generate_resource_from_iterable(
                list(area_rows_wide[0].keys()),
                area_rows_wide,
                self._configuration["wide_hxltags"],
                folder,
                filename,
                resourcedata,
            )

        return dataset, showcase
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc