• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

ssec / sift / 13528712524

25 Feb 2025 06:50PM UTC coverage: 29.691% (-20.2%) from 49.871%
13528712524

push

github

web-flow
Merge pull request #437 from ameraner/fix_export_image_float

Deactivate export image and rgb config tests to avoid Segfaults in tests and add explicit float casting for Fraction call to fix export tests

0 of 1 new or added line in 1 file covered. (0.0%)

2747 existing lines in 33 files now uncovered.

4386 of 14772 relevant lines covered (29.69%)

0.59 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

21.24
/uwsift/model/document.py
1
#!/usr/bin/env python
2
# -*- coding: utf-8 -*-
3
"""
2✔
4
uwsift.model.document
5
---------------------
6

7
The document is an interface to further process some user interactions and delegate the import of new content to the
8
workspace. It also contains all metadata information of all loaded records.
9

10
The document handles the following tasks:
11
    - import new files
12
    - instruct the workspace to import new content
13
    - create a Presentation using metadata information
14
    - manage the currently active area definition used to present the data
15
    - manage user color maps
16

17
The communication between the document and other parts of the application are done with signal/slot connections.
18

19
Document has zero or more Colormaps, determining how they're presented
20

21
The document does not own data (content). It only owns metadata (info).
22

23
All entities in the Document have a UUID that is their identity throughout their lifecycle,
24
and is often used as shorthand between subsystems. Document rarely deals directly with content.
25

26
:author: R.K.Garcia <rayg@ssec.wisc.edu> and others
27
:copyright: 2015 by University of Wisconsin Regents, see AUTHORS for more details
28
:license: GPLv3, see LICENSE for more details
29
"""
30
from __future__ import annotations
2✔
31

32
__author__ = "rayg"
2✔
33
__docformat__ = "reStructuredText"
2✔
34

35
import json
2✔
36
import logging
2✔
37
import os
2✔
38
import typing as typ
2✔
39
from uuid import UUID
2✔
40

41
from PyQt5.QtCore import QObject, pyqtSignal
2✔
42

43
from uwsift.common import Info, Kind, Presentation
2✔
44
from uwsift.model.area_definitions_manager import AreaDefinitionsManager
2✔
45
from uwsift.queue import TASK_DOING, TASK_PROGRESS, TaskQueue
2✔
46
from uwsift.util.common import get_initial_gamma, units_conversion
2✔
47
from uwsift.util.default_paths import DOCUMENT_SETTINGS_DIR
2✔
48
from uwsift.view.colormap import (
2✔
49
    COLORMAP_MANAGER,
50
    SITE_CATEGORY,
51
    USER_CATEGORY,
52
    PyQtGraphColormap,
53
)
54
from uwsift.workspace import BaseWorkspace, CachingWorkspace, SimpleWorkspace
2✔
55
from uwsift.workspace.metadatabase import Product
2✔
56

57
LOG = logging.getLogger(__name__)
2✔
58

59
###################################################################################################################
60

61

62
class Document(QObject):  # base class is rightmost, mixins left of that
    """Storage for dataset info and user information.

    Low-level "internal" interface that acts as a signaling hub.
    Direct access to the document is being deprecated: most direct access
    patterns should be migrated to a contextual view of the document, which
    reduces abstraction leakage and lets the document storage evolve.
    """

    # Qt signals emitted when the document state changes
    didAddDataset = pyqtSignal(dict, Presentation)
    didUpdateBasicDataset = pyqtSignal(UUID, Kind)
    didChangeProjection = pyqtSignal(str)  # name of the new projection (area definition)
    didReorderTracks = pyqtSignal(set, set)  # (added track names, removed track names)
    didUpdateUserColormap = pyqtSignal(str)  # name of the colormap which has an update
    def __init__(
        self,
        workspace: BaseWorkspace,
        queue: TaskQueue,
        config_dir=DOCUMENT_SETTINGS_DIR,
        **kwargs,
    ):
        """Initialize the document.

        :param workspace: workspace delegate used to import content and to
            query metadata
        :param queue: task queue for background work
        :param config_dir: directory holding user settings (colormaps, ...);
            created if it does not exist
        """
        super().__init__(**kwargs)
        self.config_dir = config_dir
        self.queue = queue
        if not os.path.isdir(self.config_dir):
            LOG.debug("Creating settings directory {}".format(self.config_dir))
            # exist_ok=True avoids a crash if the directory appears between
            # the isdir() check above and this call (TOCTOU race).
            os.makedirs(self.config_dir, exist_ok=True)

        self._workspace = workspace
        self._info_by_uuid: typ.Dict[UUID, dict] = {}  # dict(uuid:frozendict)

        self.colormaps = COLORMAP_MANAGER
        self.default_area_def_name = AreaDefinitionsManager.default_area_def_name()
        self.current_area_def_name = self.default_area_def_name

        # Create the colormap directories if they do not exist; otherwise
        # import the colormaps found in them.
        cmap_base_dir = os.path.join(self.config_dir, "colormaps")
        read_cmap_dir = os.path.join(cmap_base_dir, "site")  # read-only
        write_cmap_dir = os.path.join(cmap_base_dir, "user")  # writeable
        self.read_cmap_dir = read_cmap_dir
        self.write_cmap_dir = write_cmap_dir
        importable_cmap_cats = [(True, SITE_CATEGORY, read_cmap_dir), (False, USER_CATEGORY, write_cmap_dir)]
        for read_only, cmap_cat, cmap_dir in importable_cmap_cats:
            if not os.path.exists(cmap_dir):
                os.makedirs(cmap_dir)
            else:
                self.colormaps.import_colormaps(cmap_dir, read_only=read_only, category=cmap_cat)
    def find_colormap(self, colormap):
        """Resolve a colormap name to the corresponding colormap object.

        Anything that is not the name of a known colormap is returned
        unchanged.
        """
        if isinstance(colormap, str) and colormap in self.colormaps:
            return self.colormaps[colormap]
        return colormap
    def area_definition(self, area_definition_name=None):
        """Return the area definition with the given name (the current one if None)."""
        name = area_definition_name or self.current_area_def_name
        return AreaDefinitionsManager.area_def_by_name(name)
    def change_projection(self, area_def_name=None):
        """Switch the current projection (area definition) and notify listeners.

        Passing None selects the default area definition.  The
        didChangeProjection signal is only emitted when the projection
        actually changes.
        """
        if area_def_name is None:
            area_def_name = self.default_area_def_name
        assert area_def_name in AreaDefinitionsManager.available_area_def_names()  # nosec B101
        if area_def_name == self.current_area_def_name:
            return
        LOG.debug(
            f"Changing projection (area definition) from" f" '{self.current_area_def_name}' to '{area_def_name}'"
        )
        self.current_area_def_name = area_def_name
        self.didChangeProjection.emit(self.current_area_def_name)
    def update_user_colormap(self, colormap, name):
        """Persist a user colormap to disk and activate it in the live map.

        :param colormap: JSON-serializable colormap (gradient) definition
        :param name: colormap name, also used as the file name stem
        """
        # Save the new gradient into the writeable colormap directory.  A
        # context manager guarantees the file handle is closed even if the
        # write fails; the previous open()/close() pair leaked the handle
        # whenever write() raised.
        try:
            cmap_path = os.path.join(self.write_cmap_dir, name + ".json")
            with open(cmap_path, "w") as cmap_file:
                cmap_file.write(json.dumps(colormap, indent=2, sort_keys=True))
        except IOError:
            LOG.error("Error saving gradient: {}".format(name), exc_info=True)

        cmap = PyQtGraphColormap(colormap)
        self.colormaps[name] = cmap

        # Update live map
        self.didUpdateUserColormap.emit(name)
    def remove_user_colormap(self, name):
        """Delete a user colormap from disk and from the colormap manager.

        A missing file is ignored; a colormap unknown to the manager still
        raises KeyError (behavior unchanged).

        :param name: name of the colormap to remove
        """
        try:
            # Use the directory computed once in __init__ instead of
            # re-deriving the same path here, so the two spots cannot drift
            # apart.
            os.remove(os.path.join(self.write_cmap_dir, name + ".json"))
        except OSError:
            pass

        del self.colormaps[name]
    def current_projection_index(self):
        """Return the position of the current area definition among the available ones."""
        available = list(AreaDefinitionsManager.available_area_def_names())
        return available.index(self.current_area_def_name)
    def change_projection_index(self, idx):
        """Switch the projection by its index in the list of available area definitions."""
        names = tuple(AreaDefinitionsManager.available_area_def_names())
        return self.change_projection(names[idx])
    def _insert_dataset_with_info(self, info: dict, cmap=None, style=None, insert_before=0):
        """Create a Presentation for a dataset but do not signal.

        Colormap and style fall back to the values stored in *info* when they
        are not given explicitly.

        :param info: dataset metadata
        :return: new Presentation tuple
        """
        # NOTE(review): insert_before is currently unused in this method.
        cmap = cmap if cmap is not None else info.get(Info.COLORMAP)
        style = style if style is not None else info.get(Info.STYLE)
        gamma = get_initial_gamma(info)

        climits = self._workspace.get_range_for_dataset_no_fail(info)

        return Presentation(
            uuid=info[Info.UUID],
            kind=info[Info.KIND],
            visible=True,
            colormap=cmap,
            style=style,
            climits=climits,
            gamma=gamma,
            opacity=1.0,
        )
    def activate_product_uuid_as_new_dataset(self, uuid: UUID, insert_before=0, **importer_kwargs):
        """Import content for a product and publish it as a new dataset.

        For an already-known product only the content import is
        (re-)triggered; otherwise the metadata is completed and the
        didAddDataset signal is emitted.
        """
        if uuid in self._info_by_uuid:
            LOG.debug("dataset already loaded: {}".format(uuid))
            self._workspace.import_product_content(uuid, **importer_kwargs)
            return

        # FUTURE: Load this async, the slots for the below signal need to be OK
        # with that
        self._workspace.import_product_content(uuid, **importer_kwargs)
        # Re-read the metadata: it now includes content information, most
        # importantly the navigation information.
        ws_info = self._workspace.get_info(uuid)
        assert ws_info is not None  # nosec B101
        info = dict(ws_info)  # mutable copy so entries can be added below
        LOG.debug("cell_width: {}".format(repr(info[Info.CELL_WIDTH])))

        LOG.debug("new dataset info: {}".format(repr(info)))
        self._info_by_uuid[uuid] = info
        if Info.UNIT_CONVERSION not in info:
            info[Info.UNIT_CONVERSION] = units_conversion(info)
        if Info.FAMILY not in info:
            info[Info.FAMILY] = self._family_for_product_or_info(info)
        pres = self._insert_dataset_with_info(info, insert_before=insert_before)

        # signal updates from the document
        self.didAddDataset.emit(info, pres)
    def _family_for_product_or_info(self, uuid_or_info):
        """Return the family identifier for a product, given its UUID or info dict.

        The family is looked up from the workspace when possible; otherwise it
        is derived from the metadata in the form
        ``kind:pointofreference:measurement:wavelength``.

        :param uuid_or_info: product UUID or dataset info mapping
        :return: family identifier string
        """
        if isinstance(uuid_or_info, UUID):
            # The original code shared a single `if fam:` after both branches,
            # which raised NameError for workspace types matching neither
            # isinstance check, and returned fam[0] (the first *character* of
            # the family string) for SimpleWorkspace.  Handle each branch
            # explicitly instead.
            if isinstance(self._workspace, CachingWorkspace):
                with self._workspace.metadatabase as s:
                    # first() yields a one-element row tuple (family,) or None
                    fam = s.query(Product.family).filter_by(uuid_str=str(uuid_or_info)).first()
                if fam:
                    return fam[0]
            if isinstance(self._workspace, SimpleWorkspace):
                fam = self._workspace.get_info(uuid_or_info)[Info.FAMILY]
                if fam:
                    # FIX: return the whole family string, not fam[0]
                    return fam
            uuid_or_info = self[uuid_or_info]
        if Info.FAMILY in uuid_or_info:
            LOG.debug("using pre-existing family {}".format(uuid_or_info[Info.FAMILY]))
            return uuid_or_info[Info.FAMILY]
        # derive the family: kind:pointofreference:measurement:wavelength
        kind = uuid_or_info[Info.KIND]
        refpoint = "unknown"  # FUTURE: geo/leo
        measurement = uuid_or_info.get(Info.STANDARD_NAME)
        if uuid_or_info.get("recipe"):
            # RGB composite
            subcat = uuid_or_info["recipe"].name
        elif uuid_or_info.get(Info.CENTRAL_WAVELENGTH):
            # basic band
            subcat = uuid_or_info[Info.CENTRAL_WAVELENGTH]
        else:
            # higher level product or algebraic dataset
            subcat = uuid_or_info[Info.DATASET_NAME]
        return "{}:{}:{}:{}".format(kind.name, refpoint, measurement, subcat)
    def import_files(self, paths, insert_before=0, **importer_kwargs) -> typ.Generator[dict, None, None]:
        """Load product metadata and content from provided file paths.

        Yields progress dictionaries (task description, completion fraction,
        affected UUID, product count): first while collecting metadata, then
        while importing content.

        :param paths: paths to open
        :param insert_before: where to insert them in layer manager
        :raises ValueError: if no products are available in *paths*
        """
        # NOTE: if the importer argument 'merge_with_existing' is not set it
        # defaults to True here.
        # TODO(AR) make 'merge_with_existing' an explicit argument to this
        #  method.
        merge_requested = importer_kwargs.get("merge_with_existing", True)
        do_merge_with_existing = merge_requested and not importer_kwargs.get("resampling_info")
        # Ensure that the result of the test just performed is consistently
        # passed on to further import steps via importer_kwargs:
        importer_kwargs["merge_with_existing"] = do_merge_with_existing

        # Load all the metadata so we can sort the files; assume metadata
        # collection is in the most user-friendly order.
        infos = self._workspace.collect_product_metadata_for_paths(paths, **importer_kwargs)
        uuids = []
        target_uuid_by_uuid = {}  # map new file uuids to merge target uuids
        total_products = 0
        for idx, (num_prods, info) in enumerate(infos):
            uuid = info[Info.UUID]
            merge_target_uuid = uuid
            if do_merge_with_existing:
                # real_paths because for satpy imports the methods paths parameter actually
                # contains the reader names
                real_paths = info["paths"]
                merge_target = self._workspace.find_merge_target(uuid, real_paths, info)
                if merge_target:
                    merge_target_uuid = merge_target.uuid

            yield {
                TASK_DOING: "Collecting metadata {}/{}".format(idx + 1, num_prods),
                TASK_PROGRESS: float(idx + 1) / float(num_prods),
                "uuid": merge_target_uuid,
                "num_products": num_prods,
            }
            # redundant but also more explicit than depending on num_prods
            total_products = num_prods
            uuids.append(uuid)
            target_uuid_by_uuid[uuid] = merge_target_uuid

        if not total_products:
            raise ValueError("no products available in {}".format(paths))

        # collect product and resource information but don't yet import content
        for idx, uuid in enumerate(uuids):
            merge_target_uuid = target_uuid_by_uuid[uuid]
            if do_merge_with_existing and uuid != merge_target_uuid:  # merge products
                active_content_data = self._workspace.import_product_content(
                    uuid, merge_target_uuid=merge_target_uuid, **importer_kwargs
                )
                # active_content_data is none if all segments are already loaded
                # and there is nothing new to import
                if active_content_data:
                    dataset_info = self[merge_target_uuid]
                    self.didUpdateBasicDataset.emit(merge_target_uuid, dataset_info[Info.KIND])
            elif uuid in self._info_by_uuid:
                LOG.warning("dataset with UUID {} already in document?".format(uuid))
                self._workspace.get_content(uuid)
            else:
                self.activate_product_uuid_as_new_dataset(uuid, insert_before=insert_before, **importer_kwargs)

            yield {
                TASK_DOING: "Loading content {}/{}".format(idx + 1, total_products),
                TASK_PROGRESS: float(idx + 1) / float(total_products),
                "uuid": merge_target_uuid,
                "num_products": total_products,
            }
    def sort_product_uuids(self, uuids: typ.Iterable[UUID]) -> typ.List[UUID]:
        """Return the given product UUIDs sorted by family, category and serial.

        Only supported with a CachingWorkspace, since the ordering comes from
        its metadatabase.
        """
        assert isinstance(self._workspace, CachingWorkspace)  # nosec B101
        uuidset = {str(u) for u in uuids}
        if not uuidset:
            return []
        with self._workspace.metadatabase as S:
            products = (
                S.query(Product)
                .filter(Product.uuid_str.in_(uuidset))
                .order_by(Product.family, Product.category, Product.serial)
                .all()
            )
            zult = [(p.uuid, p.ident) for p in products]
        LOG.debug("sorted products: {}".format(repr(zult)))
        return [u for u, _ in zult]
    def get_uuids(self):
        """Return the UUIDs of all datasets currently known to the document."""
        return list(self._info_by_uuid)
    def __getitem__(self, dataset_uuid):
        """Return the dataset info with the given UUID.

        Falls back to the workspace for products that are not (yet) active in
        the document.

        :raises KeyError: if the UUID is unknown to document and workspace
        :raises ValueError: if *dataset_uuid* is not a UUID instance
        """
        if dataset_uuid is None:
            raise KeyError("Key 'None' does not exist in document or workspace")
        if not isinstance(dataset_uuid, UUID):
            raise ValueError("document[UUID] required, %r was used" % type(dataset_uuid))

        if dataset_uuid in self._info_by_uuid:
            return self._info_by_uuid[dataset_uuid]

        # check the workspace for information
        try:
            LOG.debug("Checking workspace for information on inactive product")
            info = self._workspace.get_info(dataset_uuid)
        except KeyError:
            info = None

        if info is None:
            raise KeyError("Key '{}' does not exist in document or workspace".format(dataset_uuid))
        return info
    def remove_dataset_info(self, uuid: UUID):
        """Remove the info of a dataset because it is no longer needed.

        Unknown UUIDs are ignored silently.

        :param uuid: UUID of the dataset which is removed
        """
        # fixed accidental double space in the log message
        LOG.debug(f"Remove dataset info of uuid {uuid}")
        self._info_by_uuid.pop(uuid, None)
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc