• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

SEED-platform / seed / #8755

10 Dec 2024 09:20PM UTC coverage: 78.792% (-0.008%) from 78.8%
#8755

push

coveralls-python

web-flow
Fix organization delete - ensure there is a chance to confirm and that it works in one step. (#4851)

* new confirm modal for removing organizations, rearrange inventory deletion so as to not try to remove org before the org is empty

* linting

* update translations and css

* lint

---------

Co-authored-by: Alex Swindler <alex.swindler@nrel.gov>
Co-authored-by: Katherine Fleming <2205659+kflemin@users.noreply.github.com>

25 of 38 new or added lines in 2 files covered. (65.79%)

4 existing lines in 1 file now uncovered.

18472 of 23444 relevant lines covered (78.79%)

0.79 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

61.05
/seed/tasks.py
1
"""
2
SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
3
See also https://github.com/SEED-platform/seed/blob/main/LICENSE.md
4
"""
5

6
import itertools
1✔
7
import math
1✔
8
import sys
1✔
9
from datetime import datetime
1✔
10
from random import randint
1✔
11

12
import pytz
1✔
13
from celery import chord, shared_task
1✔
14
from celery.utils.log import get_task_logger
1✔
15
from django.conf import settings
16
from django.core.mail import send_mail
17
from django.db import transaction
18
from django.db.models import Q
19
from django.template import Context, Template, loader
20
from django.urls import reverse_lazy
21
from django.utils.encoding import force_bytes
22
from django.utils.http import urlsafe_base64_encode
23

24
from seed.audit_template.audit_template import AuditTemplate
1✔
25
from seed.data_importer.tasks import hash_state_object
1✔
26
from seed.decorators import lock_and_track
1✔
27
from seed.lib.mcm.utils import batch
1✔
28
from seed.lib.progress_data.progress_data import ProgressData
1✔
29
from seed.lib.superperms.orgs.models import Organization
1✔
30
from seed.models import (
1✔
31
    DATA_STATE_MATCHING,
32
    Column,
33
    ColumnMapping,
34
    Cycle,
35
    DerivedColumn,
36
    Property,
37
    PropertyState,
38
    PropertyView,
39
    SalesforceConfig,
40
    TaxLot,
41
    TaxLotState,
42
    TaxLotView,
43
)
44
from seed.utils.salesforce import auto_sync_salesforce_properties
1✔
45

46
logger = get_task_logger(__name__)
1✔
47

48

49
def invite_new_user_to_seed(domain, email_address, token, user_pk, first_name):
    """Send an invitation email to a newly created user from the landing page.

    NOTE: this function is only used on the landing page because the user has
    not yet been assigned an organization.

    :param domain: The domain name of the running SEED instance
    :param email_address: The address to send the invitation to
    :param token: Generated by Django's default_token_generator
    :param user_pk: Primary key for this user record
    :param first_name: First name of the new user

    Returns: nothing
    """
    signup_url = reverse_lazy("landing:activate", kwargs={"uidb64": urlsafe_base64_encode(force_bytes(user_pk)), "token": token})

    context = {"email": email_address, "domain": domain, "protocol": settings.PROTOCOL, "first_name": first_name, "signup_url": signup_url}

    subject = "New SEED account"
    email_body = loader.render_to_string("seed/account_create_email.html", context)
    send_mail(subject, email_body, settings.SERVER_EMAIL, [email_address])
    try:
        # BCC a copy to the account-creation mailbox when configured; an
        # AttributeError means SEED_ACCOUNT_CREATION_BCC is not set.
        bcc_address = settings.SEED_ACCOUNT_CREATION_BCC
        new_subject = f"{subject} ({email_address})"
        send_mail(new_subject, email_body, settings.SERVER_EMAIL, [bcc_address])
    except AttributeError:
        pass
74

75

76
@shared_task
def invite_to_seed(domain, email_address, token, organization, user_pk, first_name):
    """Send an invitation email to a newly created user.

    domain -- The domain name of the running seed instance
    email_address -- The address to send the invitation to
    token -- generated by Django's default_token_generator
    organization -- the organization the user was invited to
    user_pk -- primary key for this user record
    first_name -- First name of the new user

    Returns: nothing
    """
    # Build the absolute sign-up link out of protocol, host, and the signed path.
    url_context = Context(
        {
            "protocol": settings.PROTOCOL,
            "domain": domain,
            "sign_up_url": reverse_lazy(
                "landing:signup", kwargs={"uidb64": urlsafe_base64_encode(force_bytes(user_pk)), "token": token}
            ),
        }
    )
    signup_link = Template("{{protocol}}://{{domain}}{{sign_up_url}}").render(url_context)

    # The organization controls the wording of the invitation content.
    message = Template(organization.new_user_email_content).render(
        Context({"first_name": first_name, "sign_up_link": signup_link})
    )

    full_body = Template("{{content}}\n\n{{signature}}").render(
        Context({"content": message, "signature": organization.new_user_email_signature})
    )

    send_mail(organization.new_user_email_subject, full_body, organization.new_user_email_from, [email_address])
    try:
        # Optional BCC copy; AttributeError means the setting is absent.
        bcc_target = settings.SEED_ACCOUNT_CREATION_BCC
        send_mail(
            f"{organization.new_user_email_subject} ({email_address})",
            full_body,
            organization.new_user_email_from,
            [bcc_target],
        )
    except AttributeError:
        pass
114

115

116
@shared_task
def invite_to_organization(domain, new_user, requested_by, new_org):
    """Notify an existing SEED user that they have been added to an organization.

    :param domain: The domain name of the running SEED instance
    :param new_user: User object that was added to the organization
    :param requested_by: User object that requested the addition
    :param new_org: Organization the user was added to

    Returns: nothing
    """
    context = {
        "new_user": new_user,
        "first_name": new_user.first_name,
        "domain": domain,
        "protocol": settings.PROTOCOL,
        "new_org": new_org,
        "requested_by": requested_by,
    }

    subject = "Your SEED account has been added to an organization"
    email_body = loader.render_to_string("seed/account_org_added.html", context)
    send_mail(subject, email_body, settings.SERVER_EMAIL, [new_user.email])
    try:
        # BCC a copy to the account-creation mailbox when configured; an
        # AttributeError means SEED_ACCOUNT_CREATION_BCC is not set.
        bcc_address = settings.SEED_ACCOUNT_CREATION_BCC
        new_subject = f"{subject} ({new_user.email})"
        send_mail(new_subject, email_body, settings.SERVER_EMAIL, [bcc_address])
    except AttributeError:
        pass
146

147

148
def send_salesforce_error_log(org_pk, errors):
    """Email the Salesforce error log to the configured logging address after a failed scheduled sync."""
    config = SalesforceConfig.objects.get(organization_id=org_pk)
    organization = Organization.objects.get(pk=org_pk)

    # No logging address configured means nothing to send.
    if not config.logging_email:
        return

    email_body = loader.render_to_string(
        "seed/salesforce_update_errors.html",
        {"organization_name": organization.name, "errors": errors},
    )
    send_mail("Salesforce Automatic Update Errors", email_body, settings.SERVER_EMAIL, [config.logging_email])
159

160

161
@shared_task
@lock_and_track
def delete_organization_and_inventory(org_pk, prog_key=None):
    """Delete all inventory owned by an organization, then the organization itself.

    :param org_pk: primary key of the organization to remove
    :param prog_key: optional existing ProgressData key to report through
    :return: dict, from ProgressData.result()
    """
    if prog_key:
        progress_data = ProgressData.from_key(prog_key)
    else:
        progress_data = ProgressData(func_name="delete_organization_and_inventory", unique_id=org_pk)

    # delete_org=True: remove the Organization record once its inventory is gone.
    _evaluate_delete_organization_and_inventory.subtask((progress_data.key, org_pk, True)).apply_async()

    return progress_data.result()
171

172

173
@shared_task
def _evaluate_delete_organization_and_inventory(prog_key, org_pk, delete_org=False):
    """Delete any existing inventory for the org, then optionally delete the organization itself."""
    chunk_size = 100

    progress_data = ProgressData.from_key(prog_key)

    # Collect the ids of every inventory record owned by the organization,
    # paired with the model class used to delete them (order matters: parents
    # before states for Property, then TaxLot).
    inventory = [
        (Property, list(Property.objects.filter(organization_id=org_pk).values_list("id", flat=True))),
        (PropertyState, list(PropertyState.objects.filter(organization_id=org_pk).values_list("id", flat=True))),
        (TaxLot, list(TaxLot.objects.filter(organization_id=org_pk).values_list("id", flat=True))),
        (TaxLotState, list(TaxLotState.objects.filter(organization_id=org_pk).values_list("id", flat=True))),
    ]

    total = sum(len(ids) for _, ids in inventory)
    progress_data.total = total / float(chunk_size) + 1
    progress_data.data["completed_records"] = 0
    progress_data.data["total_records"] = total
    progress_data.save()

    if total > 0:
        # Delete synchronously in chunks, stepping the counter per chunk.
        for model, ids in inventory:
            for chunk_ids in batch(ids, chunk_size):
                _delete_organization_children(chunk_ids, model, progress_data.key)
    else:
        progress_data.step()

    if delete_org:
        DerivedColumn.objects.filter(organization_id=org_pk).delete()
        Organization.objects.get(pk=org_pk).delete()
        # TODO: Delete measures in BRICR branch
207

208

209
@shared_task
def _delete_organization_children(chunk_ids, class_name, prog_key):
    """Delete one chunk of inventory rows and bump the progress counter.

    ``class_name`` is the model class itself (e.g. Property), not a string.
    """
    doomed = class_name.objects.filter(id__in=chunk_ids)
    doomed.delete()

    ProgressData.from_key(prog_key).step_with_counter()
214

215

216
@shared_task
def _finish_delete_column(column_id, prog_key):
    """Remove the column's raw-name mappings, delete the column, and close out the progress record."""
    target = Column.objects.get(id=column_id)
    # Mappings from raw column names to the mapped column must go first.
    ColumnMapping.objects.filter(column_mapped=target).delete()
    target.delete()

    tracker = ProgressData.from_key(prog_key)
    return tracker.finish_with_success(f'Removed {target.column_name} from {tracker.data["total_records"]} records')
225

226

227
@shared_task
@lock_and_track
def delete_organization_inventory(org_pk, prog_key=None, chunk_size=100, *args, **kwargs):
    """Delete all properties & taxlots within an organization (the organization itself is kept).

    :param org_pk: primary key of the organization whose inventory is removed
    :param prog_key: optional existing ProgressData key to report through
    :param chunk_size: accepted for interface compatibility; chunking happens in the evaluation task
    :return: dict, from ProgressData.result()
    """
    # Cascading deletions can recurse deeply. Save the current recursion limit
    # and restore exactly that value (the original hard-coded 1000, which is
    # wrong if the process was configured differently), even on error.
    previous_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(5000)
    try:
        progress_data = (
            ProgressData.from_key(prog_key) if prog_key else ProgressData(func_name="delete_organization_inventory", unique_id=org_pk)
        )
        # delete_org=False: remove inventory only, keep the Organization record.
        _evaluate_delete_organization_and_inventory.subtask((progress_data.key, org_pk, False)).apply_async()
    finally:
        sys.setrecursionlimit(previous_limit)
    return progress_data.result()
238

239

240
@shared_task
@lock_and_track
def delete_organization_cycle(cycle_pk, organization_pk, prog_key=None, chunk_size=100, *args, **kwargs):
    """Delete an organization's cycle.

    This must be an async task b/c a cascading deletion can require the removal
    of many *States associated with an ImportFile, overwhelming the server.

    :param cycle_pk: int
    :param prog_key: str
    :param chunk_size: int
    :return: dict, from ProgressData.result()
    """
    if prog_key:
        progress_data = ProgressData.from_key(prog_key)
    else:
        progress_data = ProgressData(func_name="delete_organization_cycle", unique_id=cycle_pk)

    # The cycle must already be empty of inventory views before removal.
    still_populated = PropertyView.objects.filter(cycle_id=cycle_pk).exists() or TaxLotView.objects.filter(cycle_id=cycle_pk).exists()
    if still_populated:
        progress_data.finish_with_error("All PropertyView and TaxLotViews linked to the Cycle must be removed")
        return progress_data.result()

    property_state_ids = PropertyState.objects.filter(import_file__cycle_id=cycle_pk).values_list("id", flat=True)
    tax_lot_state_ids = TaxLotState.objects.filter(import_file__cycle_id=cycle_pk).values_list("id", flat=True)
    progress_data.total = (len(property_state_ids) + len(tax_lot_state_ids)) / chunk_size
    progress_data.save()

    # Fan the state deletions out in chunks; the chord callback removes the
    # cycle itself once every chunk has completed.
    subtasks = [
        _delete_organization_property_state_chunk.si(chunk_ids, progress_data.key, organization_pk)
        for chunk_ids in batch(property_state_ids, chunk_size)
    ]
    subtasks += [
        _delete_organization_taxlot_state_chunk.si(chunk_ids, progress_data.key, organization_pk)
        for chunk_ids in batch(tax_lot_state_ids, chunk_size)
    ]
    chord(subtasks, interval=15)(_finish_delete_cycle.si(cycle_pk, progress_data.key))

    return progress_data.result()
274

275

276
@shared_task
def _finish_delete_cycle(cycle_id, prog_key):
    """Chord callback: delete the now-empty cycle and mark the progress record finished."""
    doomed_cycle = Cycle.objects.get(id=cycle_id)
    doomed_cycle.delete()

    # The in-memory instance keeps its attributes after delete(), so the name
    # is still available for the success message.
    return ProgressData.from_key(prog_key).finish_with_success(f"Removed {doomed_cycle.name}")
284

285

286
@shared_task
@lock_and_track
def delete_organization_column(column_pk, org_pk, prog_key=None, chunk_size=100, *args, **kwargs):
    """Delete an extra_data column from all merged property/taxlot states.

    :param column_pk: primary key of the Column to remove
    :param org_pk: primary key of the owning organization
    :param prog_key: optional existing ProgressData key to report through
    :param chunk_size: number of states rewritten per chunk
    :return: dict, from ProgressData.result()
    """
    if prog_key:
        progress_data = ProgressData.from_key(prog_key)
    else:
        progress_data = ProgressData(func_name="delete_organization_column", unique_id=column_pk)

    # Kick the real work off asynchronously; the caller polls the progress record.
    _evaluate_delete_organization_column.subtask((column_pk, org_pk, progress_data.key, chunk_size)).apply_async()

    return progress_data.result()
297

298

299
@shared_task
@lock_and_track
def update_multiple_columns(key, table_name, org_pk, changes, prog_key=None):
    """Update several columns at once, rehashing affected states when required.

    :param key: unique id used to namespace the progress record
    :param table_name: "PropertyState" or "TaxLotState"
    :param org_pk: primary key of the owning organization
    :param changes: mapping of column pk -> {attribute: new_value}
    :param prog_key: optional existing ProgressData key to report through
    :return: dict, from ProgressData.result()
    """
    if prog_key:
        progress_data = ProgressData.from_key(prog_key)
    else:
        progress_data = ProgressData(func_name="update_multiple_columns", unique_id=key)

    _evaluate_update_multiple_columns.subtask((progress_data.key, table_name, org_pk, changes)).apply_async()

    return progress_data.result()
307

308

309
@shared_task
def _evaluate_update_multiple_columns(prog_key, table_name, org_pk, changes):
    """Update columns, then check for required rehash - and rehash states if need be"""

    chunk_size = 100

    # Apply every attribute change. Any column whose "is_excluded_from_hash"
    # flag is among the changed attributes forces a rehash of the states that
    # hold data for it, because that flag feeds the state hash computation.
    rehashed_columns = []
    for key in changes:
        c = Column.objects.get(pk=key)
        for attribute in changes[key]:
            if attribute == "is_excluded_from_hash":
                rehashed_columns.append(c)
            setattr(c, attribute, changes[key][attribute])
        c.save()

    progress_data = ProgressData.from_key(prog_key)

    if len(rehashed_columns) == 0:
        # No rehash needed: report all records as already completed.
        total = len(changes.keys())
        progress_data.total = total / float(chunk_size) + 1
        progress_data.data["completed_records"] = total
        progress_data.data["total_records"] = total
        progress_data.save()
    else:
        # Find the merged states that actually carry data in the rehashed columns.
        query = _build_property_query_for_rehashed_columns(org_pk, rehashed_columns)

        ids = []

        if table_name == "PropertyState":
            ids = PropertyState.objects.filter(query).values_list("id", flat=True)
        elif table_name == "TaxLotState":
            ids = TaxLotState.objects.filter(query).values_list("id", flat=True)

        # Here completed_records starts at 0: the chunks below do the work.
        total = len(ids)
        progress_data.total = total / float(chunk_size) + 1
        progress_data.data["completed_records"] = 0
        progress_data.data["total_records"] = total
        progress_data.save()

        # Rehash synchronously in chunks, stepping the counter once per chunk.
        for chunk_ids in batch(ids, chunk_size):
            _rehash_state_chunk(chunk_ids, table_name, progress_data.key)

    _finish_update_multiple_columns(changes, progress_data.key)
352

353

354
@shared_task
def _build_property_query_for_rehashed_columns(org_pk, rehashed_columns):
    """Build a Q filter matching merged states that hold data in any of the rehashed columns."""
    # Partition the columns into canonical model fields and extra_data keys.
    canonical_names = [col.column_name for col in rehashed_columns if not col.is_extra_data]
    extra_names = [col.column_name for col in rehashed_columns if col.is_extra_data]

    # A state qualifies when any canonical field is non-null OR any of the
    # extra_data keys is present.
    presence = Q()
    for name in canonical_names:
        presence.add(Q(**{name + "__isnull": False}), Q.OR)
    if extra_names:
        presence.add(Q(extra_data__has_any_key=extra_names), Q.OR)

    # Restrict to merged states owned by this organization.
    query = Q()
    query.add(Q(data_state=DATA_STATE_MATCHING), Q.AND)
    query.add(Q(organization_id=org_pk), Q.AND)
    query.add(presence, Q.AND)
    return query
375

376

377
@shared_task
def _rehash_state_chunk(chunk_ids, table_name, prog_key):
    """Recompute hash_object for one chunk of -States and bump the progress counter."""
    model = PropertyState if table_name == "PropertyState" else TaxLotState
    chunk = model.objects.filter(id__in=chunk_ids)

    # All rewrites in this chunk commit together.
    with transaction.atomic():
        for state in chunk:
            state.hash_object = hash_state_object(state)
            state.save(update_fields=["hash_object"])

    ProgressData.from_key(prog_key).step_with_counter()
390

391

392
@shared_task
def _finish_update_multiple_columns(changes, prog_key):
    """Close out the progress record, reporting whether any states were rebuilt."""
    progress_data = ProgressData.from_key(prog_key)
    rebuilt = progress_data.data["total_records"]
    column_count = len(changes.keys())

    # total_records equal to the number of changed columns means the no-rehash
    # branch ran; otherwise it counts the states that were rehashed.
    if rebuilt == len(changes):
        message = f"Updated {column_count} columns."
    else:
        message = f"Updated {column_count} columns.  Rebuilt {rebuilt} records"
    return progress_data.finish_with_success(message)
401

402

403
@shared_task
def _evaluate_delete_organization_column(column_pk, org_pk, prog_key, chunk_size, *args, **kwargs):
    """Find the merged -States carrying the extra_data column, then delete the key chunk by chunk."""
    column = Column.objects.get(id=column_pk, organization_id=org_pk)

    # Only merged states that actually contain the extra_data key need rewriting.
    model_by_table = {"PropertyState": PropertyState, "TaxLotState": TaxLotState}
    model = model_by_table.get(column.table_name)
    if model is not None:
        ids = model.objects.filter(
            organization_id=org_pk, data_state=DATA_STATE_MATCHING, extra_data__has_key=column.column_name
        ).values_list("id", flat=True)
    else:
        ids = []

    progress_data = ProgressData.from_key(prog_key)
    total = len(ids)
    progress_data.total = total / float(chunk_size) + 1
    progress_data.data["completed_records"] = 0
    progress_data.data["total_records"] = total
    progress_data.save()

    # Rewrite the states synchronously, one chunk at a time.
    for chunk_ids in batch(ids, chunk_size):
        _delete_organization_column_chunk(chunk_ids, column.column_name, column.table_name, progress_data.key)

    # Finally remove the column record (and its mappings) itself.
    _finish_delete_column(column_pk, progress_data.key)
430

431

432
def _delete_organization_column_chunk(chunk_ids, column_name, table_name, prog_key, *args, **kwargs):
    """Strip ``column_name`` out of extra_data for one chunk of states and bump the counter."""
    model = PropertyState if table_name == "PropertyState" else TaxLotState
    chunk = model.objects.filter(id__in=chunk_ids)

    with transaction.atomic():
        for state in chunk:
            del state.extra_data[column_name]
            # NOTE(review): hash_object is listed in update_fields but is not
            # recomputed here — presumably a save-time hook refreshes it; confirm.
            state.save(update_fields=["extra_data", "hash_object"])

    ProgressData.from_key(prog_key).step_with_counter()
447

448

449
@shared_task
def _delete_organization_property_chunk(del_ids, prog_key, org_pk, *args, **kwargs):
    """Delete one chunk of the org's Property rows and advance the progress bar."""
    doomed = Property.objects.filter(organization_id=org_pk, pk__in=del_ids)
    doomed.delete()
    ProgressData.from_key(prog_key).step()
455

456

457
@shared_task
def _delete_organization_property_state_chunk(del_ids, prog_key, org_pk, *args, **kwargs):
    """Delete one chunk of PropertyState rows and advance the progress bar."""
    # NOTE: states are looked up by pk only; org_pk is not part of the filter here.
    doomed = PropertyState.objects.filter(pk__in=del_ids)
    doomed.delete()
    ProgressData.from_key(prog_key).step()
463

464

465
@shared_task
def _delete_organization_taxlot_chunk(del_ids, prog_key, org_pk, *args, **kwargs):
    """Delete one chunk of the org's TaxLot rows and advance the progress bar."""
    doomed = TaxLot.objects.filter(organization_id=org_pk, pk__in=del_ids)
    doomed.delete()
    ProgressData.from_key(prog_key).step()
471

472

473
@shared_task
def _delete_organization_taxlot_state_chunk(del_ids, prog_key, org_pk, *args, **kwargs):
    """Delete one chunk of the org's TaxLotState rows and advance the progress bar."""
    doomed = TaxLotState.objects.filter(organization_id=org_pk, pk__in=del_ids)
    doomed.delete()
    ProgressData.from_key(prog_key).step()
479

480

481
@shared_task
def sync_salesforce(org_id):
    """Run the scheduled Salesforce property sync, emailing the error log on failure."""
    succeeded, messages = auto_sync_salesforce_properties(org_id)
    if not succeeded:
        # send email with errors
        send_salesforce_error_log(org_id, messages)
487

488

489
@shared_task
def sync_audit_template(org_id):
    """Fetch Audit Template city submission XML for an organization, when one is configured."""
    try:
        org = Organization.objects.get(id=org_id)
    except Organization.DoesNotExist:
        # Nothing to do for an unknown organization.
        return

    # Only organizations tied to an Audit Template city can sync.
    if not org.audit_template_city_id:
        return

    AuditTemplate(org_id).batch_get_city_submission_xml([])
501

502

503
@shared_task
def set_update_to_now(property_view_ids, taxlot_view_ids, progress_key):
    """Stamp the current UTC time onto the states (and parent records) of the given views.

    :param property_view_ids: ids of PropertyViews to refresh
    :param taxlot_view_ids: ids of TaxLotViews to refresh
    :param progress_key: ProgressData key used to report progress
    :return: the "progress" entry of the finished ProgressData result
    """
    now = datetime.now(pytz.UTC)
    progress_data = ProgressData.from_key(progress_key)
    progress_data.total = 100
    id_count = len(property_view_ids) + len(taxlot_view_ids)
    # Step the progress bar roughly once per percent of views processed.
    batch_size = math.ceil(id_count / 100)

    property_views = PropertyView.objects.filter(id__in=property_view_ids).prefetch_related("state", "property")
    taxlot_views = TaxLotView.objects.filter(id__in=taxlot_view_ids).prefetch_related("state", "taxlot")

    # All timestamp updates commit together.
    with transaction.atomic():
        for idx, view in enumerate(itertools.chain(property_views, taxlot_views)):
            view.state.updated = now
            view.state.save()

            if isinstance(view, PropertyView):
                # NOTE(review): assigns ``.update`` here while the state above uses
                # ``.updated`` — looks like a typo for ``updated``; confirm against
                # the Property/TaxLot model fields before changing.
                view.property.update = now
                view.property.save()
            else:
                view.taxlot.update = now
                view.taxlot.save()

            if batch_size > 0 and idx % batch_size == 0:
                progress_data.step(f"Refreshing ({idx}/{id_count})")

    progress_data.finish_with_success()
    return progress_data.result()["progress"]
531

532

533
@shared_task
def update_state_derived_data(property_state_ids=None, taxlot_state_ids=None, derived_column_ids=None):
    """Recompute derived column values on the given property/taxlot states.

    :param property_state_ids: ids of PropertyStates to update (default: none)
    :param taxlot_state_ids: ids of TaxLotStates to update (default: none)
    :param derived_column_ids: ids of the DerivedColumns to recompute
    :return: dict, from ProgressData.result()
    """
    # None defaults replace the original mutable [] defaults, which are shared
    # across calls in Python; behavior for callers is unchanged.
    property_state_ids = property_state_ids if property_state_ids is not None else []
    taxlot_state_ids = taxlot_state_ids if taxlot_state_ids is not None else []
    derived_column_ids = derived_column_ids if derived_column_ids is not None else []

    progress_data = ProgressData(func_name="update_derived_data", unique_id=randint(10000, 99999))
    progress_data.total = len(property_state_ids) + len(taxlot_state_ids)
    progress_data.save()
    progress_key = progress_data.key

    chunk_size = 100

    # Split the derived columns by the inventory type they apply to.
    derived_columns = DerivedColumn.objects.filter(id__in=derived_column_ids)
    property_derived_column_ids = list(derived_columns.filter(inventory_type=DerivedColumn.PROPERTY_TYPE).values_list("id", flat=True))
    taxlot_derived_column_ids = list(derived_columns.filter(inventory_type=DerivedColumn.TAXLOT_TYPE).values_list("id", flat=True))

    tasks = []
    for chunk_ids in batch(property_state_ids, chunk_size):
        tasks.append(_update_property_state_derived_data_chunk.si(progress_key, chunk_ids, property_derived_column_ids))
    for chunk_ids in batch(taxlot_state_ids, chunk_size):
        tasks.append(_update_taxlot_state_derived_data_chunk.si(progress_key, chunk_ids, taxlot_derived_column_ids))

    # The chord callback clears the columns' is_updating flag after every chunk finishes.
    chord(tasks, interval=15)(_finish_update_state_derived_data.si(progress_key, property_derived_column_ids + taxlot_derived_column_ids))

    return progress_data.result()
555

556

557
@shared_task
def _update_property_state_derived_data_chunk(progress_key, property_state_ids=None, derived_column_ids=None):
    """Evaluate each derived column for one chunk of PropertyStates and save the results.

    :param progress_key: ProgressData key used to report progress
    :param property_state_ids: ids of the PropertyStates in this chunk
    :param derived_column_ids: ids of the DerivedColumns to evaluate
    """
    # None defaults replace the original mutable [] defaults (shared across calls).
    property_state_ids = property_state_ids if property_state_ids is not None else []
    derived_column_ids = derived_column_ids if derived_column_ids is not None else []

    progress_data = ProgressData.from_key(progress_key)

    states = PropertyState.objects.filter(id__in=property_state_ids)
    derived_columns = DerivedColumn.objects.filter(id__in=derived_column_ids)

    for state in states:
        for derived_column in derived_columns:
            state.derived_data[derived_column.name] = derived_column.evaluate(state)
        state.save()
        progress_data.step()
569

570

571
@shared_task
def _update_taxlot_state_derived_data_chunk(progress_key, taxlot_state_ids=None, derived_column_ids=None):
    """Evaluate each derived column for one chunk of TaxLotStates and save the results.

    :param progress_key: ProgressData key used to report progress
    :param taxlot_state_ids: ids of the TaxLotStates in this chunk
    :param derived_column_ids: ids of the DerivedColumns to evaluate
    """
    # None defaults replace the original mutable [] defaults (shared across calls).
    taxlot_state_ids = taxlot_state_ids if taxlot_state_ids is not None else []
    derived_column_ids = derived_column_ids if derived_column_ids is not None else []

    progress_data = ProgressData.from_key(progress_key)

    states = TaxLotState.objects.filter(id__in=taxlot_state_ids)
    derived_columns = DerivedColumn.objects.filter(id__in=derived_column_ids)

    for state in states:
        for derived_column in derived_columns:
            state.derived_data[derived_column.name] = derived_column.evaluate(state)
        state.save()
        progress_data.step()
583

584

585
@shared_task
def _finish_update_state_derived_data(progress_key, derived_column_ids):
    """Chord callback: clear is_updating on the touched columns and close the progress record."""
    touched = DerivedColumn.objects.filter(id__in=derived_column_ids)
    Column.objects.filter(derived_column__in=touched).update(is_updating=False)

    ProgressData.from_key(progress_key).finish_with_success("Updated Derived Data")
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc