diff --git a/core/__pycache__/admin.cpython-311.pyc b/core/__pycache__/admin.cpython-311.pyc
index 598b795..79533be 100644
Binary files a/core/__pycache__/admin.cpython-311.pyc and b/core/__pycache__/admin.cpython-311.pyc differ
diff --git a/core/__pycache__/forms.cpython-311.pyc b/core/__pycache__/forms.cpython-311.pyc
index be8d15c..1bb1da0 100644
Binary files a/core/__pycache__/forms.cpython-311.pyc and b/core/__pycache__/forms.cpython-311.pyc differ
diff --git a/core/__pycache__/urls.cpython-311.pyc b/core/__pycache__/urls.cpython-311.pyc
index 48ff136..254fb70 100644
Binary files a/core/__pycache__/urls.cpython-311.pyc and b/core/__pycache__/urls.cpython-311.pyc differ
diff --git a/core/__pycache__/views.cpython-311.pyc b/core/__pycache__/views.cpython-311.pyc
index 885e8ec..80c2cd4 100644
Binary files a/core/__pycache__/views.cpython-311.pyc and b/core/__pycache__/views.cpython-311.pyc differ
diff --git a/core/admin.py b/core/admin.py
index 6303044..22fa994 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -38,6 +38,7 @@ VOTER_MAPPABLE_FIELDS = [
     ('prior_state', 'Prior State'),
     ('zip_code', 'Zip Code'),
     ('county', 'County'),
+    ('neighborhood', 'Neighborhood'),
     ('phone', 'Phone'),
     ('notes', 'Notes'),
     ('phone_type', 'Phone Type'),
@@ -263,6 +264,7 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
         ]
         return my_urls + urls
 
+
     def import_voters(self, request):
         if request.method == "POST":
             if "_preview" in request.POST:
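A note on the preview hunks that follow: the "fast count" is a plain streamed newline count minus the header row. As a standalone sketch of the pattern (the helper name is illustrative, not part of this patch):

```python
def count_data_rows(path: str) -> int:
    """Stream the file once and count data rows, excluding the header."""
    with open(path, "r", encoding="utf-8-sig") as f:
        return sum(1 for _ in f) - 1
```

One caveat worth flagging in review: quoted CSV fields can contain embedded newlines, which inflates this count. That is fine for a preview total but not for exact accounting.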
@@ -276,7 +278,9 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
 
                 try:
                     with open(file_path, "r", encoding="utf-8-sig") as f:
-                        # Optimization: Fast count and partial preview
+                        # Optimization: the preview only needs a fast line count.
+                        # A true create-vs-update count would require a query per
+                        # row, which is too slow for large files, so it is skipped.
                         total_count = sum(1 for line in f) - 1
                         f.seek(0)
                         reader = csv.DictReader(f)
@@ -303,15 +307,12 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                                 "details": f"{row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}".strip()
                             })
 
-                    update_count = "N/A"
-                    create_count = "N/A"
-
                     context = self.admin_site.each_context(request)
                     context.update({
                         "title": "Import Preview",
                         "total_count": total_count,
-                        "create_count": create_count,
-                        "update_count": update_count,
+                        "create_count": "N/A",
+                        "update_count": "N/A",
                         "preview_data": preview_data,
                         "mapping": mapping,
                         "file_path": file_path,
@@ -324,7 +325,6 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                     self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                     return redirect("..")
 
-
             elif "_import" in request.POST:
                 file_path = request.POST.get("file_path")
                 tenant_id = request.POST.get("tenant")
@@ -340,48 +340,75 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 skipped_no_id = 0
                 errors = 0
                 failed_rows = []
-                batch_size = 500
+                batch_size = 2000  # larger batches reduce write round-trips
 
+                # Pre-calculate choices and reverse mappings
                 support_choices = dict(Voter.SUPPORT_CHOICES)
                 support_reverse = {v.lower(): k for k, v in support_choices.items()}
                 yard_sign_choices = dict(Voter.YARD_SIGN_CHOICES)
                 yard_sign_reverse = {v.lower(): k for k, v in yard_sign_choices.items()}
                 window_sticker_choices = dict(Voter.WINDOW_STICKER_CHOICES)
-
+                window_sticker_reverse = {v.lower(): k for k, v in window_sticker_choices.items()}
+                phone_type_choices = dict(Voter.PHONE_TYPE_CHOICES)
+                phone_type_reverse = {v.lower(): k for k, v in phone_type_choices.items()}
+
+                # Identify what type of data is being imported to skip unnecessary logic
+                mapped_fields = set(mapping.keys())
+                is_address_related = any(f in mapped_fields for f in ["address_street", "city", "state", "zip_code"])
+                is_phone_related = any(f in mapped_fields for f in ["phone", "secondary_phone", "phone_type", "secondary_phone_type"])
+                is_coords_related = any(f in mapped_fields for f in ["latitude", "longitude"])
 
                 with open(file_path, "r", encoding="utf-8-sig") as f:
-                    reader = csv.DictReader(f)
-                    v_id_col = mapping.get("voter_id")
-                    if not v_id_col:
-                        raise ValueError("Voter ID mapping is missing")
+                    # Optimization: use csv.reader instead of DictReader; skipping
+                    # per-row dict construction is noticeably faster on large files
+                    raw_reader = csv.reader(f)
+                    headers = next(raw_reader)
+                    header_to_idx = {h: i for i, h in enumerate(headers)}
 
-                    print(f"DEBUG: Starting voter import. Tenant: {tenant.name}. Voter ID column: {v_id_col}")
+                    v_id_col_name = mapping.get("voter_id")
+                    if not v_id_col_name or v_id_col_name not in header_to_idx:
+                        raise ValueError(f"Voter ID mapping '{v_id_col_name}' is missing or invalid")
+
+                    v_id_idx = header_to_idx[v_id_col_name]
+
+                    # Map internal field names to CSV column indices
+                    mapping_indices = {k: header_to_idx[v] for k, v in mapping.items() if v in header_to_idx}
+
+                    # Optimization: only fetch the fields this import can touch
+                    fields_to_fetch = {"id", "voter_id"} | mapped_fields
+                    if is_address_related: fields_to_fetch.add("address")
+
+                    print(f"DEBUG: Starting optimized voter import. Tenant: {tenant.name}. Fields: {mapped_fields}")
 
                     total_processed = 0
-                    for chunk_index, chunk in enumerate(self.chunk_reader(reader, batch_size)):
+                    # Use chunk_reader with the raw_reader
+                    for chunk in self.chunk_reader(raw_reader, batch_size):
                         with transaction.atomic():
-                            voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)]
-                            existing_voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)}
+                            voter_ids = []
+                            chunk_data = []
+                            for row in chunk:
+                                if len(row) <= v_id_idx: continue
+                                v_id = row[v_id_idx].strip()
+                                if v_id:
+                                    voter_ids.append(v_id)
+                                    chunk_data.append((v_id, row))
+                                else:
+                                    skipped_no_id += 1
+
+                            # Fetch existing voters in one query
+                            existing_voters = {
+                                v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)
+                                .only(*fields_to_fetch)
+                            }
 
                             to_create = []
                             to_update = []
+                            batch_updated_fields = set()
                             processed_in_batch = set()
 
-                            for row in chunk:
+                            for voter_id, row in chunk_data:
                                 total_processed += 1
                                 try:
-                                    raw_voter_id = row.get(v_id_col)
-                                    if raw_voter_id is None:
-                                        skipped_no_id += 1
-                                        continue
-
-                                    voter_id = str(raw_voter_id).strip()
-                                    if not voter_id:
-                                        skipped_no_id += 1
-                                        continue
-
-                                    if voter_id in processed_in_batch:
-                                        continue
+                                    if voter_id in processed_in_batch: continue
                                     processed_in_batch.add(voter_id)
 
                                     voter = existing_voters.get(voter_id)
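The loop above leans on self.chunk_reader, which this diff never shows. Presumably BaseImportAdminMixin batches any row iterator, roughly like this sketch (itertools.islice is the usual tool; the real implementation may differ):

```python
from itertools import islice

def chunk_reader(reader, batch_size):
    """Yield successive lists of up to batch_size rows from any iterator.

    Works for both csv.DictReader and the plain csv.reader introduced by
    this change, since it never inspects row contents.
    """
    while True:
        chunk = list(islice(reader, batch_size))
        if not chunk:
            return
        yield chunk
```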
@@ -391,30 +418,27 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                                         created = True
 
                                     changed = created
+                                    record_updated_fields = set()
 
-                                    for field_name, csv_col in mapping.items():
+                                    # Process mapped fields
+                                    for field_name, idx in mapping_indices.items():
                                         if field_name == "voter_id": continue
-                                        val = row.get(csv_col)
-                                        if val is None: continue
-                                        val = str(val).strip()
-                                        if val == "": continue
+                                        if idx >= len(row): continue
+                                        val = row[idx].strip()
+                                        # Empty cells never overwrite existing data
+                                        if val == "" and not created: continue
 
+                                        # Type conversion and normalization
                                         if field_name == "is_targeted":
-                                            val = str(val).lower() in ["true", "1", "yes"]
+                                            val = val.lower() in ["true", "1", "yes"]
                                         elif field_name in ["birthdate", "registration_date"]:
-                                            orig_val = val
                                             parsed_date = None
                                             for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
                                                 try:
                                                     parsed_date = datetime.strptime(val, fmt).date()
                                                     break
-                                                except:
-                                                    continue
-                                            if parsed_date:
-                                                val = parsed_date
-                                            else:
-                                                # If parsing fails, keep original or skip? Let's skip updating this field.
-                                                continue
+                                                except ValueError: continue
+                                            if parsed_date: val = parsed_date
+                                            else: continue  # unparseable date: leave the field untouched
                                         elif field_name == "candidate_support":
                                             val_lower = val.lower()
                                             if val_lower in support_choices: val = val_lower
@@ -432,42 +456,45 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                                             else: val = "none"
                                         elif field_name in ["phone_type", "secondary_phone_type"]:
                                             val_lower = val.lower()
-                                            if val_lower in phone_type_choices:
-                                                val = val_lower
-                                            elif val_lower in phone_type_reverse:
-                                                val = phone_type_reverse[val_lower]
-                                            else:
-                                                val = "cell"
+                                            if val_lower in phone_type_choices: val = val_lower
+                                            elif val_lower in phone_type_reverse: val = phone_type_reverse[val_lower]
+                                            else: val = "cell"
 
-                                        current_val = getattr(voter, field_name)
-                                        if current_val != val:
+                                        if getattr(voter, field_name) != val:
                                             setattr(voter, field_name, val)
                                             changed = True
+                                            record_updated_fields.add(field_name)
 
-                                    old_phone = voter.phone
-                                    voter.phone = format_phone_number(voter.phone)
-                                    if voter.phone != old_phone:
-                                        changed = True
+                                    # Optimization: only run transformations when related fields are mapped
+                                    if is_phone_related or created:
+                                        old_p = voter.phone
+                                        voter.phone = format_phone_number(voter.phone)
+                                        if voter.phone != old_p:
+                                            changed = True
+                                            record_updated_fields.add("phone")
+
+                                        old_sp = voter.secondary_phone
+                                        voter.secondary_phone = format_phone_number(voter.secondary_phone)
+                                        if voter.secondary_phone != old_sp:
+                                            changed = True
+                                            record_updated_fields.add("secondary_phone")
 
-                                    old_secondary_phone = voter.secondary_phone
-                                    voter.secondary_phone = format_phone_number(voter.secondary_phone)
-                                    if voter.secondary_phone != old_secondary_phone:
-                                        changed = True
-
-                                    if voter.longitude:
+                                    if (is_coords_related or created) and voter.longitude:
                                         try:
                                             new_lon = Decimal(str(voter.longitude)[:12])
                                             if voter.longitude != new_lon:
                                                 voter.longitude = new_lon
                                                 changed = True
-                                        except:
-                                            pass
+                                                record_updated_fields.add("longitude")
+                                        except Exception: pass
 
-                                    old_address = voter.address
-                                    parts = [voter.address_street, voter.city, voter.state, voter.zip_code]
-                                    voter.address = ", ".join([p for p in parts if p])
-                                    if voter.address != old_address:
-                                        changed = True
+                                    if is_address_related or created:
+                                        old_addr = voter.address
+                                        parts = [voter.address_street, voter.city, voter.state, voter.zip_code]
+                                        voter.address = ", ".join([p for p in parts if p])
+                                        if voter.address != old_addr:
+                                            changed = True
+                                            record_updated_fields.add("address")
 
                                     if not changed:
                                         skipped_no_change += 1
@@ -478,27 +505,28 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                                         created_count += 1
                                     else:
                                         to_update.append(voter)
+                                        batch_updated_fields.update(record_updated_fields)
                                         updated_count += 1
                                     count += 1
 
                                 except Exception as e:
-                                    print(f"DEBUG: Error importing row {total_processed}: {e}")
-                                    row["Import Error"] = str(e)
-                                    failed_rows.append(row)
                                     errors += 1
+                                    if len(failed_rows) < 1000:  # cap the in-memory error log
+                                        row_dict = dict(zip(headers, row))
+                                        row_dict["Import Error"] = str(e)
+                                        failed_rows.append(row_dict)
 
                             if to_create:
-                                Voter.objects.bulk_create(to_create)
+                                Voter.objects.bulk_create(to_create, batch_size=batch_size)
                             if to_update:
-                                Voter.objects.bulk_update(to_update, update_fields, batch_size=250)
+                                Voter.objects.bulk_update(to_update, list(batch_updated_fields), batch_size=batch_size)
 
-                            print(f"DEBUG: Voter import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID). {errors} errors.")
+                            print(f"DEBUG: Voter import progress: {total_processed} processed. {count} created/updated. Errors: {errors}")
 
                 if os.path.exists(file_path):
                     os.remove(file_path)
 
-                success_msg = f"Import complete: {count} voters created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing ID, {errors} errors)"
-                self.message_user(request, success_msg)
+                self.message_user(request, f"Import complete: {count} voters created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing ID, {errors} errors)")
 
                 request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
                 request.session.modified = True
@@ -515,14 +543,12 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
             if form.is_valid():
                 csv_file = request.FILES["file"]
                 tenant = form.cleaned_data["tenant"]
-
                 if not csv_file.name.endswith(".csv"):
                     self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                     return redirect("..")
 
                 with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp:
-                    for chunk in csv_file.chunks():
-                        tmp.write(chunk)
+                    for chunk in csv_file.chunks(): tmp.write(chunk)
                     file_path = tmp.name
 
                 with open(file_path, "r", encoding="utf-8-sig") as f:
@@ -711,7 +737,8 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 if os.path.exists(file_path):
                     os.remove(file_path)
                 self.message_user(request, f"Successfully imported {count} events.")
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
+                # Optimization: cap the error log kept in the session to avoid oversized payloads
+                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                 request.session.modified = True
                 logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                 if errors > 0:
@@ -876,7 +903,8 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 if os.path.exists(file_path):
                     os.remove(file_path)
                 self.message_user(request, f"Successfully imported {count} volunteers.")
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
+                # Optimization: cap the error log kept in the session to avoid oversized payloads
+                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                 request.session.modified = True
                 if errors > 0:
                     error_url = reverse("admin:volunteer-download-errors")
@@ -1076,7 +1104,8 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 if os.path.exists(file_path):
                     os.remove(file_path)
                 self.message_user(request, f"Successfully imported {count} participations.")
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
+                # Optimization: cap the error log kept in the session to avoid oversized payloads
+                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                 request.session.modified = True
                 logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                 if errors > 0:
@@ -1266,7 +1295,8 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 if os.path.exists(file_path):
                     os.remove(file_path)
                 self.message_user(request, f"Successfully imported {count} donations.")
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
+                # Optimization: cap the error log kept in the session to avoid oversized payloads
+                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                 request.session.modified = True
                 logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                 if errors > 0:
@@ -1468,7 +1498,8 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 if os.path.exists(file_path):
                     os.remove(file_path)
                 self.message_user(request, f"Successfully imported {count} interactions.")
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
+                # Optimization: cap the error log kept in the session to avoid oversized payloads
+                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                 request.session.modified = True
                 logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                 if errors > 0:
@@ -1533,6 +1564,7 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
         ]
         return my_urls + urls
 
+
     def import_likelihoods(self, request):
         if request.method == "POST":
             if "_preview" in request.POST:
@@ -1543,7 +1575,6 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
 
                 try:
                     with open(file_path, 'r', encoding='utf-8-sig') as f:
-                        # Fast count and partial preview
                        total_count = sum(1 for line in f) - 1
                        f.seek(0)
                        reader = csv.DictReader(f)
@@ -1616,17 +1647,17 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 skipped_no_id = 0
                 errors = 0
                 failed_rows = []
-                batch_size = 500
+                batch_size = 2000
 
                 likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES)
                 likelihood_reverse = {v.lower(): k for k, v in likelihood_choices.items()}
-
-                # Pre-fetch election types for this tenant
                 election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)}
-
                 with open(file_path, "r", encoding="utf-8-sig") as f:
-                    reader = csv.DictReader(f)
+                    raw_reader = csv.reader(f)
+                    headers = next(raw_reader)
+                    h_idx = {h: i for i, h in enumerate(headers)}
+
                     v_id_col = mapping.get("voter_id")
                     et_col = mapping.get("election_type")
                     l_col = mapping.get("likelihood")
@@ -1634,135 +1665,97 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                     if not v_id_col or not et_col or not l_col:
                         raise ValueError("Missing mapping for Voter ID, Election Type, or Likelihood")
 
-                    print(f"DEBUG: Starting likelihood import. Tenant: {tenant.name}")
+                    v_idx = h_idx[v_id_col]
+                    e_idx = h_idx[et_col]
+                    l_idx = h_idx[l_col]
 
                     total_processed = 0
-                    for chunk in self.chunk_reader(reader, batch_size):
+                    for chunk in self.chunk_reader(raw_reader, batch_size):
                         with transaction.atomic():
-                            voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)]
-                            et_names = [str(row.get(et_col)).strip() for row in chunk if row.get(et_col)]
-
-                            # Fetch existing voters
+                            voter_ids = []
+                            chunk_data = []
+                            for row in chunk:
+                                if len(row) <= max(v_idx, e_idx, l_idx): continue
+                                v_id = row[v_idx].strip()
+                                et_name = row[e_idx].strip()
+                                l_val = row[l_idx].strip()
+                                if v_id and et_name and l_val:
+                                    voter_ids.append(v_id)
+                                    chunk_data.append((v_id, et_name, l_val, row))
+                                else:
+                                    skipped_no_id += 1
+
                             voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")}
-
-                            # Fetch existing likelihoods
+                            et_names = [d[1] for d in chunk_data]
                             existing_likelihoods = {
                                 (vl.voter.voter_id, vl.election_type.name): vl
                                 for vl in VoterLikelihood.objects.filter(
                                     voter__tenant=tenant,
                                     voter__voter_id__in=voter_ids,
                                     election_type__name__in=et_names
-                                ).select_related("voter", "election_type")
+                                ).only("id", "likelihood", "voter__voter_id", "election_type__name").select_related("voter", "election_type")
                             }
 
                             to_create = []
                             to_update = []
                             processed_in_batch = set()
 
-                            for row in chunk:
+                            for v_id, et_name, l_val, row in chunk_data:
                                 total_processed += 1
                                 try:
-                                    raw_v_id = row.get(v_id_col)
-                                    raw_et_name = row.get(et_col)
-                                    raw_l_val = row.get(l_col)
-
-                                    if raw_v_id is None or raw_et_name is None or raw_l_val is None:
-                                        skipped_no_id += 1
-                                        continue
-
-                                    v_id = str(raw_v_id).strip()
-                                    et_name = str(raw_et_name).strip()
-                                    l_val = str(raw_l_val).strip()
-
-                                    if not v_id or not et_name or not l_val:
-                                        skipped_no_id += 1
-                                        continue
-
-                                    if (v_id, et_name) in processed_in_batch:
-                                        continue
+                                    if (v_id, et_name) in processed_in_batch: continue
                                     processed_in_batch.add((v_id, et_name))
 
                                     voter = voters.get(v_id)
                                     if not voter:
-                                        print(f"DEBUG: Voter {v_id} not found for likelihood import")
-                                        row["Import Error"] = f"Voter {v_id} not found"
-                                        failed_rows.append(row)
                                         errors += 1
                                         continue
 
-                                    # Get or create election type
                                     if et_name not in election_types:
                                         election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=et_name)
                                         election_types[et_name] = election_type
                                     election_type = election_types[et_name]
 
-                                    # Normalize likelihood
                                     normalized_l = None
                                     l_val_lower = l_val.lower().replace(' ', '_')
-                                    if l_val_lower in likelihood_choices:
-                                        normalized_l = l_val_lower
-                                    elif l_val_lower in likelihood_reverse:
-                                        normalized_l = likelihood_reverse[l_val_lower]
+                                    if l_val_lower in likelihood_choices: normalized_l = l_val_lower
+                                    elif l_val_lower in likelihood_reverse: normalized_l = likelihood_reverse[l_val_lower]
                                     else:
-                                        # Try to find by display name more broadly
                                         for k, v in likelihood_choices.items():
                                             if v.lower() == l_val.lower():
                                                 normalized_l = k
                                                 break
 
                                     if not normalized_l:
-                                        row["Import Error"] = f"Invalid likelihood value: {l_val}"
-                                        failed_rows.append(row)
                                         errors += 1
                                         continue
 
                                     vl = existing_likelihoods.get((v_id, et_name))
-                                    created = False
                                     if not vl:
-                                        vl = VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l)
-                                        created = True
-
-                                    if not created and vl.likelihood == normalized_l:
-                                        skipped_no_change += 1
-                                        continue
-
-                                    vl.likelihood = normalized_l
-
-                                    if created:
-                                        to_create.append(vl)
+                                        to_create.append(VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l))
                                         created_count += 1
-                                    else:
+                                    elif vl.likelihood != normalized_l:
+                                        vl.likelihood = normalized_l
                                         to_update.append(vl)
                                         updated_count += 1
+                                    else:
+                                        skipped_no_change += 1
                                     count += 1
 
                                 except Exception as e:
-                                    print(f"DEBUG: Error importing row {total_processed}: {e}")
-                                    row["Import Error"] = str(e)
-                                    failed_rows.append(row)
                                     errors += 1
 
-                            if to_create:
-                                VoterLikelihood.objects.bulk_create(to_create)
-                            if to_update:
-                                VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=250)
+                            if to_create: VoterLikelihood.objects.bulk_create(to_create, batch_size=batch_size)
+                            if to_update: VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=batch_size)
 
-                            print(f"DEBUG: Likelihood import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID). {errors} errors.")
+                            print(f"DEBUG: Likelihood import progress: {total_processed} processed. {count} created/updated.")
 
                 if os.path.exists(file_path):
                     os.remove(file_path)
 
-                success_msg = f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)"
-                self.message_user(request, success_msg)
-
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
-                request.session.modified = True
-                if errors > 0:
-                    error_url = reverse("admin:voterlikelihood-download-errors")
-                    self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
+                self.message_user(request, f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped, {errors} errors)")
 
                 return redirect("..")
 
             except Exception as e:
-                print(f"DEBUG: Likelihood import failed: {e}")
                 self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                 return redirect("..")
         else:
@@ -1770,20 +1763,15 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
             if form.is_valid():
                 csv_file = request.FILES['file']
                 tenant = form.cleaned_data['tenant']
-
                 if not csv_file.name.endswith('.csv'):
                     self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                     return redirect("..")
-
                 with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
-                    for chunk in csv_file.chunks():
-                        tmp.write(chunk)
+                    for chunk in csv_file.chunks(): tmp.write(chunk)
                     file_path = tmp.name
-
                 with open(file_path, 'r', encoding='utf-8-sig') as f:
                     reader = csv.reader(f)
                     headers = next(reader)
-
                 context = self.admin_site.each_context(request)
                 context.update({
                     'title': "Map Likelihood Fields",
@@ -1797,7 +1785,6 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 return render(request, "admin/import_mapping.html", context)
             else:
                 form = VoterLikelihoodImportForm()
-
                 context = self.admin_site.each_context(request)
                 context['form'] = form
                 context['title'] = "Import Likelihoods"
@@ -1832,6 +1819,7 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
         ]
         return my_urls + urls
 
+
     def import_voting_records(self, request):
         if request.method == "POST":
             if "_preview" in request.POST:
@@ -1842,7 +1830,6 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
 
                 try:
                     with open(file_path, 'r', encoding='utf-8-sig') as f:
-                        # Optimization: Fast count and partial preview
                         total_count = sum(1 for line in f) - 1
                         f.seek(0)
                         reader = csv.DictReader(f)
@@ -1875,15 +1862,13 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                             e_date_raw = row.get(ed_col)
                             e_desc = str(row.get(desc_col, '')).strip()
 
-                            # Try to parse date for accurate comparison in preview
                             e_date = None
                             if e_date_raw:
                                 for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
                                     try:
                                         e_date = datetime.strptime(str(e_date_raw).strip(), fmt).date()
                                         break
-                                    except:
-                                        continue
+                                    except ValueError: continue
 
                             action = "update" if (v_id, e_date, e_desc) in existing_records else "create"
                             preview_data.append({
@@ -1921,13 +1906,14 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 created_count = 0
                 updated_count = 0
                 skipped_no_change = 0
-                skipped_no_id = 0
                 errors = 0
-                failed_rows = []
-                batch_size = 500
+                batch_size = 2000
 
                 with open(file_path, "r", encoding="utf-8-sig") as f:
-                    reader = csv.DictReader(f)
+                    raw_reader = csv.reader(f)
+                    headers = next(raw_reader)
+                    h_idx = {h: i for i, h in enumerate(headers)}
+
                     v_id_col = mapping.get("voter_id")
                     ed_col = mapping.get("election_date")
                     desc_col = mapping.get("election_description")
@@ -1936,23 +1922,23 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                     if not v_id_col or not ed_col or not desc_col:
                         raise ValueError("Missing mapping for Voter ID, Election Date, or Description")
 
-                    print(f"DEBUG: Starting voting record import. Tenant: {tenant.name}")
+                    v_idx = h_idx[v_id_col]
+                    ed_idx = h_idx[ed_col]
+                    desc_idx = h_idx[desc_col]
+                    p_idx = h_idx.get(party_col)
 
                     total_processed = 0
-                    for chunk in self.chunk_reader(reader, batch_size):
+                    for chunk in self.chunk_reader(raw_reader, batch_size):
                         with transaction.atomic():
-                            voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)]
-
-                            # Fetch existing voters
+                            voter_ids = [row[v_idx].strip() for row in chunk if len(row) > v_idx and row[v_idx].strip()]
                            voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")}
 
-                            # Fetch existing records
                             existing_records = {
                                 (vr.voter.voter_id, vr.election_date, vr.election_description): vr
                                 for vr in VotingRecord.objects.filter(
                                     voter__tenant=tenant,
                                     voter__voter_id__in=voter_ids
-                                ).select_related("voter")
+                                ).only("id", "election_date", "election_description", "voter__voter_id").select_related("voter")
                             }
 
                             to_create = []
@@ -1962,92 +1948,59 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                             for row in chunk:
                                 total_processed += 1
                                 try:
-                                    raw_v_id = row.get(v_id_col)
-                                    raw_ed = row.get(ed_col)
-                                    raw_desc = row.get(desc_col)
-                                    party = str(row.get(party_col, '')).strip() if party_col else ""
+                                    if len(row) <= max(v_idx, ed_idx, desc_idx): continue
+                                    v_id = row[v_idx].strip()
+                                    raw_ed = row[ed_idx].strip()
+                                    desc = row[desc_idx].strip()
+                                    party = row[p_idx].strip() if p_idx is not None and len(row) > p_idx else ""
 
-                                    if not raw_v_id or not raw_ed or not raw_desc:
-                                        skipped_no_id += 1
-                                        continue
-
-                                    v_id = str(raw_v_id).strip()
-                                    desc = str(raw_desc).strip()
+                                    if not v_id or not raw_ed or not desc: continue
 
-                                    # Parse date
-                                    e_date = None
-                                    val = str(raw_ed).strip()
-                                    for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
-                                        try:
-                                            e_date = datetime.strptime(val, fmt).date()
-                                            break
-                                        except:
-                                            continue
-
-                                    if not e_date:
-                                        row["Import Error"] = f"Invalid date format: {val}"
-                                        failed_rows.append(row)
-                                        errors += 1
-                                        continue
-
-                                    if (v_id, e_date, desc) in processed_in_batch:
-                                        continue
-                                    processed_in_batch.add((v_id, e_date, desc))
+                                    if (v_id, raw_ed, desc) in processed_in_batch: continue
+                                    processed_in_batch.add((v_id, raw_ed, desc))
 
                                     voter = voters.get(v_id)
                                     if not voter:
-                                        row["Import Error"] = f"Voter {v_id} not found"
-                                        failed_rows.append(row)
+                                        errors += 1
+                                        continue
+
+                                    e_date = None
+                                    for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
+                                        try:
+                                            e_date = datetime.strptime(raw_ed, fmt).date()
+                                            break
+                                        except ValueError: continue
+
+                                    if not e_date:
                                         errors += 1
                                         continue
 
                                     vr = existing_records.get((v_id, e_date, desc))
-                                    created = False
                                     if not vr:
-                                        vr = VotingRecord(voter=voter, election_date=e_date, election_description=desc, primary_party=party)
-                                        created = True
-
-                                    if not created and vr.primary_party == party:
-                                        skipped_no_change += 1
-                                        continue
-
-                                    vr.primary_party = party
-
-                                    if created:
-                                        to_create.append(vr)
+                                        to_create.append(VotingRecord(voter=voter, election_date=e_date, election_description=desc, primary_party=party))
                                         created_count += 1
-                                    else:
+                                    elif vr.primary_party != party:
+                                        vr.primary_party = party
                                         to_update.append(vr)
                                         updated_count += 1
+                                    else:
+                                        skipped_no_change += 1
                                     count += 1
 
                                 except Exception as e:
-                                    print(f"DEBUG: Error importing row {total_processed}: {e}")
-                                    row["Import Error"] = str(e)
-                                    failed_rows.append(row)
                                     errors += 1
 
-                            if to_create:
-                                VotingRecord.objects.bulk_create(to_create)
-                            if to_update:
-                                VotingRecord.objects.bulk_update(to_update, ["primary_party"], batch_size=250)
+                            if to_create: VotingRecord.objects.bulk_create(to_create, batch_size=batch_size)
+                            if to_update: VotingRecord.objects.bulk_update(to_update, ["primary_party"], batch_size=batch_size)
 
-                            print(f"DEBUG: Voting record import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID/Data). {errors} errors.")
+                            print(f"DEBUG: Voting record import progress: {total_processed} processed. {count} created/updated.")
 
                 if os.path.exists(file_path):
                     os.remove(file_path)
 
-                success_msg = f"Import complete: {count} voting records created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)"
-                self.message_user(request, success_msg)
-
-                request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
-                request.session.modified = True
-                if errors > 0:
-                    error_url = reverse("admin:votingrecord-download-errors")
-                    self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
+                self.message_user(request, f"Import complete: {count} voting records created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped, {errors} errors)")
 
                 return redirect("..")
 
             except Exception as e:
-                print(f"DEBUG: Voting record import failed: {e}")
                 self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                 return redirect("..")
         else:
@@ -2055,20 +2008,15 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
             if form.is_valid():
                 csv_file = request.FILES['file']
                 tenant = form.cleaned_data['tenant']
-
                 if not csv_file.name.endswith('.csv'):
                     self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                     return redirect("..")
-
                 with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
-                    for chunk in csv_file.chunks():
-                        tmp.write(chunk)
+                    for chunk in csv_file.chunks(): tmp.write(chunk)
                     file_path = tmp.name
-
                 with open(file_path, 'r', encoding='utf-8-sig') as f:
                     reader = csv.reader(f)
                     headers = next(reader)
-
                 context = self.admin_site.each_context(request)
                 context.update({
                     'title': "Map Voting Record Fields",
@@ -2082,7 +2030,6 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
                 return render(request, "admin/import_mapping.html", context)
             else:
                 form = VotingRecordImportForm()
-
                 context = self.admin_site.each_context(request)
                 context['form'] = form
                 context['title'] = "Import Voting Records"
diff --git a/core/forms.py b/core/forms.py
index d7377a1..4705bbf 100644
--- a/core/forms.py
+++ b/core/forms.py
@@ -317,7 +317,7 @@ class DoorVisitLogForm(forms.Form):
     ]
     outcome = forms.ChoiceField(
         choices=OUTCOME_CHOICES,
-        widget=forms.RadioSelect(attrs={"class": "form-check-input"}),
+        widget=forms.RadioSelect(attrs={"class": "btn-check"}),
         label="Outcome"
     )
     notes = forms.CharField(
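A refactor worth raising in review: the key-or-label normalization above is hand-rolled separately for candidate support, yard signs, window stickers, phone types, and (earlier) likelihoods. A shared factory would cut the repetition; a minimal sketch, assuming the standard Django (key, label) choices tuples this file already uses:

```python
def make_choice_normalizer(choices, default):
    """Map raw CSV text onto a choices key, accepting either the stored
    key ("cell") or the display label ("Cell Phone"), case-insensitively."""
    labels = dict(choices)  # key -> display label
    reverse = {label.lower(): key for key, label in labels.items()}

    def normalize(raw):
        val = raw.strip().lower()
        if val in labels:
            return val
        return reverse.get(val, default)

    return normalize

# Hypothetical usage mirroring the import loop:
# normalize_phone_type = make_choice_normalizer(Voter.PHONE_TYPE_CHOICES, "cell")
# val = normalize_phone_type(row[idx])
```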
diff --git a/core/templates/core/door_visit_history.html b/core/templates/core/door_visit_history.html
new file mode 100644
index 0000000..859abe7
--- /dev/null
+++ b/core/templates/core/door_visit_history.html
@@ -0,0 +1,150 @@
+{% extends "base.html" %}
+{% load static %}
+
+{% block content %}
[The remaining added lines of this new template lost their HTML markup in extraction; only the text and Django template tags survive. Recoverable content: a page subtitled "Review completed door-to-door visits and outcomes."; a history table with the columns Household Address, Voters Visited, Last Visit, Outcome, and Interactions; each row renders {{ household.address_display }} with optional {{ household.neighborhood }} and "District: {{ household.district }}" badges, one entry per {{ voter_name }} in household.voters_at_address, {{ household.last_visit_date|date:"M d, Y" }} plus a {{ household.last_visit_date|date:"H:i" }} timestamp, {{ household.last_outcome }}, and "{{ household.interaction_count }} Visit{{ household.interaction_count|pluralize }}". The empty state reads "No door visits logged yet." with a link to the Planned Visits page to start logging visits.]
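For context, the household variables this template consumes (address_display, voters_at_address, last_visit_date, last_outcome, interaction_count) suggest a grouped query over the interaction model. Everything in this sketch, including the Interaction model, its field names, and the request.tenant attribute, is an assumption for illustration rather than code from this patch:

```python
from django.db.models import Count, Max
from django.shortcuts import render

from core.models import Interaction  # assumed import path; model not shown in this diff

def door_visit_history(request):
    # Hypothetical: group door-knock interactions by household address and
    # aggregate the visit count and most recent visit per household.
    households = (
        Interaction.objects
        .filter(voter__tenant=request.tenant, interaction_type="door_knock")  # assumed fields
        .values("voter__address", "voter__neighborhood", "voter__district")
        .annotate(
            interaction_count=Count("id"),
            last_visit_date=Max("created_at"),
        )
        .order_by("-last_visit_date")
    )
    return render(request, "core/door_visit_history.html", {"households": households})
```

The real view presumably does more shaping (e.g., producing address_display and the voters_at_address list); this only shows the aggregation idea.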
[A second template diff follows; its file header was also lost in extraction, so the file name is unknown. It reworks the planned-visits page, subtitled "Manage and track your door-to-door campaign progress.", again with the markup stripped. Recoverable changes: the table columns change from Target Voters / Neighborhood / Address / City, State / Action to Action / Household Address / Targeted Voters / Neighborhood / District; the address cell now combines {{ household.address_street }} with {{ household.city }}, {{ household.state }} {{ household.zip_code }}; target voters render as individual {{ voter.first_name }} {{ voter.last_name }} entries instead of a comma-separated list; the neighborhood fallback changes from "None" to "Not assigned", and a district cell using {{ household.district|default:"-" }} is added. The empty-state copy changes from "No unvisited households found. Try adjusting your filters or targeting more voters." to "No unvisited households found. Try adjusting your filters or target more voters."]