diff --git a/core/__pycache__/admin.cpython-311.pyc b/core/__pycache__/admin.cpython-311.pyc index 7a8a9d7..718ca54 100644 Binary files a/core/__pycache__/admin.cpython-311.pyc and b/core/__pycache__/admin.cpython-311.pyc differ diff --git a/core/__pycache__/urls.cpython-311.pyc b/core/__pycache__/urls.cpython-311.pyc index a732775..839bd27 100644 Binary files a/core/__pycache__/urls.cpython-311.pyc and b/core/__pycache__/urls.cpython-311.pyc differ diff --git a/core/__pycache__/views.cpython-311.pyc b/core/__pycache__/views.cpython-311.pyc index ac9719a..f306c0f 100644 Binary files a/core/__pycache__/views.cpython-311.pyc and b/core/__pycache__/views.cpython-311.pyc differ diff --git a/core/admin.py b/core/admin.py index 19ac800..b8aedb6 100644 --- a/core/admin.py +++ b/core/admin.py @@ -308,14 +308,16 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): batch_size = 500 support_choices = dict(Voter.SUPPORT_CHOICES) + support_reverse = {v.lower(): k for k, v in support_choices.items()} yard_sign_choices = dict(Voter.YARD_SIGN_CHOICES) + yard_sign_reverse = {v.lower(): k for k, v in yard_sign_choices.items()} window_sticker_choices = dict(Voter.WINDOW_STICKER_CHOICES) + window_sticker_reverse = {v.lower(): k for k, v in window_sticker_choices.items()} phone_type_choices = dict(Voter.PHONE_TYPE_CHOICES) phone_type_reverse = {v.lower(): k for k, v in phone_type_choices.items()} valid_fields = {f.name for f in Voter._meta.get_fields()} mapped_fields = {f for f in mapping.keys() if f in valid_fields} - fetch_fields = list(mapped_fields | {"voter_id", "address", "phone", "longitude", "latitude"}) # Ensure derived/special fields are in update_fields update_fields = list(mapped_fields | {"address", "phone", "longitude", "latitude"}) if "voter_id" in update_fields: update_fields.remove("voter_id") @@ -342,7 +344,7 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): for chunk_index, chunk in enumerate(chunk_reader(reader, batch_size)): with 
transaction.atomic(): voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] - existing_voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only(*fetch_fields)} + existing_voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)} to_create = [] to_update = [] @@ -397,14 +399,20 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): # If parsing fails, keep original or skip? Let's skip updating this field. continue elif field_name == "candidate_support": - val = val.lower().replace(" ", "_") - if val not in support_choices: val = "unknown" + val_lower = val.lower() + if val_lower in support_choices: val = val_lower + elif val_lower in support_reverse: val = support_reverse[val_lower] + else: val = "unknown" elif field_name == "yard_sign": - val = val.lower().replace(" ", "_") - if val not in yard_sign_choices: val = "none" + val_lower = val.lower() + if val_lower in yard_sign_choices: val = val_lower + elif val_lower in yard_sign_reverse: val = yard_sign_reverse[val_lower] + else: val = "none" elif field_name == "window_sticker": - val = val.lower().replace(" ", "_") - if val not in window_sticker_choices: val = "none" + val_lower = val.lower() + if val_lower in window_sticker_choices: val = val_lower + elif val_lower in window_sticker_reverse: val = window_sticker_reverse[val_lower] + else: val = "none" elif field_name == "phone_type": val_lower = val.lower() if val_lower in phone_type_choices: @@ -518,6 +526,7 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): context["title"] = "Import Voters" context["opts"] = self.model._meta return render(request, "admin/import_csv.html", context) +@admin.register(Event) class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin): list_display = ('id', 'name', 'event_type', 'date', 'start_time', 'end_time', 'tenant') list_filter = ('tenant', 'date', 'event_type') @@ -1505,49 +1514,63 @@ class 
VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): file_path = request.POST.get('file_path') tenant_id = request.POST.get('tenant') tenant = Tenant.objects.get(id=tenant_id) - mapping = {} - for field_name, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS: - mapping[field_name] = request.POST.get(f'map_{field_name}') + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + try: - with open(file_path, 'r', encoding='UTF-8') as f: + with open(file_path, 'r', encoding='utf-8-sig') as f: + # Fast count and partial preview + total_count = sum(1 for line in f) - 1 + f.seek(0) reader = csv.DictReader(f) - total_count = 0 - create_count = 0 - update_count = 0 - preview_data = [] - for row in reader: - total_count += 1 - voter_id = row.get(mapping.get('voter_id')) - election_type_name = row.get(mapping.get('election_type')) - exists = False - if voter_id and election_type_name: - exists = VoterLikelihood.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, election_type__name=election_type_name).exists() - - if exists: - update_count += 1 - action = 'update' + preview_rows = [] + voter_ids_for_preview = set() + election_types_for_preview = set() + + v_id_col = mapping.get('voter_id') + et_col = mapping.get('election_type') + + if not v_id_col or not et_col: + raise ValueError("Missing mapping for Voter ID or Election Type") + + for i, row in enumerate(reader): + if i < 10: + preview_rows.append(row) + v_id = row.get(v_id_col) + et_name = row.get(et_col) + if v_id: voter_ids_for_preview.add(str(v_id).strip()) + if et_name: election_types_for_preview.add(str(et_name).strip()) else: - create_count += 1 - action = 'create' - - if len(preview_data) < 10: - preview_data.append({ - 'action': action, - 'identifier': f"Voter: {voter_id}", - 'details': f"Election: {election_type_name}, Likelihood: {row.get(mapping.get('likelihood', '')) or ''}" - }) + break + + existing_likelihoods = 
set(VoterLikelihood.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids_for_preview, + election_type__name__in=election_types_for_preview + ).values_list("voter__voter_id", "election_type__name")) + + preview_data = [] + for row in preview_rows: + v_id = str(row.get(v_id_col, '')).strip() + et_name = str(row.get(et_col, '')).strip() + action = "update" if (v_id, et_name) in existing_likelihoods else "create" + preview_data.append({ + "action": action, + "identifier": f"Voter: {v_id}, Election: {et_name}", + "details": f"Likelihood: {row.get(mapping.get('likelihood', '')) or ''}" + }) + context = self.admin_site.each_context(request) context.update({ - 'title': "Import Preview", - 'total_count': total_count, - 'create_count': create_count, - 'update_count': update_count, - 'preview_data': preview_data, - 'mapping': mapping, - 'file_path': file_path, - 'tenant_id': tenant_id, - 'action_url': request.path, - 'opts': self.model._meta, + "title": "Import Preview", + "total_count": total_count, + "create_count": "N/A", + "update_count": "N/A", + "preview_data": preview_data, + "mapping": mapping, + "file_path": file_path, + "tenant_id": tenant_id, + "action_url": request.path, + "opts": self.model._meta, }) return render(request, "admin/import_preview.html", context) except Exception as e: @@ -1558,98 +1581,172 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): file_path = request.POST.get('file_path') tenant_id = request.POST.get('tenant') tenant = Tenant.objects.get(id=tenant_id) - - mapping = {} - for field_name, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS: - mapping[field_name] = request.POST.get(f'map_{field_name}') + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} try: - with open(file_path, 'r', encoding='UTF-8') as f: - reader = csv.DictReader(f) - count = 0 - errors = 0 - failed_rows = [] - for row in reader: - try: - voter_id = 
row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None - if not voter_id: - row["Import Error"] = "Missing voter ID" - failed_rows.append(row) - errors += 1 - continue - - try: - voter = Voter.objects.get(tenant=tenant, voter_id=voter_id) - except Voter.DoesNotExist: - row["Import Error"] = f"Voter {voter_id} not found" - failed_rows.append(row) - errors += 1 - continue - - election_type_name = row.get(mapping.get('election_type')) - likelihood_val = row.get(mapping.get('likelihood')) - - if not election_type_name or not likelihood_val: - row["Import Error"] = "Missing election type or likelihood value" - failed_rows.append(row) - errors += 1 - continue - - election_type, _ = ElectionType.objects.get_or_create( - tenant=tenant, - name=election_type_name - ) - - # Normalize likelihood - likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES) - normalized_likelihood = None - likelihood_val_lower = likelihood_val.lower().replace(' ', '_') - if likelihood_val_lower in likelihood_choices: - normalized_likelihood = likelihood_val_lower - else: - # Try to find by display name - for k, v in likelihood_choices.items(): - if v.lower() == likelihood_val.lower(): - normalized_likelihood = k - break - - if not normalized_likelihood: - row["Import Error"] = f"Invalid likelihood value: {likelihood_val}" - failed_rows.append(row) - errors += 1 - continue - - defaults = {} - if normalized_likelihood and normalized_likelihood.strip(): - defaults['likelihood'] = normalized_likelihood - - VoterLikelihood.objects.update_or_create( - voter=voter, - election_type=election_type, - defaults=defaults - ) - count += 1 - except Exception as e: - logger.error(f"Error importing: {e}") - row["Import Error"] = str(e) - failed_rows.append(row) - errors += 1 + count = 0 + created_count = 0 + updated_count = 0 + skipped_no_change = 0 + skipped_no_id = 0 + errors = 0 + failed_rows = [] + batch_size = 500 + likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES) + 
likelihood_reverse = {v.lower(): k for k, v in likelihood_choices.items()} + + # Pre-fetch election types for this tenant + election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)} + + def chunk_reader(reader, size): + chunk = [] + for row in reader: + chunk.append(row) + if len(chunk) == size: + yield chunk + chunk = [] + if chunk: + yield chunk + + with open(file_path, "r", encoding="utf-8-sig") as f: + reader = csv.DictReader(f) + v_id_col = mapping.get("voter_id") + et_col = mapping.get("election_type") + l_col = mapping.get("likelihood") + + if not v_id_col or not et_col or not l_col: + raise ValueError("Missing mapping for Voter ID, Election Type, or Likelihood") + + print(f"DEBUG: Starting likelihood import. Tenant: {tenant.name}") + + total_processed = 0 + for chunk in chunk_reader(reader, batch_size): + with transaction.atomic(): + voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] + et_names = [str(row.get(et_col)).strip() for row in chunk if row.get(et_col)] + + # Fetch existing voters + voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")} + + # Fetch existing likelihoods + existing_likelihoods = { + (vl.voter.voter_id, vl.election_type.name): vl + for vl in VoterLikelihood.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids, + election_type__name__in=et_names + ).select_related("voter", "election_type") + } + + to_create = [] + to_update = [] + processed_in_batch = set() + + for row in chunk: + total_processed += 1 + try: + raw_v_id = row.get(v_id_col) + raw_et_name = row.get(et_col) + raw_l_val = row.get(l_col) + + if raw_v_id is None or raw_et_name is None or raw_l_val is None: + skipped_no_id += 1 + continue + + v_id = str(raw_v_id).strip() + et_name = str(raw_et_name).strip() + l_val = str(raw_l_val).strip() + + if not v_id or not et_name or not l_val: + skipped_no_id += 1 + continue + + if (v_id, et_name) in 
processed_in_batch: + continue + processed_in_batch.add((v_id, et_name)) + + voter = voters.get(v_id) + if not voter: + print(f"DEBUG: Voter {v_id} not found for likelihood import") + row["Import Error"] = f"Voter {v_id} not found" + failed_rows.append(row) + errors += 1 + continue + + # Get or create election type + if et_name not in election_types: + election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=et_name) + election_types[et_name] = election_type + election_type = election_types[et_name] + + # Normalize likelihood + normalized_l = None + l_val_lower = l_val.lower().replace(' ', '_') + if l_val_lower in likelihood_choices: + normalized_l = l_val_lower + elif l_val_lower in likelihood_reverse: + normalized_l = likelihood_reverse[l_val_lower] + else: + # Try to find by display name more broadly + for k, v in likelihood_choices.items(): + if v.lower() == l_val.lower(): + normalized_l = k + break + + if not normalized_l: + row["Import Error"] = f"Invalid likelihood value: {l_val}" + failed_rows.append(row) + errors += 1 + continue + + vl = existing_likelihoods.get((v_id, et_name)) + created = False + if not vl: + vl = VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l) + created = True + + if not created and vl.likelihood == normalized_l: + skipped_no_change += 1 + continue + + vl.likelihood = normalized_l + + if created: + to_create.append(vl) + created_count += 1 + else: + to_update.append(vl) + updated_count += 1 + + count += 1 + except Exception as e: + print(f"DEBUG: Error importing row {total_processed}: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if to_create: + VoterLikelihood.objects.bulk_create(to_create) + if to_update: + VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=250) + + print(f"DEBUG: Likelihood import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). 
{skipped_no_id} skipped (no ID). {errors} errors.") + if os.path.exists(file_path): os.remove(file_path) - self.message_user(request, f"Successfully imported {count} likelihoods.") + + success_msg = f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)" + self.message_user(request, success_msg) + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows request.session.modified = True - logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}") if errors > 0: error_url = reverse("admin:voterlikelihood-download-errors") self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING) return redirect("..") except Exception as e: - self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) - return redirect("..") - except Exception as e: - print(f"DEBUG: Voter import failed: {e}") + print(f"DEBUG: Likelihood import failed: {e}") self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) return redirect("..") else: @@ -1667,7 +1764,7 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): tmp.write(chunk) file_path = tmp.name - with open(file_path, 'r', encoding='UTF-8') as f: + with open(file_path, 'r', encoding='utf-8-sig') as f: reader = csv.reader(f) headers = next(reader) @@ -1694,4 +1791,4 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): @admin.register(CampaignSettings) class CampaignSettingsAdmin(admin.ModelAdmin): list_display = ('tenant', 'donation_goal') - list_filter = ('tenant',) \ No newline at end of file + list_filter = ('tenant',) diff --git a/core/templates/core/voter_detail.html b/core/templates/core/voter_detail.html index 2dc1bb3..3e97371 100644 --- a/core/templates/core/voter_detail.html +++ 
b/core/templates/core/voter_detail.html @@ -501,15 +501,45 @@ - + + + + - @@ -680,9 +715,14 @@ - @@ -770,9 +810,14 @@ - @@ -860,9 +905,14 @@ - diff --git a/core/urls.py b/core/urls.py index 0c1423e..0bcde88 100644 --- a/core/urls.py +++ b/core/urls.py @@ -9,6 +9,7 @@ urlpatterns = [ path('voters/export-csv/', views.export_voters_csv, name='export_voters_csv'), path('voters//', views.voter_detail, name='voter_detail'), path('voters//edit/', views.voter_edit, name='voter_edit'), + path('voters//delete/', views.voter_delete, name='voter_delete'), path('voters//geocode/', views.voter_geocode, name='voter_geocode'), path('voters//interaction/add/', views.add_interaction, name='add_interaction'), @@ -26,4 +27,4 @@ urlpatterns = [ path('voters//event-participation/add/', views.add_event_participation, name='add_event_participation'), path('event-participation//edit/', views.edit_event_participation, name='edit_event_participation'), path('event-participation//delete/', views.delete_event_participation, name='delete_event_participation'), -] \ No newline at end of file +] diff --git a/core/views.py b/core/views.py index b60b666..de09425 100644 --- a/core/views.py +++ b/core/views.py @@ -539,4 +539,18 @@ def export_voters_csv(request): voter.get_candidate_support_display(), voter.get_yard_sign_display(), voter.get_window_sticker_display(), voter.notes ]) - return response \ No newline at end of file + return response +def voter_delete(request, voter_id): + """ + Delete a voter profile. 
+ """ + selected_tenant_id = request.session.get('tenant_id') + tenant = get_object_or_404(Tenant, id=selected_tenant_id) + voter = get_object_or_404(Voter, id=voter_id, tenant=tenant) + + if request.method == 'POST': + voter.delete() + messages.success(request, "Voter profile deleted successfully.") + return redirect('voter_list') + + return redirect('voter_detail', voter_id=voter.id) diff --git a/patch_voter_likelihood.py b/patch_voter_likelihood.py new file mode 100644 index 0000000..fa5af1e --- /dev/null +++ b/patch_voter_likelihood.py @@ -0,0 +1,330 @@ +import sys + +file_path = 'core/admin.py' +with open(file_path, 'r') as f: + lines = f.readlines() + +start_line = -1 +for i, line in enumerate(lines): + if 'def import_likelihoods(self, request):' in line and 'class VoterLikelihoodAdmin' in lines[i-1]: + start_line = i + break + # Also check if it's just after search_fields or something + if 'def import_likelihoods(self, request):' in line: + # Check if we are inside VoterLikelihoodAdmin + # We can look back for @admin.register(VoterLikelihood) + j = i + while j > 0: + if '@admin.register(VoterLikelihood)' in lines[j]: + start_line = i + break + if '@admin.register' in lines[j] and j < i - 50: # Too far back + break + j -= 1 + if start_line != -1: + break + +if start_line == -1: + print("Could not find import_likelihoods in VoterLikelihoodAdmin") + sys.exit(1) + +# Find the end of the method +# The method ends before @admin.register(CampaignSettings) +end_line = -1 +for i in range(start_line, len(lines)): + if '@admin.register(CampaignSettings)' in lines[i]: + end_line = i + break + +if end_line == -1: + print("Could not find end of import_likelihoods") + sys.exit(1) + +new_method = """ def import_likelihoods(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {k: request.POST.get(f"map_{k}") 
for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + with open(file_path, 'r', encoding='utf-8-sig') as f: + # Fast count and partial preview + total_count = sum(1 for line in f) - 1 + f.seek(0) + reader = csv.DictReader(f) + preview_rows = [] + voter_ids_for_preview = set() + election_types_for_preview = set() + + v_id_col = mapping.get('voter_id') + et_col = mapping.get('election_type') + + if not v_id_col or not et_col: + raise ValueError("Missing mapping for Voter ID or Election Type") + + for i, row in enumerate(reader): + if i < 10: + preview_rows.append(row) + v_id = row.get(v_id_col) + et_name = row.get(et_col) + if v_id: voter_ids_for_preview.add(str(v_id).strip()) + if et_name: election_types_for_preview.add(str(et_name).strip()) + else: + break + + existing_likelihoods = set(VoterLikelihood.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids_for_preview, + election_type__name__in=election_types_for_preview + ).values_list("voter__voter_id", "election_type__name")) + + preview_data = [] + for row in preview_rows: + v_id = str(row.get(v_id_col, '')).strip() + et_name = str(row.get(et_col, '')).strip() + action = "update" if (v_id, et_name) in existing_likelihoods else "create" + preview_data.append({ + "action": action, + "identifier": f"Voter: {v_id}, Election: {et_name}", + "details": f"Likelihood: {row.get(mapping.get('likelihood', '')) or ''}" + }) + + context = self.admin_site.each_context(request) + context.update({ + "title": "Import Preview", + "total_count": total_count, + "create_count": "N/A", + "update_count": "N/A", + "preview_data": preview_data, + "mapping": mapping, + "file_path": file_path, + "tenant_id": tenant_id, + "action_url": request.path, + "opts": self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif 
"_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + count = 0 + created_count = 0 + updated_count = 0 + skipped_no_change = 0 + skipped_no_id = 0 + errors = 0 + failed_rows = [] + batch_size = 500 + + likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES) + likelihood_reverse = {v.lower(): k for k, v in likelihood_choices.items()} + + # Pre-fetch election types for this tenant + election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)} + + def chunk_reader(reader, size): + chunk = [] + for row in reader: + chunk.append(row) + if len(chunk) == size: + yield chunk + chunk = [] + if chunk: + yield chunk + + with open(file_path, "r", encoding="utf-8-sig") as f: + reader = csv.DictReader(f) + v_id_col = mapping.get("voter_id") + et_col = mapping.get("election_type") + l_col = mapping.get("likelihood") + + if not v_id_col or not et_col or not l_col: + raise ValueError("Missing mapping for Voter ID, Election Type, or Likelihood") + + print(f"DEBUG: Starting likelihood import. 
Tenant: {tenant.name}") + + total_processed = 0 + for chunk in chunk_reader(reader, batch_size): + with transaction.atomic(): + voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] + et_names = [str(row.get(et_col)).strip() for row in chunk if row.get(et_col)] + + # Fetch existing voters + voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")} + + # Fetch existing likelihoods + existing_likelihoods = { + (vl.voter.voter_id, vl.election_type.name): vl + for vl in VoterLikelihood.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids, + election_type__name__in=et_names + ).select_related("voter", "election_type") + } + + to_create = [] + to_update = [] + processed_in_batch = set() + + for row in chunk: + total_processed += 1 + try: + raw_v_id = row.get(v_id_col) + raw_et_name = row.get(et_col) + raw_l_val = row.get(l_col) + + if raw_v_id is None or raw_et_name is None or raw_l_val is None: + skipped_no_id += 1 + continue + + v_id = str(raw_v_id).strip() + et_name = str(raw_et_name).strip() + l_val = str(raw_l_val).strip() + + if not v_id or not et_name or not l_val: + skipped_no_id += 1 + continue + + if (v_id, et_name) in processed_in_batch: + continue + processed_in_batch.add((v_id, et_name)) + + voter = voters.get(v_id) + if not voter: + print(f"DEBUG: Voter {v_id} not found for likelihood import") + row["Import Error"] = f"Voter {v_id} not found" + failed_rows.append(row) + errors += 1 + continue + + # Get or create election type + if et_name not in election_types: + election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=et_name) + election_types[et_name] = election_type + election_type = election_types[et_name] + + # Normalize likelihood + normalized_l = None + l_val_lower = l_val.lower().replace(' ', '_') + if l_val_lower in likelihood_choices: + normalized_l = l_val_lower + elif l_val_lower in likelihood_reverse: + normalized_l = 
likelihood_reverse[l_val_lower] + else: + # Try to find by display name more broadly + for k, v in likelihood_choices.items(): + if v.lower() == l_val.lower(): + normalized_l = k + break + + if not normalized_l: + row["Import Error"] = f"Invalid likelihood value: {l_val}" + failed_rows.append(row) + errors += 1 + continue + + vl = existing_likelihoods.get((v_id, et_name)) + created = False + if not vl: + vl = VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l) + created = True + + if not created and vl.likelihood == normalized_l: + skipped_no_change += 1 + continue + + vl.likelihood = normalized_l + + if created: + to_create.append(vl) + created_count += 1 + else: + to_update.append(vl) + updated_count += 1 + + count += 1 + except Exception as e: + print(f"DEBUG: Error importing row {total_processed}: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if to_create: + VoterLikelihood.objects.bulk_create(to_create) + if to_update: + VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=250) + + print(f"DEBUG: Likelihood import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID). {errors} errors.") + + if os.path.exists(file_path): + os.remove(file_path) + + success_msg = f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)" + self.message_user(request, success_msg) + + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + if errors > 0: + error_url = reverse("admin:voterlikelihood-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. 
Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + print(f"DEBUG: Likelihood import failed: {e}") + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = VoterLikelihoodImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='utf-8-sig') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Likelihood Fields", + 'headers': headers, + 'model_fields': VOTER_LIKELIHOOD_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = VoterLikelihoodImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Likelihoods" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +""" + +lines[start_line:end_line] = [new_method] + +with open(file_path, 'w') as f: + f.writelines(lines) + +print(f"Successfully patched {file_path}") diff --git a/test_phone_type.py b/test_phone_type.py new file mode 100644 index 0000000..9c71bfc --- /dev/null +++ b/test_phone_type.py @@ -0,0 +1,18 @@ +import os +import django + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') +django.setup() + +from core.models import Voter +from core.forms import VoterForm + +voter = Voter.objects.first() +if voter: + voter.phone_type = 
'home' + voter.save() + form = VoterForm(instance=voter) + print(f"Voter ID: {voter.id}, Phone Type: {voter.phone_type}") + print(form['phone_type'].as_widget()) +else: + print("No voters found.") diff --git a/test_phone_type_choices.py b/test_phone_type_choices.py new file mode 100644 index 0000000..fbe1fcf --- /dev/null +++ b/test_phone_type_choices.py @@ -0,0 +1,11 @@ +import os +import django + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') +django.setup() + +from core.models import Voter +from core.forms import VoterForm + +form = VoterForm() +print(f"Choices: {form.fields['phone_type'].choices}")