diff --git a/config/__pycache__/settings.cpython-311.pyc b/config/__pycache__/settings.cpython-311.pyc index de0615d..45dee05 100644 Binary files a/config/__pycache__/settings.cpython-311.pyc and b/config/__pycache__/settings.cpython-311.pyc differ diff --git a/config/__pycache__/urls.cpython-311.pyc b/config/__pycache__/urls.cpython-311.pyc index c5a37af..66debe8 100644 Binary files a/config/__pycache__/urls.cpython-311.pyc and b/config/__pycache__/urls.cpython-311.pyc differ diff --git a/config/csrf_settings.tmp b/config/csrf_settings.tmp new file mode 100644 index 0000000..504f8f2 --- /dev/null +++ b/config/csrf_settings.tmp @@ -0,0 +1,13 @@ +CSRF_TRUSTED_ORIGINS = [ + "https://grassrootscrm.flatlogic.app", +] +CSRF_TRUSTED_ORIGINS += [ + origin for origin in [ + os.getenv("HOST_FQDN", ""), + os.getenv("CSRF_TRUSTED_ORIGIN", "") + ] if origin +] +CSRF_TRUSTED_ORIGINS = [ + f"https://{host}" if not host.startswith(("http://", "https://")) else host + for host in CSRF_TRUSTED_ORIGINS +] diff --git a/config/settings.py b/config/settings.py index e1e7409..f85f89d 100644 --- a/config/settings.py +++ b/config/settings.py @@ -23,10 +23,14 @@ DEBUG = os.getenv("DJANGO_DEBUG", "true").lower() == "true" ALLOWED_HOSTS = [ "127.0.0.1", "localhost", + "grassrootscrm.flatlogic.app", os.getenv("HOST_FQDN", ""), ] CSRF_TRUSTED_ORIGINS = [ + "https://grassrootscrm.flatlogic.app", +] +CSRF_TRUSTED_ORIGINS += [ origin for origin in [ os.getenv("HOST_FQDN", ""), os.getenv("CSRF_TRUSTED_ORIGIN", "") @@ -64,6 +68,7 @@ MIDDLEWARE = [ 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'core.middleware.LoginRequiredMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Disable X-Frame-Options middleware to allow Flatlogic preview iframes. 
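The MIDDLEWARE hunk above registers core.middleware.LoginRequiredMiddleware, and the diff also wires up LOGIN_URL/LOGIN_REDIRECT_URL/LOGOUT_REDIRECT_URL and the accounts/ include below, but only the compiled core/__pycache__/middleware.cpython-311.pyc is committed; the source of core/middleware.py does not appear in this diff. A minimal sketch of what such a middleware typically looks like, assuming it redirects anonymous users to LOGIN_URL and exempts the login, accounts, admin and static paths (the exact exemption list is an assumption, not the committed code):

from django.conf import settings
from django.shortcuts import redirect
from django.urls import reverse

class LoginRequiredMiddleware:
    """Sketch only: redirect anonymous users to LOGIN_URL for non-exempt paths."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Placed after AuthenticationMiddleware, so request.user is available here.
        if not request.user.is_authenticated:
            login_url = reverse(settings.LOGIN_URL)  # LOGIN_URL = 'login' per this diff
            exempt = (login_url, "/accounts/", "/admin/", settings.STATIC_URL or "/static/")
            if not request.path.startswith(exempt):
                return redirect(f"{login_url}?next={request.path}")
        return self.get_response(request)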
# 'django.middleware.clickjacking.XFrameOptionsMiddleware', @@ -181,3 +186,6 @@ if EMAIL_USE_SSL: DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' GOOGLE_MAPS_API_KEY = os.getenv("GOOGLE_MAPS_API_KEY", "AIzaSyAluZTEjH-RSiGJUHnfrSqWbcAXCGzGOq4") +LOGIN_URL = 'login' +LOGIN_REDIRECT_URL = 'index' +LOGOUT_REDIRECT_URL = 'login' diff --git a/config/urls.py b/config/urls.py index bcfc074..270c09c 100644 --- a/config/urls.py +++ b/config/urls.py @@ -22,6 +22,7 @@ from django.conf.urls.static import static urlpatterns = [ path("admin/", admin.site.urls), path("", include("core.urls")), + path("accounts/", include("django.contrib.auth.urls")), ] if settings.DEBUG: diff --git a/core/__pycache__/admin.cpython-311.pyc b/core/__pycache__/admin.cpython-311.pyc index 4bd0518..9a8a512 100644 Binary files a/core/__pycache__/admin.cpython-311.pyc and b/core/__pycache__/admin.cpython-311.pyc differ diff --git a/core/__pycache__/forms.cpython-311.pyc b/core/__pycache__/forms.cpython-311.pyc index b895f06..86cf633 100644 Binary files a/core/__pycache__/forms.cpython-311.pyc and b/core/__pycache__/forms.cpython-311.pyc differ diff --git a/core/__pycache__/middleware.cpython-311.pyc b/core/__pycache__/middleware.cpython-311.pyc new file mode 100644 index 0000000..9875fd6 Binary files /dev/null and b/core/__pycache__/middleware.cpython-311.pyc differ diff --git a/core/admin.py b/core/admin.py index 78b901e..8466f7e 100644 --- a/core/admin.py +++ b/core/admin.py @@ -21,7 +21,7 @@ from .models import ( from .forms import ( VoterImportForm, EventImportForm, EventParticipationImportForm, DonationImportForm, InteractionImportForm, VoterLikelihoodImportForm, - VolunteerImportForm + VolunteerImportForm, VotingRecordImportForm ) logger = logging.getLogger(__name__) @@ -108,6 +108,13 @@ VOTER_LIKELIHOOD_MAPPABLE_FIELDS = [ ('likelihood', 'Likelihood'), ] +VOTING_RECORD_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('election_date', 'Election Date'), + ('election_description', 'Election Description'), + ('primary_party', 'Primary Party'), +] + class BaseImportAdminMixin: def download_errors(self, request): logger.info(f"download_errors called for {self.model._meta.model_name}") @@ -131,6 +138,15 @@ class BaseImportAdminMixin: return response + def chunk_reader(self, reader, size): + chunk = [] + for row in reader: + chunk.append(row) + if len(chunk) == size: + yield chunk + chunk = [] + if chunk: + yield chunk class TenantUserRoleInline(admin.TabularInline): model = TenantUserRole extra = 1 @@ -323,25 +339,7 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): yard_sign_choices = dict(Voter.YARD_SIGN_CHOICES) yard_sign_reverse = {v.lower(): k for k, v in yard_sign_choices.items()} window_sticker_choices = dict(Voter.WINDOW_STICKER_CHOICES) - window_sticker_reverse = {v.lower(): k for k, v in window_sticker_choices.items()} - phone_type_choices = dict(Voter.PHONE_TYPE_CHOICES) - phone_type_reverse = {v.lower(): k for k, v in phone_type_choices.items()} - valid_fields = {f.name for f in Voter._meta.get_fields()} - mapped_fields = {f for f in mapping.keys() if f in valid_fields} - # Ensure derived/special fields are in update_fields - update_fields = list(mapped_fields | {"address", "phone", "secondary_phone", "secondary_phone_type", "longitude", "latitude"}) - if "voter_id" in update_fields: update_fields.remove("voter_id") - - def chunk_reader(reader, size): - chunk = [] - for row in reader: - chunk.append(row) - if len(chunk) == size: - yield chunk - chunk = [] - if chunk: - yield chunk with 
open(file_path, "r", encoding="utf-8-sig") as f: reader = csv.DictReader(f) @@ -352,7 +350,7 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): print(f"DEBUG: Starting voter import. Tenant: {tenant.name}. Voter ID column: {v_id_col}") total_processed = 0 - for chunk_index, chunk in enumerate(chunk_reader(reader, batch_size)): + for chunk_index, chunk in enumerate(self.chunk_reader(reader, batch_size)): with transaction.atomic(): voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] existing_voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)} @@ -917,11 +915,13 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin): class VolunteerEventAdmin(admin.ModelAdmin): list_display = ('volunteer', 'event', 'role') list_filter = ('event__tenant', 'event', 'role') + autocomplete_fields = ["volunteer", "event"] @admin.register(EventParticipation) class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin): list_display = ('voter', 'event', 'participation_status') list_filter = ('event__tenant', 'event', 'participation_status') + autocomplete_fields = ["voter", "event"] change_list_template = "admin/eventparticipation_change_list.html" def get_urls(self): @@ -1122,6 +1122,7 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin): list_display = ('id', 'voter', 'date', 'amount', 'method') list_filter = ('voter__tenant', 'date', 'method') search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id') + autocomplete_fields = ["voter"] change_list_template = "admin/donation_change_list.html" def get_urls(self): @@ -1311,6 +1312,7 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin): list_display = ('id', 'voter', 'volunteer', 'type', 'date', 'description') list_filter = ('voter__tenant', 'type', 'date', 'volunteer') search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'description', 'volunteer__first_name', 'volunteer__last_name') + autocomplete_fields = ["voter", "volunteer"] change_list_template = "admin/interaction_change_list.html" def get_urls(self): @@ -1512,6 +1514,7 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): list_display = ('id', 'voter', 'election_type', 'likelihood') list_filter = ('voter__tenant', 'election_type', 'likelihood') search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id') + autocomplete_fields = ["voter"] change_list_template = "admin/voterlikelihood_change_list.html" def get_urls(self): @@ -1613,15 +1616,6 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): # Pre-fetch election types for this tenant election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)} - def chunk_reader(reader, size): - chunk = [] - for row in reader: - chunk.append(row) - if len(chunk) == size: - yield chunk - chunk = [] - if chunk: - yield chunk with open(file_path, "r", encoding="utf-8-sig") as f: reader = csv.DictReader(f) @@ -1635,7 +1629,7 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): print(f"DEBUG: Starting likelihood import. 
Tenant: {tenant.name}") total_processed = 0 - for chunk in chunk_reader(reader, batch_size): + for chunk in self.chunk_reader(reader, batch_size): with transaction.atomic(): voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] et_names = [str(row.get(et_col)).strip() for row in chunk if row.get(et_col)] @@ -1751,7 +1745,7 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): os.remove(file_path) success_msg = f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)" - self.message_user(success_msg) + self.message_user(request, success_msg) request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows request.session.modified = True @@ -1806,4 +1800,283 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): class CampaignSettingsAdmin(admin.ModelAdmin): list_display = ('tenant', 'donation_goal', 'twilio_from_number') list_filter = ('tenant',) - fields = ('tenant', 'donation_goal', 'twilio_account_sid', 'twilio_auth_token', 'twilio_from_number') \ No newline at end of file + fields = ('tenant', 'donation_goal', 'twilio_account_sid', 'twilio_auth_token', 'twilio_from_number') + +@admin.register(VotingRecord) +class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('voter', 'election_date', 'election_description', 'primary_party') + list_filter = ('voter__tenant', 'election_date', 'primary_party') + search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'election_description') + autocomplete_fields = ["voter"] + change_list_template = "admin/votingrecord_change_list.html" + + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + from core.models import Tenant + extra_context["tenants"] = Tenant.objects.all() + return super().changelist_view(request, extra_context=extra_context) + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='votingrecord-download-errors'), + path('import-voting-records/', self.admin_site.admin_view(self.import_voting_records), name='import-voting-records'), + ] + return my_urls + urls + + def import_voting_records(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTING_RECORD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + with open(file_path, 'r', encoding='utf-8-sig') as f: + # Optimization: Fast count and partial preview + total_count = sum(1 for line in f) - 1 + f.seek(0) + reader = csv.DictReader(f) + preview_rows = [] + voter_ids_for_preview = set() + + v_id_col = mapping.get('voter_id') + ed_col = mapping.get('election_date') + desc_col = mapping.get('election_description') + + if not v_id_col or not ed_col or not desc_col: + raise ValueError("Missing mapping for Voter ID, Election Date, or Description") + + for i, row in enumerate(reader): + if i < 10: + preview_rows.append(row) + v_id = row.get(v_id_col) + if v_id: voter_ids_for_preview.add(str(v_id).strip()) + else: + break + + existing_records = set(VotingRecord.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids_for_preview + ).values_list("voter__voter_id", 
"election_date", "election_description")) + + preview_data = [] + for row in preview_rows: + v_id = str(row.get(v_id_col, '')).strip() + e_date_raw = row.get(ed_col) + e_desc = str(row.get(desc_col, '')).strip() + + # Try to parse date for accurate comparison in preview + e_date = None + if e_date_raw: + for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]: + try: + e_date = datetime.strptime(str(e_date_raw).strip(), fmt).date() + break + except: + continue + + action = "update" if (v_id, e_date, e_desc) in existing_records else "create" + preview_data.append({ + "action": action, + "identifier": f"Voter: {v_id}, Election: {e_desc}", + "details": f"Date: {e_date or e_date_raw}" + }) + + context = self.admin_site.each_context(request) + context.update({ + "title": "Import Preview", + "total_count": total_count, + "create_count": "N/A", + "update_count": "N/A", + "preview_data": preview_data, + "mapping": mapping, + "file_path": file_path, + "tenant_id": tenant_id, + "action_url": request.path, + "opts": self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTING_RECORD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + count = 0 + created_count = 0 + updated_count = 0 + skipped_no_change = 0 + skipped_no_id = 0 + errors = 0 + failed_rows = [] + batch_size = 500 + + with open(file_path, "r", encoding="utf-8-sig") as f: + reader = csv.DictReader(f) + v_id_col = mapping.get("voter_id") + ed_col = mapping.get("election_date") + desc_col = mapping.get("election_description") + party_col = mapping.get("primary_party") + + if not v_id_col or not ed_col or not desc_col: + raise ValueError("Missing mapping for Voter ID, Election Date, or Description") + + print(f"DEBUG: Starting voting record import. 
Tenant: {tenant.name}") + + total_processed = 0 + for chunk in self.chunk_reader(reader, batch_size): + with transaction.atomic(): + voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] + + # Fetch existing voters + voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")} + + # Fetch existing records + existing_records = { + (vr.voter.voter_id, vr.election_date, vr.election_description): vr + for vr in VotingRecord.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids + ).select_related("voter") + } + + to_create = [] + to_update = [] + processed_in_batch = set() + + for row in chunk: + total_processed += 1 + try: + raw_v_id = row.get(v_id_col) + raw_ed = row.get(ed_col) + raw_desc = row.get(desc_col) + party = str(row.get(party_col, '')).strip() if party_col else "" + + if not raw_v_id or not raw_ed or not raw_desc: + skipped_no_id += 1 + continue + + v_id = str(raw_v_id).strip() + desc = str(raw_desc).strip() + + # Parse date + e_date = None + val = str(raw_ed).strip() + for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]: + try: + e_date = datetime.strptime(val, fmt).date() + break + except: + continue + + if not e_date: + row["Import Error"] = f"Invalid date format: {val}" + failed_rows.append(row) + errors += 1 + continue + + if (v_id, e_date, desc) in processed_in_batch: + continue + processed_in_batch.add((v_id, e_date, desc)) + + voter = voters.get(v_id) + if not voter: + row["Import Error"] = f"Voter {v_id} not found" + failed_rows.append(row) + errors += 1 + continue + + vr = existing_records.get((v_id, e_date, desc)) + created = False + if not vr: + vr = VotingRecord(voter=voter, election_date=e_date, election_description=desc, primary_party=party) + created = True + + if not created and vr.primary_party == party: + skipped_no_change += 1 + continue + + vr.primary_party = party + + if created: + to_create.append(vr) + created_count += 1 + else: + to_update.append(vr) + updated_count += 1 + + count += 1 + except Exception as e: + print(f"DEBUG: Error importing row {total_processed}: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if to_create: + VotingRecord.objects.bulk_create(to_create) + if to_update: + VotingRecord.objects.bulk_update(to_update, ["primary_party"], batch_size=250) + + print(f"DEBUG: Voting record import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID/Data). {errors} errors.") + + if os.path.exists(file_path): + os.remove(file_path) + + success_msg = f"Import complete: {count} voting records created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)" + self.message_user(request, success_msg) + + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + if errors > 0: + error_url = reverse("admin:votingrecord-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. 
Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + print(f"DEBUG: Voting record import failed: {e}") + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = VotingRecordImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='utf-8-sig') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Voting Record Fields", + 'headers': headers, + 'model_fields': VOTING_RECORD_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = VotingRecordImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Voting Records" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) diff --git a/core/admin.py.bak b/core/admin.py.bak new file mode 100644 index 0000000..883003b --- /dev/null +++ b/core/admin.py.bak @@ -0,0 +1,1816 @@ +from decimal import Decimal +from datetime import datetime, date +from django.db import transaction +from django.http import HttpResponse +from django.utils.safestring import mark_safe +import csv +import io +import logging +import tempfile +import os +from django.contrib import admin, messages +from django.urls import path, reverse +from django.shortcuts import render, redirect +from django.template.response import TemplateResponse +from .models import ( + format_phone_number, + Tenant, TenantUserRole, InteractionType, DonationMethod, ElectionType, EventType, Voter, + VotingRecord, Event, EventParticipation, Donation, Interaction, VoterLikelihood, CampaignSettings, + Interest, Volunteer, VolunteerEvent, ParticipationStatus +) +from .forms import ( + VoterImportForm, EventImportForm, EventParticipationImportForm, + DonationImportForm, InteractionImportForm, VoterLikelihoodImportForm, + VolunteerImportForm, VotingRecordImportForm +) + +logger = logging.getLogger(__name__) + +VOTER_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('first_name', 'First Name'), + ('last_name', 'Last Name'), + ('nickname', 'Nickname'), + ('birthdate', 'Birthdate'), + ('address_street', 'Street Address'), + ('city', 'City'), + ('state', 'State'), + ('prior_state', 'Prior State'), + ('zip_code', 'Zip Code'), + ('county', 'County'), + ('phone', 'Phone'), + ('notes', 'Notes'), + ('phone_type', 'Phone Type'), + ('email', 'Email'), + ('district', 'District'), + ('precinct', 'Precinct'), + ('registration_date', 'Registration Date'), + ('is_targeted', 'Is Targeted'), + ('candidate_support', 'Candidate Support'), + ('yard_sign', 'Yard Sign'), + ('window_sticker', 'Window Sticker'), + ('latitude', 'Latitude'), + ('longitude', 'Longitude'), + ('secondary_phone', 'Secondary Phone'), + ('secondary_phone_type', 'Secondary Phone Type'), +] + +EVENT_MAPPABLE_FIELDS = [ + ('name', 'Name'), + ('date', 'Date'), + ('start_time', 'Start Time'), + ('end_time', 
'End Time'), + ('event_type', 'Event Type (Name)'), + ('description', 'Description'), + ('location_name', 'Location Name'), + ('address', 'Address'), + ('city', 'City'), + ('state', 'State'), + ('zip_code', 'Zip Code'), + ('latitude', 'Latitude'), + ('longitude', 'Longitude'), +] + +EVENT_PARTICIPATION_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('event_name', 'Event Name'), + ('participation_status', 'Participation Status'), +] + +DONATION_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('date', 'Date'), + ('amount', 'Amount'), + ('method', 'Donation Method (Name)'), +] + +INTERACTION_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('volunteer_email', 'Volunteer Email'), + ('date', 'Date'), + ('type', 'Interaction Type (Name)'), + ('description', 'Description'), + ('notes', 'Notes'), +] + + +VOLUNTEER_MAPPABLE_FIELDS = [ + ('first_name', 'First Name'), + ('last_name', 'Last Name'), + ('email', 'Email'), + ('phone', 'Phone'), + ('notes', 'Notes'), +] + +VOTER_LIKELIHOOD_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('election_type', 'Election Type (Name)'), + ('likelihood', 'Likelihood'), +] + +VOTING_RECORD_MAPPABLE_FIELDS = [ + ('voter_id', 'Voter ID'), + ('election_date', 'Election Date'), + ('election_description', 'Election Description'), + ('primary_party', 'Primary Party'), +] + +class BaseImportAdminMixin: + def download_errors(self, request): + logger.info(f"download_errors called for {self.model._meta.model_name}") + session_key = f"{self.model._meta.model_name}_import_errors" + failed_rows = request.session.get(session_key, []) + if not failed_rows: + self.message_user(request, "No error log found in session.", level=messages.WARNING) + return redirect("..") + + response = HttpResponse(content_type="text/csv") + response["Content-Disposition"] = f"attachment; filename={self.model._meta.model_name}_import_errors.csv" + + if failed_rows: + all_keys = set() + for r in failed_rows: + all_keys.update(r.keys()) + + writer = csv.DictWriter(response, fieldnames=sorted(list(all_keys))) + writer.writeheader() + writer.writerows(failed_rows) + + return response + +class TenantUserRoleInline(admin.TabularInline): + model = TenantUserRole + extra = 1 + +class CampaignSettingsInline(admin.StackedInline): + model = CampaignSettings + can_delete = False + +@admin.register(Tenant) +class TenantAdmin(admin.ModelAdmin): + list_display = ('name', 'created_at') + search_fields = ('name',) + inlines = [TenantUserRoleInline, CampaignSettingsInline] + +@admin.register(TenantUserRole) +class TenantUserRoleAdmin(admin.ModelAdmin): + list_display = ('user', 'tenant', 'role') + list_filter = ('tenant', 'role') + search_fields = ('user__username', 'tenant__name') + +@admin.register(InteractionType) +class InteractionTypeAdmin(admin.ModelAdmin): + list_display = ('name', 'tenant', 'is_active') + list_filter = ('tenant', 'is_active') + search_fields = ('name',) + +@admin.register(DonationMethod) +class DonationMethodAdmin(admin.ModelAdmin): + list_display = ('name', 'tenant', 'is_active') + list_filter = ('tenant', 'is_active') + search_fields = ('name',) + +@admin.register(ElectionType) +class ElectionTypeAdmin(admin.ModelAdmin): + list_display = ('name', 'tenant', 'is_active') + list_filter = ('tenant', 'is_active') + search_fields = ('name',) + +@admin.register(EventType) +class EventTypeAdmin(admin.ModelAdmin): + list_display = ('name', 'tenant', 'is_active') + list_filter = ('tenant', 'is_active') + search_fields = ('name',) + + +@admin.register(ParticipationStatus) +class 
ParticipationStatusAdmin(admin.ModelAdmin): + list_display = ('name', 'tenant', 'is_active') + list_filter = ('tenant', 'is_active') + search_fields = ('name',) + change_list_template = 'admin/participationstatus_change_list.html' + + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + from core.models import Tenant + extra_context['tenants'] = Tenant.objects.all() + return super().changelist_view(request, extra_context=extra_context) + +@admin.register(Interest) +class InterestAdmin(admin.ModelAdmin): + list_display = ('name', 'tenant') + list_filter = ('tenant',) + fields = ('tenant', 'name') + search_fields = ('name',) + +class VotingRecordInline(admin.TabularInline): + model = VotingRecord + extra = 1 + +class DonationInline(admin.TabularInline): + model = Donation + extra = 1 + +class InteractionInline(admin.TabularInline): + model = Interaction + extra = 1 + +class VoterLikelihoodInline(admin.TabularInline): + model = VoterLikelihood + extra = 1 + +class VolunteerEventInline(admin.TabularInline): + model = VolunteerEvent + extra = 1 + +@admin.register(Voter) +class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('first_name', 'last_name', 'nickname', 'voter_id', 'tenant', 'district', 'candidate_support', 'is_targeted', 'city', 'state', 'prior_state') + list_filter = ('tenant', 'candidate_support', 'is_targeted', 'phone_type', 'yard_sign', 'district', 'city', 'state', 'prior_state') + search_fields = ('first_name', 'last_name', 'nickname', 'voter_id', 'address', 'city', 'state', 'prior_state', 'zip_code', 'county') + inlines = [VotingRecordInline, DonationInline, InteractionInline, VoterLikelihoodInline] + readonly_fields = ('address',) + change_list_template = "admin/voter_change_list.html" + + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + from core.models import Tenant + extra_context["tenants"] = Tenant.objects.all() + return super().changelist_view(request, extra_context=extra_context) + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='voter-download-errors'), + path('import-voters/', self.admin_site.admin_view(self.import_voters), name='import-voters'), + ] + return my_urls + urls + + def import_voters(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get("file_path") + tenant_id = request.POST.get("tenant") + tenant = Tenant.objects.get(id=tenant_id) + + mapping = {} + for field_name, _ in VOTER_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f"map_{field_name}") + + try: + with open(file_path, "r", encoding="utf-8-sig") as f: + # Optimization: Fast count and partial preview + total_count = sum(1 for line in f) - 1 + f.seek(0) + reader = csv.DictReader(f) + preview_rows = [] + voter_ids_for_preview = [] + for i, row in enumerate(reader): + if i < 10: + preview_rows.append(row) + v_id = row.get(mapping.get("voter_id")) + if v_id: + voter_ids_for_preview.append(v_id) + else: + break + + existing_preview_ids = set(Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids_for_preview).values_list("voter_id", flat=True)) + + preview_data = [] + for row in preview_rows: + v_id = row.get(mapping.get("voter_id")) + action = "update" if v_id in existing_preview_ids else "create" + preview_data.append({ + "action": action, + "identifier": v_id, + "details": f"{row.get(mapping.get('first_name', '')) or ''} 
{row.get(mapping.get('last_name', '')) or ''}".strip() + }) + + update_count = "N/A" + create_count = "N/A" + + context = self.admin_site.each_context(request) + context.update({ + "title": "Import Preview", + "total_count": total_count, + "create_count": create_count, + "update_count": update_count, + "preview_data": preview_data, + "mapping": mapping, + "file_path": file_path, + "tenant_id": tenant_id, + "action_url": request.path, + "opts": self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + + elif "_import" in request.POST: + file_path = request.POST.get("file_path") + tenant_id = request.POST.get("tenant") + tenant = Tenant.objects.get(id=tenant_id) + + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + count = 0 + created_count = 0 + updated_count = 0 + skipped_no_change = 0 + skipped_no_id = 0 + errors = 0 + failed_rows = [] + batch_size = 500 + + support_choices = dict(Voter.SUPPORT_CHOICES) + support_reverse = {v.lower(): k for k, v in support_choices.items()} + yard_sign_choices = dict(Voter.YARD_SIGN_CHOICES) + yard_sign_reverse = {v.lower(): k for k, v in yard_sign_choices.items()} + window_sticker_choices = dict(Voter.WINDOW_STICKER_CHOICES) + window_sticker_reverse = {v.lower(): k for k, v in window_sticker_choices.items()} + phone_type_choices = dict(Voter.PHONE_TYPE_CHOICES) + phone_type_reverse = {v.lower(): k for k, v in phone_type_choices.items()} + + valid_fields = {f.name for f in Voter._meta.get_fields()} + mapped_fields = {f for f in mapping.keys() if f in valid_fields} + # Ensure derived/special fields are in update_fields + update_fields = list(mapped_fields | {"address", "phone", "secondary_phone", "secondary_phone_type", "longitude", "latitude"}) + if "voter_id" in update_fields: update_fields.remove("voter_id") + + def chunk_reader(reader, size): + chunk = [] + for row in reader: + chunk.append(row) + if len(chunk) == size: + yield chunk + chunk = [] + if chunk: + yield chunk + + with open(file_path, "r", encoding="utf-8-sig") as f: + reader = csv.DictReader(f) + v_id_col = mapping.get("voter_id") + if not v_id_col: + raise ValueError("Voter ID mapping is missing") + + print(f"DEBUG: Starting voter import. Tenant: {tenant.name}. 
Voter ID column: {v_id_col}") + + total_processed = 0 + for chunk_index, chunk in enumerate(chunk_reader(reader, batch_size)): + with transaction.atomic(): + voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] + existing_voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)} + + to_create = [] + to_update = [] + processed_in_batch = set() + + for row in chunk: + total_processed += 1 + try: + raw_voter_id = row.get(v_id_col) + if raw_voter_id is None: + skipped_no_id += 1 + continue + + voter_id = str(raw_voter_id).strip() + if not voter_id: + skipped_no_id += 1 + continue + + if voter_id in processed_in_batch: + continue + processed_in_batch.add(voter_id) + + voter = existing_voters.get(voter_id) + created = False + if not voter: + voter = Voter(tenant=tenant, voter_id=voter_id) + created = True + + changed = created + + for field_name, csv_col in mapping.items(): + if field_name == "voter_id": continue + val = row.get(csv_col) + if val is None: continue + val = str(val).strip() + if val == "": continue + + if field_name == "is_targeted": + val = str(val).lower() in ["true", "1", "yes"] + elif field_name in ["birthdate", "registration_date"]: + orig_val = val + parsed_date = None + for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]: + try: + parsed_date = datetime.strptime(val, fmt).date() + break + except: + continue + if parsed_date: + val = parsed_date + else: + # If parsing fails, keep original or skip? Let's skip updating this field. + continue + elif field_name == "candidate_support": + val_lower = val.lower() + if val_lower in support_choices: val = val_lower + elif val_lower in support_reverse: val = support_reverse[val_lower] + else: val = "unknown" + elif field_name == "yard_sign": + val_lower = val.lower() + if val_lower in yard_sign_choices: val = val_lower + elif val_lower in yard_sign_reverse: val = yard_sign_reverse[val_lower] + else: val = "none" + elif field_name == "window_sticker": + val_lower = val.lower() + if val_lower in window_sticker_choices: val = val_lower + elif val_lower in window_sticker_reverse: val = window_sticker_reverse[val_lower] + else: val = "none" + elif field_name in ["phone_type", "secondary_phone_type"]: + val_lower = val.lower() + if val_lower in phone_type_choices: + val = val_lower + elif val_lower in phone_type_reverse: + val = phone_type_reverse[val_lower] + else: + val = "cell" + + current_val = getattr(voter, field_name) + if current_val != val: + setattr(voter, field_name, val) + changed = True + + old_phone = voter.phone + voter.phone = format_phone_number(voter.phone) + if voter.phone != old_phone: + changed = True + + old_secondary_phone = voter.secondary_phone + voter.secondary_phone = format_phone_number(voter.secondary_phone) + if voter.secondary_phone != old_secondary_phone: + changed = True + + if voter.longitude: + try: + new_lon = Decimal(str(voter.longitude)[:12]) + if voter.longitude != new_lon: + voter.longitude = new_lon + changed = True + except: + pass + + old_address = voter.address + parts = [voter.address_street, voter.city, voter.state, voter.zip_code] + voter.address = ", ".join([p for p in parts if p]) + if voter.address != old_address: + changed = True + + if not changed: + skipped_no_change += 1 + continue + + if created: + to_create.append(voter) + created_count += 1 + else: + to_update.append(voter) + updated_count += 1 + + count += 1 + except Exception as e: + print(f"DEBUG: Error importing row {total_processed}: {e}") + row["Import 
Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if to_create: + Voter.objects.bulk_create(to_create) + if to_update: + Voter.objects.bulk_update(to_update, update_fields, batch_size=250) + + print(f"DEBUG: Voter import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID). {errors} errors.") + + if os.path.exists(file_path): + os.remove(file_path) + + success_msg = f"Import complete: {count} voters created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing ID, {errors} errors)" + self.message_user(request, success_msg) + + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + if errors > 0: + error_url = reverse("admin:voter-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + print(f"DEBUG: Voter import failed: {e}") + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = VoterImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES["file"] + tenant = form.cleaned_data["tenant"] + + if not csv_file.name.endswith(".csv"): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, "r", encoding="utf-8-sig") as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + "title": "Map Voter Fields", + "headers": headers, + "model_fields": VOTER_MAPPABLE_FIELDS, + "tenant_id": tenant.id, + "file_path": file_path, + "action_url": request.path, + "opts": self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = VoterImportForm() + + context = self.admin_site.each_context(request) + context["form"] = form + context["title"] = "Import Voters" + context["opts"] = self.model._meta + return render(request, "admin/import_csv.html", context) +@admin.register(Event) +class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('id', 'name', 'event_type', 'date', 'location_name', 'city', 'state', 'tenant') + list_filter = ('tenant', 'date', 'event_type', 'city', 'state') + search_fields = ('name', 'description', 'location_name', 'address', 'city', 'state', 'zip_code') + change_list_template = "admin/event_change_list.html" + + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + from core.models import Tenant + extra_context["tenants"] = Tenant.objects.all() + return super().changelist_view(request, extra_context=extra_context) + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='event-download-errors'), + path('import-events/', self.admin_site.admin_view(self.import_events), name='import-events'), + ] + return my_urls + urls + + def import_events(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + 
mapping = {} + for field_name, _ in EVENT_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + total_count = 0 + create_count = 0 + update_count = 0 + preview_data = [] + for row in reader: + total_count += 1 + date = row.get(mapping.get('date')) + event_type_name = row.get(mapping.get('event_type')) + event_name = row.get(mapping.get('name')) + exists = False + if date and event_type_name: + q = Event.objects.filter(tenant=tenant, date=date, event_type__name=event_type_name) + if event_name: + q = q.filter(name=event_name) + exists = q.exists() + + if exists: + update_count += 1 + action = 'update' + else: + create_count += 1 + action = 'create' + + if len(preview_data) < 10: + preview_data.append({ + 'action': action, + 'identifier': f"{event_name or 'No Name'} ({date} - {event_type_name})", + 'details': f"{row.get(mapping.get('city', '')) or ''}, {row.get(mapping.get('state', '')) or ''}" + }) + context = self.admin_site.each_context(request) + context.update({ + 'title': "Import Preview", + 'total_count': total_count, + 'create_count': create_count, + 'update_count': update_count, + 'preview_data': preview_data, + 'mapping': mapping, + 'file_path': file_path, + 'tenant_id': tenant_id, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + + mapping = {} + for field_name, _ in EVENT_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + count = 0 + errors = 0 + failed_rows = [] + for row in reader: + try: + date = row.get(mapping.get('date')) if mapping.get('date') else None + event_type_name = row.get(mapping.get('event_type')) if mapping.get('event_type') else None + description = row.get(mapping.get('description')) if mapping.get('description') else None + location_name = row.get(mapping.get('location_name')) if mapping.get('location_name') else None + name = row.get(mapping.get('name')) if mapping.get('name') else None + start_time = row.get(mapping.get('start_time')) if mapping.get('start_time') else None + end_time = row.get(mapping.get('end_time')) if mapping.get('end_time') else None + address = row.get(mapping.get('address')) if mapping.get('address') else None + city = row.get(mapping.get('city')) if mapping.get('city') else None + state = row.get(mapping.get('state')) if mapping.get('state') else None + zip_code = row.get(mapping.get('zip_code')) if mapping.get('zip_code') else None + latitude = row.get(mapping.get('latitude')) if mapping.get('latitude') else None + longitude = row.get(mapping.get('longitude')) if mapping.get('longitude') else None + + if not date or not event_type_name: + row["Import Error"] = "Missing date or event type" + failed_rows.append(row) + errors += 1 + continue + + event_type, _ = EventType.objects.get_or_create( + tenant=tenant, + name=event_type_name + ) + + defaults = {} + if description and description.strip(): + defaults['description'] = description + if location_name and location_name.strip(): + defaults['location_name'] = location_name + if name 
and name.strip(): + defaults['name'] = name + if start_time and start_time.strip(): + defaults['start_time'] = start_time + if end_time and end_time.strip(): + defaults['end_time'] = end_time + if address and address.strip(): + defaults['address'] = address + if city and city.strip(): + defaults['city'] = city + if state and state.strip(): + defaults['state'] = state + if zip_code and zip_code.strip(): + defaults['zip_code'] = zip_code + if latitude and latitude.strip(): + defaults['latitude'] = latitude + if longitude and longitude.strip(): + defaults['longitude'] = longitude + + defaults['date'] = date + defaults['event_type'] = event_type + Event.objects.update_or_create( + tenant=tenant, + name=name or '', + defaults=defaults + ) + count += 1 + except Exception as e: + logger.error(f"Error importing: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if os.path.exists(file_path): + os.remove(file_path) + self.message_user(request, f"Successfully imported {count} events.") + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}") + if errors > 0: + error_url = reverse("admin:event-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = EventImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Event Fields", + 'headers': headers, + 'model_fields': EVENT_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = EventImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Events" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +@admin.register(Volunteer) +class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('first_name', 'last_name', 'email', 'phone', 'tenant', 'user') + list_filter = ('tenant',) + fields = ('tenant', 'user', 'first_name', 'last_name', 'email', 'phone', 'notes', 'interests') + search_fields = ('first_name', 'last_name', 'email', 'phone') + inlines = [VolunteerEventInline, InteractionInline] + filter_horizontal = ('interests',) + change_list_template = "admin/volunteer_change_list.html" + + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + from core.models import Tenant + extra_context["tenants"] = Tenant.objects.all() + return super().changelist_view(request, extra_context=extra_context) + + def get_urls(self): + urls = super().get_urls() + 
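Worth flagging on the event import above: the preview checks for an existing event by tenant, date, event type and (optionally) name, but the write keys update_or_create only on (tenant, name), with date and event_type passed through defaults. Unnamed rows therefore all collapse into a single name="" event, and same-named events on different dates overwrite each other. A tighter key that mirrors the preview query would look like this (a suggestion sketch, not what the diff does):

event, _created = Event.objects.update_or_create(
    tenant=tenant,
    name=name or "",
    date=date,
    event_type=event_type,
    # date and event_type move into the lookup, so they are dropped from defaults;
    # defaults keeps description, location_name, address, city/state/zip, times, coordinates.
    defaults=defaults,
)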
my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='volunteer-download-errors'), + path('import-volunteers/', self.admin_site.admin_view(self.import_volunteers), name='import-volunteers'), + ] + return my_urls + urls + + def import_volunteers(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {} + for field_name, _ in VOLUNTEER_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + total_count = 0 + create_count = 0 + update_count = 0 + preview_data = [] + for row in reader: + total_count += 1 + email = row.get(mapping.get('email')) + exists = Volunteer.objects.filter(tenant=tenant, email=email).exists() + if exists: + update_count += 1 + action = 'update' + else: + create_count += 1 + action = 'create' + if len(preview_data) < 10: + preview_data.append({ + 'action': action, + 'identifier': email, + 'details': f"{row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}".strip() + }) + context = self.admin_site.each_context(request) + context.update({ + 'title': "Import Preview", + 'total_count': total_count, + 'create_count': create_count, + 'update_count': update_count, + 'preview_data': preview_data, + 'mapping': mapping, + 'file_path': file_path, + 'tenant_id': tenant_id, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {} + for field_name, _ in VOLUNTEER_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + count = 0 + errors = 0 + failed_rows = [] + for row in reader: + try: + email = row.get(mapping.get('email')) + if not email: + row["Import Error"] = "Missing email" + failed_rows.append(row) + errors += 1 + continue + volunteer_data = {} + for field_name, csv_col in mapping.items(): + if csv_col: + val = row.get(csv_col) + if val is not None and str(val).strip() != '': + if field_name == 'email': continue + volunteer_data[field_name] = val + Volunteer.objects.update_or_create( + tenant=tenant, + email=email, + defaults=volunteer_data + ) + count += 1 + except Exception as e: + logger.error(f"Error importing volunteer: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + if os.path.exists(file_path): + os.remove(file_path) + self.message_user(request, f"Successfully imported {count} volunteers.") + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + if errors > 0: + error_url = reverse("admin:volunteer-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. 
Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = VolunteerImportForm, VotingRecordImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.reader(f) + headers = next(reader) + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Volunteer Fields", + 'headers': headers, + 'model_fields': VOLUNTEER_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = VolunteerImportForm, VotingRecordImportForm() + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Volunteers" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +@admin.register(VolunteerEvent) +class VolunteerEventAdmin(admin.ModelAdmin): + list_display = ('volunteer', 'event', 'role') + list_filter = ('event__tenant', 'event', 'role') + +@admin.register(EventParticipation) +class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('voter', 'event', 'participation_status') + list_filter = ('event__tenant', 'event', 'participation_status') + change_list_template = "admin/eventparticipation_change_list.html" + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='eventparticipation-download-errors'), + path('import-event-participations/', self.admin_site.admin_view(self.import_event_participations), name='import-event-participations'), + ] + return my_urls + urls + + def import_event_participations(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {} + for field_name, _ in EVENT_PARTICIPATION_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + total_count = 0 + create_count = 0 + update_count = 0 + preview_data = [] + for row in reader: + total_count += 1 + voter_id = row.get(mapping.get('voter_id')) + event_name = row.get(mapping.get('event_name')) + + exists = False + if voter_id: + try: + voter = Voter.objects.get(tenant=tenant, voter_id=voter_id) + if event_name: + exists = EventParticipation.objects.filter(voter=voter, event__name=event_name).exists() + except Voter.DoesNotExist: + pass + + if exists: + update_count += 1 + action = 'update' + else: + create_count += 1 + action = 'create' + + if len(preview_data) < 10: + preview_data.append({ + 'action': action, + 'identifier': f"Voter: {voter_id}", + 'details': f"Participation: {row.get(mapping.get('participation_status', '')) or ''}" + }) + context = self.admin_site.each_context(request) + 
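One thing to note in the core/admin.py.bak hunk above: its import_volunteers branch builds the form as "form = VolunteerImportForm, VotingRecordImportForm(request.POST, request.FILES)" (and similarly in the GET branch), which evaluates to a tuple of a class and a form instance rather than a form, so the following form.is_valid() call would raise AttributeError. It reads like a stray search-and-replace; the presumable intent, matching the other importers in this file, is:

# Presumed intent for the .bak volunteer import branch (the committed .bak line
# builds a (class, instance) tuple instead):
form = VolunteerImportForm(request.POST, request.FILES)
# and in the no-upload GET branch:
form = VolunteerImportForm()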
context.update({ + 'title': "Import Preview", + 'total_count': total_count, + 'create_count': create_count, + 'update_count': update_count, + 'preview_data': preview_data, + 'mapping': mapping, + 'file_path': file_path, + 'tenant_id': tenant_id, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + + mapping = {} + for field_name, _ in EVENT_PARTICIPATION_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + count = 0 + errors = 0 + failed_rows = [] + for row in reader: + try: + voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None + participation_status_val = row.get(mapping.get('participation_status')) if mapping.get('participation_status') else None + + if not voter_id: + row["Import Error"] = "Missing voter ID" + failed_rows.append(row) + errors += 1 + continue + + try: + voter = Voter.objects.get(tenant=tenant, voter_id=voter_id) + except Voter.DoesNotExist: + error_msg = f"Voter with ID {voter_id} not found" + logger.error(error_msg) + row["Import Error"] = error_msg + failed_rows.append(row) + errors += 1 + continue + + event = None + event_name = row.get(mapping.get('event_name')) if mapping.get('event_name') else None + if event_name: + try: + event = Event.objects.get(tenant=tenant, name=event_name) + except Event.DoesNotExist: + pass + + if not event: + error_msg = "Event not found (check Event Name)" + logger.error(error_msg) + row["Import Error"] = error_msg + failed_rows.append(row) + errors += 1 + continue + + defaults = {} + if participation_status_val and participation_status_val.strip(): + status_obj, _ = ParticipationStatus.objects.get_or_create(tenant=tenant, name=participation_status_val.strip()) + defaults['participation_status'] = status_obj + else: + # Default to 'Invited' if not specified + status_obj, _ = ParticipationStatus.objects.get_or_create(tenant=tenant, name='Invited') + defaults['participation_status'] = status_obj + EventParticipation.objects.update_or_create( + event=event, + voter=voter, + defaults=defaults + ) + count += 1 + except Exception as e: + logger.error(f"Error importing: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if os.path.exists(file_path): + os.remove(file_path) + self.message_user(request, f"Successfully imported {count} participations.") + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}") + if errors > 0: + error_url = reverse("admin:eventparticipation-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. 
Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = EventParticipationImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Participation Fields", + 'headers': headers, + 'model_fields': EVENT_PARTICIPATION_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = EventParticipationImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Participations" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +@admin.register(Donation) +class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('id', 'voter', 'date', 'amount', 'method') + list_filter = ('voter__tenant', 'date', 'method') + search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id') + change_list_template = "admin/donation_change_list.html" + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='donation-download-errors'), + path('import-donations/', self.admin_site.admin_view(self.import_donations), name='import-donations'), + ] + return my_urls + urls + + def import_donations(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {} + for field_name, _ in DONATION_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + total_count = 0 + create_count = 0 + update_count = 0 + preview_data = [] + for row in reader: + total_count += 1 + voter_id = row.get(mapping.get('voter_id')) + date = row.get(mapping.get('date')) + amount = row.get(mapping.get('amount')) + exists = False + if voter_id and date and amount: + exists = Donation.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, date=date, amount=amount).exists() + + if exists: + update_count += 1 + action = 'update' + else: + create_count += 1 + action = 'create' + + if len(preview_data) < 10: + preview_data.append({ + 'action': action, + 'identifier': f"Voter: {voter_id}", + 'details': f"Date: {date}, Amount: {amount}" + }) + context = self.admin_site.each_context(request) + context.update({ + 'title': "Import Preview", + 'total_count': total_count, + 'create_count': create_count, + 'update_count': update_count, + 'preview_data': preview_data, + 'mapping': mapping, + 'file_path': file_path, + 'tenant_id': tenant_id, + 'action_url': request.path, + 'opts': self.model._meta, + }) + 
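The donation preview above (and the import branch that follows) filters and saves Donation.date and Donation.amount using the raw CSV strings; unlike the voter and voting-record importers there is no multi-format date parsing here, so a non-ISO date such as 03/14/2024 would fail field coercion, and amounts carrying currency symbols or thousands separators would not convert to Decimal. A small normalization helper in the same spirit as the parsing done elsewhere in this file could look like this (a sketch; the names are illustrative and not part of the diff):

from datetime import datetime
from decimal import Decimal, InvalidOperation

def parse_donation_fields(raw_date, raw_amount):
    """Sketch: normalise CSV date/amount strings before the Donation lookup/upsert."""
    parsed_date = None
    for fmt in ("%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"):
        try:
            parsed_date = datetime.strptime(str(raw_date).strip(), fmt).date()
            break
        except ValueError:
            continue
    try:
        amount = Decimal(str(raw_amount).replace("$", "").replace(",", "").strip())
    except InvalidOperation:
        amount = None
    return parsed_date, amount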
return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + + mapping = {} + for field_name, _ in DONATION_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + count = 0 + errors = 0 + failed_rows = [] + for row in reader: + try: + voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None + if not voter_id: + row["Import Error"] = "Missing voter ID" + failed_rows.append(row) + errors += 1 + continue + + try: + voter = Voter.objects.get(tenant=tenant, voter_id=voter_id) + except Voter.DoesNotExist: + row["Import Error"] = f"Voter {voter_id} not found" + failed_rows.append(row) + errors += 1 + continue + + date = row.get(mapping.get('date')) + amount = row.get(mapping.get('amount')) + method_name = row.get(mapping.get('method')) + + if not date or not amount: + row["Import Error"] = "Missing date or amount" + failed_rows.append(row) + errors += 1 + continue + + method = None + if method_name and method_name.strip(): + method, _ = DonationMethod.objects.get_or_create( + tenant=tenant, + name=method_name + ) + + defaults = {} + if method: + defaults['method'] = method + + Donation.objects.update_or_create( + voter=voter, + date=date, + amount=amount, + defaults=defaults + ) + count += 1 + except Exception as e: + logger.error(f"Error importing: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if os.path.exists(file_path): + os.remove(file_path) + self.message_user(request, f"Successfully imported {count} donations.") + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}") + if errors > 0: + error_url = reverse("admin:donation-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. 
Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = DonationImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Donation Fields", + 'headers': headers, + 'model_fields': DONATION_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = DonationImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Donations" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +@admin.register(Interaction) +class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('id', 'voter', 'volunteer', 'type', 'date', 'description') + list_filter = ('voter__tenant', 'type', 'date', 'volunteer') + search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'description', 'volunteer__first_name', 'volunteer__last_name') + change_list_template = "admin/interaction_change_list.html" + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='interaction-download-errors'), + path('import-interactions/', self.admin_site.admin_view(self.import_interactions), name='import-interactions'), + ] + return my_urls + urls + + def import_interactions(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {} + for field_name, _ in INTERACTION_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + total_count = 0 + create_count = 0 + update_count = 0 + preview_data = [] + for row in reader: + total_count += 1 + voter_id = row.get(mapping.get('voter_id')) + date = row.get(mapping.get('date')) + exists = False + if voter_id and date: + exists = Interaction.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, date=date).exists() + + if exists: + update_count += 1 + action = 'update' + else: + create_count += 1 + action = 'create' + + if len(preview_data) < 10: + preview_data.append({ + 'action': action, + 'identifier': f"Voter: {voter_id}", + 'details': f"Date: {date}, Desc: {row.get(mapping.get('description', '')) or ''}" + }) + context = self.admin_site.each_context(request) + context.update({ + 'title': "Import Preview", + 'total_count': total_count, + 'create_count': create_count, + 'update_count': update_count, + 'preview_data': preview_data, + 'mapping': mapping, + 'file_path': file_path, + 'tenant_id': tenant_id, + 'action_url': 
request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + + mapping = {} + for field_name, _ in INTERACTION_MAPPABLE_FIELDS: + mapping[field_name] = request.POST.get(f'map_{field_name}') + + try: + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.DictReader(f) + count = 0 + errors = 0 + failed_rows = [] + for row in reader: + try: + voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None + if not voter_id: + row["Import Error"] = "Missing voter ID" + failed_rows.append(row) + errors += 1 + continue + + try: + voter = Voter.objects.get(tenant=tenant, voter_id=voter_id) + except Voter.DoesNotExist: + row["Import Error"] = f"Voter {voter_id} not found" + failed_rows.append(row) + errors += 1 + continue + + date = row.get(mapping.get('date')) + type_name = row.get(mapping.get('type')) + volunteer_email = row.get(mapping.get('volunteer_email')) + description = row.get(mapping.get('description')) + notes = row.get(mapping.get('notes')) + + if not date or not description: + row["Import Error"] = "Missing date or description" + failed_rows.append(row) + errors += 1 + continue + + volunteer = None + if volunteer_email and volunteer_email.strip(): + try: + volunteer = Volunteer.objects.get(tenant=tenant, email=volunteer_email.strip()) + except Volunteer.DoesNotExist: + pass + interaction_type = None + if type_name and type_name.strip(): + interaction_type, _ = InteractionType.objects.get_or_create( + tenant=tenant, + name=type_name + ) + + defaults = {} + if volunteer: + defaults['volunteer'] = volunteer + if interaction_type: + defaults['type'] = interaction_type + if description and description.strip(): + defaults['description'] = description + if notes and notes.strip(): + defaults['notes'] = notes + + Interaction.objects.update_or_create( + voter=voter, + date=date, + defaults=defaults + ) + count += 1 + except Exception as e: + logger.error(f"Error importing: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if os.path.exists(file_path): + os.remove(file_path) + self.message_user(request, f"Successfully imported {count} interactions.") + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}") + if errors > 0: + error_url = reverse("admin:interaction-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. 
Download failed records"), level=messages.WARNING) + return redirect("..") + except Exception as e: + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = InteractionImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='UTF-8') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Interaction Fields", + 'headers': headers, + 'model_fields': INTERACTION_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = InteractionImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Interactions" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +@admin.register(VoterLikelihood) +class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin): + list_display = ('id', 'voter', 'election_type', 'likelihood') + list_filter = ('voter__tenant', 'election_type', 'likelihood') + search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id') + change_list_template = "admin/voterlikelihood_change_list.html" + + def get_urls(self): + urls = super().get_urls() + my_urls = [ + path('download-errors/', self.admin_site.admin_view(self.download_errors), name='voterlikelihood-download-errors'), + path('import-likelihoods/', self.admin_site.admin_view(self.import_likelihoods), name='import-likelihoods'), + ] + return my_urls + urls + + def import_likelihoods(self, request): + if request.method == "POST": + if "_preview" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + with open(file_path, 'r', encoding='utf-8-sig') as f: + # Fast count and partial preview + total_count = sum(1 for line in f) - 1 + f.seek(0) + reader = csv.DictReader(f) + preview_rows = [] + voter_ids_for_preview = set() + election_types_for_preview = set() + + v_id_col = mapping.get('voter_id') + et_col = mapping.get('election_type') + + if not v_id_col or not et_col: + raise ValueError("Missing mapping for Voter ID or Election Type") + + for i, row in enumerate(reader): + if i < 10: + preview_rows.append(row) + v_id = row.get(v_id_col) + et_name = row.get(et_col) + if v_id: voter_ids_for_preview.add(str(v_id).strip()) + if et_name: election_types_for_preview.add(str(et_name).strip()) + else: + break + + existing_likelihoods = set(VoterLikelihood.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids_for_preview, + election_type__name__in=election_types_for_preview + ).values_list("voter__voter_id", "election_type__name")) + + preview_data = [] + for row in preview_rows: + v_id = str(row.get(v_id_col, '')).strip() + et_name = str(row.get(et_col, '')).strip() + 
action = "update" if (v_id, et_name) in existing_likelihoods else "create" + preview_data.append({ + "action": action, + "identifier": f"Voter: {v_id}, Election: {et_name}", + "details": f"Likelihood: {row.get(mapping.get('likelihood', '')) or ''}" + }) + + context = self.admin_site.each_context(request) + context.update({ + "title": "Import Preview", + "total_count": total_count, + "create_count": "N/A", + "update_count": "N/A", + "preview_data": preview_data, + "mapping": mapping, + "file_path": file_path, + "tenant_id": tenant_id, + "action_url": request.path, + "opts": self.model._meta, + }) + return render(request, "admin/import_preview.html", context) + except Exception as e: + self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR) + return redirect("..") + + elif "_import" in request.POST: + file_path = request.POST.get('file_path') + tenant_id = request.POST.get('tenant') + tenant = Tenant.objects.get(id=tenant_id) + mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")} + + try: + count = 0 + created_count = 0 + updated_count = 0 + skipped_no_change = 0 + skipped_no_id = 0 + errors = 0 + failed_rows = [] + batch_size = 500 + + likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES) + likelihood_reverse = {v.lower(): k for k, v in likelihood_choices.items()} + + # Pre-fetch election types for this tenant + election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)} + + def chunk_reader(reader, size): + chunk = [] + for row in reader: + chunk.append(row) + if len(chunk) == size: + yield chunk + chunk = [] + if chunk: + yield chunk + + with open(file_path, "r", encoding="utf-8-sig") as f: + reader = csv.DictReader(f) + v_id_col = mapping.get("voter_id") + et_col = mapping.get("election_type") + l_col = mapping.get("likelihood") + + if not v_id_col or not et_col or not l_col: + raise ValueError("Missing mapping for Voter ID, Election Type, or Likelihood") + + print(f"DEBUG: Starting likelihood import. 
Tenant: {tenant.name}") + + total_processed = 0 + for chunk in chunk_reader(reader, batch_size): + with transaction.atomic(): + voter_ids = [str(row.get(v_id_col)).strip() for row in chunk if row.get(v_id_col)] + et_names = [str(row.get(et_col)).strip() for row in chunk if row.get(et_col)] + + # Fetch existing voters + voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")} + + # Fetch existing likelihoods + existing_likelihoods = { + (vl.voter.voter_id, vl.election_type.name): vl + for vl in VoterLikelihood.objects.filter( + voter__tenant=tenant, + voter__voter_id__in=voter_ids, + election_type__name__in=et_names + ).select_related("voter", "election_type") + } + + to_create = [] + to_update = [] + processed_in_batch = set() + + for row in chunk: + total_processed += 1 + try: + raw_v_id = row.get(v_id_col) + raw_et_name = row.get(et_col) + raw_l_val = row.get(l_col) + + if raw_v_id is None or raw_et_name is None or raw_l_val is None: + skipped_no_id += 1 + continue + + v_id = str(raw_v_id).strip() + et_name = str(raw_et_name).strip() + l_val = str(raw_l_val).strip() + + if not v_id or not et_name or not l_val: + skipped_no_id += 1 + continue + + if (v_id, et_name) in processed_in_batch: + continue + processed_in_batch.add((v_id, et_name)) + + voter = voters.get(v_id) + if not voter: + print(f"DEBUG: Voter {v_id} not found for likelihood import") + row["Import Error"] = f"Voter {v_id} not found" + failed_rows.append(row) + errors += 1 + continue + + # Get or create election type + if et_name not in election_types: + election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=et_name) + election_types[et_name] = election_type + election_type = election_types[et_name] + + # Normalize likelihood + normalized_l = None + l_val_lower = l_val.lower().replace(' ', '_') + if l_val_lower in likelihood_choices: + normalized_l = l_val_lower + elif l_val_lower in likelihood_reverse: + normalized_l = likelihood_reverse[l_val_lower] + else: + # Try to find by display name more broadly + for k, v in likelihood_choices.items(): + if v.lower() == l_val.lower(): + normalized_l = k + break + + if not normalized_l: + row["Import Error"] = f"Invalid likelihood value: {l_val}" + failed_rows.append(row) + errors += 1 + continue + + vl = existing_likelihoods.get((v_id, et_name)) + created = False + if not vl: + vl = VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l) + created = True + + if not created and vl.likelihood == normalized_l: + skipped_no_change += 1 + continue + + vl.likelihood = normalized_l + + if created: + to_create.append(vl) + created_count += 1 + else: + to_update.append(vl) + updated_count += 1 + + count += 1 + except Exception as e: + print(f"DEBUG: Error importing row {total_processed}: {e}") + row["Import Error"] = str(e) + failed_rows.append(row) + errors += 1 + + if to_create: + VoterLikelihood.objects.bulk_create(to_create) + if to_update: + VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=250) + + print(f"DEBUG: Likelihood import progress: {total_processed} processed. {count} created/updated. {skipped_no_change} skipped (no change). {skipped_no_id} skipped (no ID). {errors} errors.") + + if os.path.exists(file_path): + os.remove(file_path) + + success_msg = f"Import complete: {count} likelihoods created/updated. 
({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing data, {errors} errors)" + self.message_user(request, success_msg) + + request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows + request.session.modified = True + if errors > 0: + error_url = reverse("admin:voterlikelihood-download-errors") + self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}'>Download failed records</a>"), level=messages.WARNING) + return redirect("..") + except Exception as e: + print(f"DEBUG: Likelihood import failed: {e}") + self.message_user(request, f"Error processing file: {e}", level=messages.ERROR) + return redirect("..") + else: + form = VoterLikelihoodImportForm(request.POST, request.FILES) + if form.is_valid(): + csv_file = request.FILES['file'] + tenant = form.cleaned_data['tenant'] + + if not csv_file.name.endswith('.csv'): + self.message_user(request, "Please upload a CSV file.", level=messages.ERROR) + return redirect("..") + + with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp: + for chunk in csv_file.chunks(): + tmp.write(chunk) + file_path = tmp.name + + with open(file_path, 'r', encoding='utf-8-sig') as f: + reader = csv.reader(f) + headers = next(reader) + + context = self.admin_site.each_context(request) + context.update({ + 'title': "Map Likelihood Fields", + 'headers': headers, + 'model_fields': VOTER_LIKELIHOOD_MAPPABLE_FIELDS, + 'tenant_id': tenant.id, + 'file_path': file_path, + 'action_url': request.path, + 'opts': self.model._meta, + }) + return render(request, "admin/import_mapping.html", context) + else: + form = VoterLikelihoodImportForm() + + context = self.admin_site.each_context(request) + context['form'] = form + context['title'] = "Import Likelihoods" + context['opts'] = self.model._meta + return render(request, "admin/import_csv.html", context) + +@admin.register(CampaignSettings) +class CampaignSettingsAdmin(admin.ModelAdmin): + list_display = ('tenant', 'donation_goal', 'twilio_from_number') + list_filter = ('tenant',) + fields = ('tenant', 'donation_goal', 'twilio_account_sid', 'twilio_auth_token', 'twilio_from_number') \ No newline at end of file diff --git a/core/forms.py b/core/forms.py index 9072933..107a47f 100644 --- a/core/forms.py +++ b/core/forms.py @@ -296,3 +296,12 @@ class VolunteerEventAddForm(forms.ModelForm): for field in self.fields.values(): field.widget.attrs.update({'class': 'form-control'}) self.fields['volunteer'].widget.attrs.update({'class': 'form-select'}) + +class VotingRecordImportForm(forms.Form): + tenant = forms.ModelChoiceField(queryset=Tenant.objects.all(), label="Campaign") + file = forms.FileField(label="Select CSV file") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields['tenant'].widget.attrs.update({'class': 'form-control form-select'}) + self.fields['file'].widget.attrs.update({'class': 'form-control'}) \ No newline at end of file diff --git a/core/middleware.py b/core/middleware.py new file mode 100644 index 0000000..65898ba --- /dev/null +++ b/core/middleware.py @@ -0,0 +1,35 @@ +from django.shortcuts import redirect +from django.urls import reverse +from django.conf import settings + +class LoginRequiredMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + if not request.user.is_authenticated: + path = request.path_info + + # Allow access to login, logout, admin, and any other exempted paths + # We use try/except in case URLs
are not defined yet + try: + login_url = reverse('login') + logout_url = reverse('logout') + except Exception: + login_url = '/accounts/login/' + logout_url = '/accounts/logout/' + + exempt_urls = [ + login_url, + logout_url, + '/admin/', + ] + + # Check if path starts with any of the exempt URLs + is_exempt = any(path.startswith(url) for url in exempt_urls) + + if not is_exempt: + return redirect(f"{login_url}?next={path}") + + response = self.get_response(request) + return response \ No newline at end of file diff --git a/core/templates/admin/votingrecord_change_list.html b/core/templates/admin/votingrecord_change_list.html new file mode 100644 index 0000000..57151b0 --- /dev/null +++ b/core/templates/admin/votingrecord_change_list.html @@ -0,0 +1,38 @@ +{% extends "admin/change_list.html" %} +{% load i18n admin_urls static admin_list %} + +{% block object-tools-items %} +
  • + Import Voting Records +
  • + {{ block.super }} +{% endblock %} + +{% block search %} + {{ block.super }} +
    + + +
    + + +{% endblock %} diff --git a/core/templates/base.html b/core/templates/base.html index b90e601..73437e3 100644 --- a/core/templates/base.html +++ b/core/templates/base.html @@ -50,7 +50,13 @@
    Admin Panel {% if user.is_authenticated %} - {{ user.username }} + {{ user.username }} +
    + {% csrf_token %} + +
    + {% else %} + Login {% endif %}
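Note on the navbar hunk above: the logout action becomes a small POST form with {% csrf_token %} rather than a plain link, which matches LogoutView in Django 5.x, where logging out over GET is no longer accepted. The sketch below is illustrative only and not part of the patch; the file path, the username/password values, the stock user model, and the default auth URL names ('login'/'logout') are all assumptions.

# Illustrative smoke-test sketch (assumed file core/tests/test_auth_flow.py, not in this diff).
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse


class LogoutIsPostOnlyTests(TestCase):
    def setUp(self):
        # Assumes the stock User model; adjust if a custom user model is configured.
        self.user = get_user_model().objects.create_user("navbar-user", password="s3cret-pass")
        self.client.force_login(self.user)

    def test_get_logout_is_rejected_on_django_5(self):
        # On Django 5.x, LogoutView only allows POST, which is why base.html
        # renders a form with a CSRF token instead of a bare link.
        self.assertEqual(self.client.get(reverse("logout")).status_code, 405)

    def test_post_logout_clears_the_session(self):
        self.client.post(reverse("logout"))
        self.assertNotIn("_auth_user_id", self.client.session)

On older Django versions (4.2 and earlier) the GET assertion above does not hold, so treat it as a version-specific check rather than part of the intended behaviour of this change.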
    @@ -111,4 +117,4 @@ }); - + \ No newline at end of file diff --git a/core/templates/core/volunteer_list.html b/core/templates/core/volunteer_list.html index f37eb25..0f8947c 100644 --- a/core/templates/core/volunteer_list.html +++ b/core/templates/core/volunteer_list.html @@ -51,8 +51,7 @@ Name Email Phone - Interests - Actions + Interests @@ -68,20 +67,17 @@ {{ volunteer.email }} {{ volunteer.phone|default:"-" }} - + {% for interest in volunteer.interests.all %} {{ interest.name }} {% empty %} No interests listed {% endfor %} - - View & Edit - {% empty %} - +

    No volunteers found matching your search.

    Add the first volunteer @@ -208,4 +204,4 @@ document.addEventListener('DOMContentLoaded', function() { } }); -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/core/templates/registration/login.html b/core/templates/registration/login.html new file mode 100644 index 0000000..3e62255 --- /dev/null +++ b/core/templates/registration/login.html @@ -0,0 +1,52 @@ +{% extends "base.html" %} +{% load static %} + +{% block content %} +
    +
    +
    +
    +
    +

    Login

    +

    Please log in to access the Grassroots Campaign Manager.

    + + {% if form.errors %} +
    + Your username and password didn't match. Please try again. +
    + {% endif %} + + {% if next %} + {% if user.is_authenticated %} +
    + Your account doesn't have access to this page. To proceed, + please log in with an account that has access. +
    + {% else %} +
    + Please log in to see this page. +
    + {% endif %} + {% endif %} + +
    + {% csrf_token %} +
    + + +
    +
    + + +
    +
    + +
    + +
    +
    +
    +
    +
    +
    +{% endblock %}
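Taken together, LoginRequiredMiddleware and the new registration/login.html template gate the whole app behind authentication while leaving the login, logout, and /admin/ paths reachable. A rough test sketch of that contract follows; it is not part of the patch, the file path is hypothetical, and it assumes the default auth URL names are available and that "/" is served by one of the app's non-exempt views.

# Illustrative test sketch for core.middleware.LoginRequiredMiddleware
# (assumed file core/tests/test_login_required.py, not in this diff).
from django.test import TestCase
from django.urls import reverse


class LoginRequiredMiddlewareTests(TestCase):
    def test_anonymous_visitor_is_redirected_with_next(self):
        # The middleware redirects before the URL is even resolved,
        # so any non-exempt path behaves the same way.
        response = self.client.get("/")
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, f"{reverse('login')}?next=/")

    def test_login_page_is_exempt(self):
        # The login URL must stay whitelisted, otherwise the middleware
        # would redirect the login page to itself in a loop.
        response = self.client.get(reverse("login"))
        self.assertEqual(response.status_code, 200)

    def test_admin_prefix_is_exempt(self):
        # /admin/ is whitelisted, so anonymous visitors see the admin's
        # own login redirect rather than the app's login page.
        response = self.client.get("/admin/")
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response.url.startswith("/admin/login"))

Because the middleware matches on path prefixes, anything not in the exempt list is redirected; new public endpoints (webhooks, health checks, static/media in some deployments) would need to be added to that list explicitly.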