# core/admin.py — Django admin configuration with multi-step CSV import flows
# for voters, events, volunteers, and related lookup tables.
from decimal import Decimal
|
|
from datetime import datetime, date
|
|
from django.db import transaction
|
|
from django.http import HttpResponse
|
|
from django.utils.safestring import mark_safe
|
|
import csv
|
|
import io
|
|
import logging
|
|
import tempfile
|
|
import os
|
|
from django.contrib import admin, messages
|
|
from django.urls import path, reverse
|
|
from django.shortcuts import render, redirect
|
|
from django.template.response import TemplateResponse
|
|
from .models import (
|
|
format_phone_number,
|
|
Tenant, TenantUserRole, InteractionType, DonationMethod, ElectionType, EventType, Voter,
|
|
VotingRecord, Event, EventParticipation, Donation, Interaction, VoterLikelihood, CampaignSettings,
|
|
Interest, Volunteer, VolunteerEvent, ParticipationStatus, VolunteerRole
|
|
)
|
|
from .forms import (
|
|
VoterImportForm, EventImportForm, EventParticipationImportForm,
|
|
DonationImportForm, InteractionImportForm, VoterLikelihoodImportForm,
|
|
VolunteerImportForm, VotingRecordImportForm
|
|
)
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
# Each *_MAPPABLE_FIELDS list holds (model_field, human_label) pairs offered
# in the CSV column-mapping screen for the corresponding import flow.
# List order controls display order in the mapping UI.

VOTER_MAPPABLE_FIELDS = [
    ('voter_id', 'Voter ID'),
    ('first_name', 'First Name'),
    ('last_name', 'Last Name'),
    ('nickname', 'Nickname'),
    ('birthdate', 'Birthdate'),
    ('address_street', 'Street Address'),
    ('city', 'City'),
    ('state', 'State'),
    ('prior_state', 'Prior State'),
    ('zip_code', 'Zip Code'),
    ('county', 'County'),
    ('neighborhood', 'Neighborhood'),
    ('phone', 'Phone'),
    ('notes', 'Notes'),
    ('phone_type', 'Phone Type'),
    ('email', 'Email'),
    ('district', 'District'),
    ('precinct', 'Precinct'),
    ('registration_date', 'Registration Date'),
    ('is_targeted', 'Is Targeted'),
    ('candidate_support', 'Candidate Support'),
    ('yard_sign', 'Yard Sign'),
    ('window_sticker', 'Window Sticker'),
    ('latitude', 'Latitude'),
    ('longitude', 'Longitude'),
    ('secondary_phone', 'Secondary Phone'),
    ('secondary_phone_type', 'Secondary Phone Type'),
    ('door_visit', 'Door Visit'),
]

EVENT_MAPPABLE_FIELDS = [
    ('name', 'Name'),
    ('date', 'Date'),
    ('start_time', 'Start Time'),
    ('end_time', 'End Time'),
    ('event_type', 'Event Type (Name)'),
    ('description', 'Description'),
    ('location_name', 'Location Name'),
    ('address', 'Address'),
    ('city', 'City'),
    ('state', 'State'),
    ('zip_code', 'Zip Code'),
    ('latitude', 'Latitude'),
    ('longitude', 'Longitude'),
]

EVENT_PARTICIPATION_MAPPABLE_FIELDS = [
    ('voter_id', 'Voter ID'),
    ('event_name', 'Event Name'),
    ('participation_status', 'Participation Status'),
]

DONATION_MAPPABLE_FIELDS = [
    ('voter_id', 'Voter ID'),
    ('date', 'Date'),
    ('amount', 'Amount'),
    ('method', 'Donation Method (Name)'),
]

INTERACTION_MAPPABLE_FIELDS = [
    ('voter_id', 'Voter ID'),
    ('volunteer_email', 'Volunteer Email'),
    ('date', 'Date'),
    ('type', 'Interaction Type (Name)'),
    ('description', 'Description'),
    ('notes', 'Notes'),
]

VOLUNTEER_MAPPABLE_FIELDS = [
    ('first_name', 'First Name'),
    ('last_name', 'Last Name'),
    ('email', 'Email'),
    ('phone', 'Phone'),
    ('notes', 'Notes'),
]

VOTER_LIKELIHOOD_MAPPABLE_FIELDS = [
    ('voter_id', 'Voter ID'),
    ('election_type', 'Election Type (Name)'),
    ('likelihood', 'Likelihood'),
]

VOTING_RECORD_MAPPABLE_FIELDS = [
    ('voter_id', 'Voter ID'),
    ('election_date', 'Election Date'),
    ('election_description', 'Election Description'),
    ('primary_party', 'Primary Party'),
]
|
|
|
|
class BaseImportAdminMixin:
    """Shared helpers for ModelAdmin classes that support chunked CSV imports.

    Relies on the host ``ModelAdmin`` providing ``self.model`` and
    ``self.message_user``. Import views stash failed rows under the session
    key ``<model_name>_import_errors`` for later download.
    """

    def download_errors(self, request):
        """Stream the failed rows of the most recent import as a CSV attachment.

        Redirects back to the changelist with a warning when no error log is
        present in the session.
        """
        model_name = self.model._meta.model_name
        # Lazy %-style args avoid building the string when INFO is disabled.
        logger.info("download_errors called for %s", model_name)
        session_key = f"{model_name}_import_errors"
        failed_rows = request.session.get(session_key, [])
        if not failed_rows:
            self.message_user(request, "No error log found in session.", level=messages.WARNING)
            return redirect("..")

        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = f"attachment; filename={model_name}_import_errors.csv"

        # Rows may carry heterogeneous keys (e.g. the added "Import Error"
        # column), so the header is the union of all keys.
        all_keys = set()
        for row in failed_rows:
            all_keys.update(row.keys())

        writer = csv.DictWriter(response, fieldnames=sorted(all_keys))
        writer.writeheader()
        writer.writerows(failed_rows)

        return response

    def chunk_reader(self, reader, size):
        """Yield lists of at most ``size`` rows from ``reader``.

        The final chunk may be shorter; an empty reader yields nothing.
        """
        chunk = []
        for row in reader:
            chunk.append(row)
            if len(chunk) == size:
                yield chunk
                chunk = []
        if chunk:
            yield chunk
|
|
class TenantUserRoleInline(admin.TabularInline):
    # Inline editor for user-role assignments shown on the Tenant change page.
    model = TenantUserRole
    extra = 1  # one blank row for quick additions
|
|
|
|
class CampaignSettingsInline(admin.StackedInline):
    # One-to-one campaign settings edited inline on the Tenant change page.
    model = CampaignSettings
    can_delete = False  # settings record must always exist alongside the tenant
|
|
|
|
@admin.register(Tenant)
class TenantAdmin(admin.ModelAdmin):
    # Tenant admin with inline editing of user roles and campaign settings.
    list_display = ('name', 'created_at')
    search_fields = ('name',)
    inlines = [TenantUserRoleInline, CampaignSettingsInline]
|
|
|
|
@admin.register(TenantUserRole)
class TenantUserRoleAdmin(admin.ModelAdmin):
    # Standalone admin for per-tenant user role assignments.
    list_display = ('user', 'tenant', 'role')
    list_filter = ('tenant', 'role')
    search_fields = ('user__username', 'tenant__name')
|
|
|
|
@admin.register(InteractionType)
class InteractionTypeAdmin(admin.ModelAdmin):
    # Lookup-table admin for interaction types, scoped per tenant.
    list_display = ('name', 'tenant', 'is_active')
    list_filter = ('tenant', 'is_active')
    search_fields = ('name',)
|
|
|
|
@admin.register(DonationMethod)
class DonationMethodAdmin(admin.ModelAdmin):
    # Lookup-table admin for donation methods, scoped per tenant.
    list_display = ('name', 'tenant', 'is_active')
    list_filter = ('tenant', 'is_active')
    search_fields = ('name',)
|
|
|
|
@admin.register(VolunteerRole)
class VolunteerRoleAdmin(admin.ModelAdmin):
    # Lookup-table admin for volunteer roles, scoped per tenant.
    # String literals single-quoted for consistency with the sibling
    # lookup-table admins in this module.
    list_display = ('name', 'tenant', 'is_active')
    list_filter = ('tenant', 'is_active')
    search_fields = ('name',)
|
|
|
|
@admin.register(ElectionType)
class ElectionTypeAdmin(admin.ModelAdmin):
    # Lookup-table admin for election types, scoped per tenant.
    list_display = ('name', 'tenant', 'is_active')
    list_filter = ('tenant', 'is_active')
    search_fields = ('name',)
|
|
|
|
@admin.register(EventType)
class EventTypeAdmin(admin.ModelAdmin):
    # Lookup-table admin for event types, including their volunteer-role setup.
    list_display = ('name', 'tenant', 'is_active', 'default_volunteer_role')
    list_filter = ('tenant', 'is_active')
    search_fields = ('name',)
    # M2M widget for the roles selectable at events of this type.
    filter_horizontal = ('available_roles',)
|
|
|
|
|
|
@admin.register(ParticipationStatus)
class ParticipationStatusAdmin(admin.ModelAdmin):
    # Lookup-table admin for participation statuses with a custom changelist
    # template that needs the tenant list in its context.
    list_display = ('name', 'tenant', 'is_active')
    list_filter = ('tenant', 'is_active')
    search_fields = ('name',)
    change_list_template = 'admin/participationstatus_change_list.html'

    def changelist_view(self, request, extra_context=None):
        # Expose all tenants to the custom changelist template.
        extra_context = extra_context or {}
        from core.models import Tenant
        extra_context['tenants'] = Tenant.objects.all()
        return super().changelist_view(request, extra_context=extra_context)
|
|
|
|
@admin.register(Interest)
class InterestAdmin(admin.ModelAdmin):
    # Lookup-table admin for volunteer interests, scoped per tenant.
    list_display = ('name', 'tenant')
    list_filter = ('tenant',)
    fields = ('tenant', 'name')
    search_fields = ('name',)
|
|
|
|
class VotingRecordInline(admin.TabularInline):
    # Voting history shown inline on the Voter change page.
    model = VotingRecord
    extra = 1
|
|
|
|
class DonationInline(admin.TabularInline):
    # Donations shown inline on the Voter change page.
    model = Donation
    extra = 1
|
|
|
|
class InteractionInline(admin.TabularInline):
    # Interactions shown inline on the Voter change page.
    model = Interaction
    extra = 1
    # Autocomplete avoids rendering full dropdowns for large related tables.
    autocomplete_fields = ['voter', 'type', 'volunteer']
|
|
|
|
class VoterLikelihoodInline(admin.TabularInline):
    # Per-election turnout likelihood shown inline on the Voter change page.
    model = VoterLikelihood
    extra = 1
|
|
|
|
class VolunteerEventInline(admin.TabularInline):
    # Event assignments shown inline on the Volunteer change page.
    model = VolunteerEvent
    extra = 1
|
|
|
|
@admin.register(Voter)
class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    # Voter admin: related records inline plus a chunked CSV import pipeline
    # (see import_voters). Uses a custom changelist template.
    list_display = ('first_name', 'last_name', 'nickname', 'voter_id', 'tenant', 'district', 'candidate_support', 'is_targeted', 'city', 'state', 'prior_state')
    list_filter = ('tenant', 'candidate_support', 'is_targeted', 'phone_type', 'yard_sign', 'district', 'city', 'state', 'prior_state')
    search_fields = ('first_name', 'last_name', 'nickname', 'voter_id', 'address', 'city', 'state', 'prior_state', 'zip_code', 'county')
    inlines = [VotingRecordInline, DonationInline, InteractionInline, VoterLikelihoodInline]
    # `address` is derived from street/city/state/zip during import, so it is
    # not directly editable here.
    readonly_fields = ('address',)
    change_list_template = "admin/voter_change_list.html"
|
|
|
|
def changelist_view(self, request, extra_context=None):
|
|
extra_context = extra_context or {}
|
|
from core.models import Tenant
|
|
extra_context["tenants"] = Tenant.objects.all()
|
|
return super().changelist_view(request, extra_context=extra_context)
|
|
|
|
def get_urls(self):
|
|
urls = super().get_urls()
|
|
my_urls = [
|
|
path('download-errors/', self.admin_site.admin_view(self.download_errors), name='voter-download-errors'),
|
|
path('import-voters/', self.admin_site.admin_view(self.import_voters), name='import-voters'),
|
|
]
|
|
return my_urls + urls
|
|
|
|
|
|
    def import_voters(self, request):
        """Three-step voter CSV import.

        Step 1 (plain POST): validate the upload form, spool the file to a
        temp path, and render the column-mapping screen.
        Step 2 (POST with ``_preview``): show a 10-row dry-run preview using
        the submitted column mapping.
        Step 3 (POST with ``_import``): stream the temp file in chunks and
        bulk create/update voters for the selected tenant.
        GET renders the initial upload form.
        """
        if request.method == "POST":
            if "_preview" in request.POST:
                file_path = request.POST.get("file_path")
                tenant_id = request.POST.get("tenant")
                tenant = Tenant.objects.get(id=tenant_id)

                # CSV column chosen for each importable model field (may be None).
                mapping = {}
                for field_name, _ in VOTER_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f"map_{field_name}")

                try:
                    with open(file_path, "r", encoding="utf-8-sig") as f:
                        # Optimization: Skip full count for very large files in preview if needed,
                        # but here we'll keep it for accuracy unless it's a known bottleneck.
                        # For now, let's just do a fast line count.
                        total_count = sum(1 for line in f) - 1  # minus header row
                        f.seek(0)
                        reader = csv.DictReader(f)
                        preview_rows = []
                        voter_ids_for_preview = []
                        # Only the first 10 data rows are previewed.
                        for i, row in enumerate(reader):
                            if i < 10:
                                preview_rows.append(row)
                                v_id = row.get(mapping.get("voter_id"))
                                if v_id:
                                    voter_ids_for_preview.append(v_id)
                            else:
                                break

                    # Which of the previewed IDs already exist for this tenant.
                    existing_preview_ids = set(Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids_for_preview).values_list("voter_id", flat=True))

                    preview_data = []
                    for row in preview_rows:
                        v_id = row.get(mapping.get("voter_id"))
                        action = "update" if v_id in existing_preview_ids else "create"
                        preview_data.append({
                            "action": action,
                            "identifier": v_id,
                            "details": f"{row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}".strip()
                        })

                    context = self.admin_site.each_context(request)
                    context.update({
                        "title": "Import Preview",
                        "total_count": total_count,
                        # Full create/update counts are not computed at preview time.
                        "create_count": "N/A",
                        "update_count": "N/A",
                        "preview_data": preview_data,
                        "mapping": mapping,
                        "file_path": file_path,
                        "tenant_id": tenant_id,
                        "action_url": request.path,
                        "opts": self.model._meta,
                    })
                    return render(request, "admin/import_preview.html", context)
                except Exception as e:
                    self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                    return redirect("..")

            elif "_import" in request.POST:
                file_path = request.POST.get("file_path")
                tenant_id = request.POST.get("tenant")
                tenant = Tenant.objects.get(id=tenant_id)

                # Keep only fields actually mapped to a CSV column.
                mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}

                try:
                    count = 0
                    created_count = 0
                    updated_count = 0
                    skipped_no_change = 0
                    skipped_no_id = 0
                    errors = 0
                    failed_rows = []
                    batch_size = 2000  # Increased batch size

                    # Pre-calculate choices and reverse mappings
                    # (display label, lowercased -> stored value).
                    support_choices = dict(Voter.SUPPORT_CHOICES)
                    support_reverse = {v.lower(): k for k, v in support_choices.items()}
                    yard_sign_choices = dict(Voter.YARD_SIGN_CHOICES)
                    yard_sign_reverse = {v.lower(): k for k, v in yard_sign_choices.items()}
                    window_sticker_choices = dict(Voter.WINDOW_STICKER_CHOICES)
                    window_sticker_reverse = {v.lower(): k for k, v in window_sticker_choices.items()}
                    phone_type_choices = dict(Voter.PHONE_TYPE_CHOICES)
                    phone_type_reverse = {v.lower(): k for k, v in phone_type_choices.items()}

                    # Identify what type of data is being imported to skip unnecessary logic
                    mapped_fields = set(mapping.keys())
                    is_address_related = any(f in mapped_fields for f in ["address_street", "city", "state", "zip_code"])
                    is_phone_related = any(f in mapped_fields for f in ["phone", "secondary_phone", "phone_type", "secondary_phone_type"])
                    is_coords_related = any(f in mapped_fields for f in ["latitude", "longitude"])

                    with open(file_path, "r", encoding="utf-8-sig") as f:
                        # Optimization: Use csv.reader instead of DictReader for performance
                        raw_reader = csv.reader(f)
                        headers = next(raw_reader)
                        header_to_idx = {h: i for i, h in enumerate(headers)}

                        v_id_col_name = mapping.get("voter_id")
                        if not v_id_col_name or v_id_col_name not in header_to_idx:
                            raise ValueError(f"Voter ID mapping '{v_id_col_name}' is missing or invalid")

                        v_id_idx = header_to_idx[v_id_col_name]

                        # Map internal field names to CSV column indices
                        mapping_indices = {k: header_to_idx[v] for k, v in mapping.items() if v in header_to_idx}

                        # Optimization: Only fetch needed fields
                        fields_to_fetch = {"id", "voter_id"} | mapped_fields
                        if is_address_related: fields_to_fetch.add("address")

                        # NOTE(review): print() used for progress reporting; consider logger.debug.
                        print(f"DEBUG: Starting optimized voter import. Tenant: {tenant.name}. Fields: {mapped_fields}")

                        total_processed = 0
                        # Use chunk_reader with the raw_reader
                        for chunk in self.chunk_reader(raw_reader, batch_size):
                            # One transaction per chunk keeps memory and lock
                            # time bounded on large files.
                            with transaction.atomic():
                                voter_ids = []
                                chunk_data = []
                                for row in chunk:
                                    # Row too short to even contain the ID column.
                                    if len(row) <= v_id_idx: continue
                                    v_id = row[v_id_idx].strip()
                                    if v_id:
                                        voter_ids.append(v_id)
                                        chunk_data.append((v_id, row))
                                    else:
                                        skipped_no_id += 1

                                # Fetch existing voters in one query
                                existing_voters = {
                                    v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)
                                    .only(*fields_to_fetch)
                                }

                                to_create = []
                                to_update = []
                                batch_updated_fields = set()
                                processed_in_batch = set()

                                for voter_id, row in chunk_data:
                                    total_processed += 1
                                    try:
                                        # First occurrence of a duplicated ID within a chunk wins.
                                        if voter_id in processed_in_batch: continue
                                        processed_in_batch.add(voter_id)

                                        voter = existing_voters.get(voter_id)
                                        created = False
                                        if not voter:
                                            voter = Voter(tenant=tenant, voter_id=voter_id)
                                            created = True

                                        changed = created
                                        record_updated_fields = set()

                                        # Process mapped fields
                                        for field_name, idx in mapping_indices.items():
                                            if field_name == "voter_id": continue
                                            if idx >= len(row): continue
                                            val = row[idx].strip()
                                            if val == "" and not created: continue  # Skip empty updates for existing records unless specifically desired?

                                            # Type conversion and normalization
                                            if field_name in ["is_targeted", "door_visit"]:
                                                val = val.lower() in ["true", "1", "yes"]
                                            elif field_name in ["birthdate", "registration_date"]:
                                                # Try several common date formats; skip field if none match.
                                                parsed_date = None
                                                for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
                                                    try:
                                                        parsed_date = datetime.strptime(val, fmt).date()
                                                        break
                                                    except: continue
                                                if parsed_date: val = parsed_date
                                                else: continue
                                            elif field_name == "candidate_support":
                                                # Accept either the stored value or the display label.
                                                val_lower = val.lower()
                                                if val_lower in support_choices: val = val_lower
                                                elif val_lower in support_reverse: val = support_reverse[val_lower]
                                                else: val = "unknown"
                                            elif field_name == "yard_sign":
                                                val_lower = val.lower()
                                                if val_lower in yard_sign_choices: val = val_lower
                                                elif val_lower in yard_sign_reverse: val = yard_sign_reverse[val_lower]
                                                else: val = "none"
                                            elif field_name == "window_sticker":
                                                val_lower = val.lower()
                                                if val_lower in window_sticker_choices: val = val_lower
                                                elif val_lower in window_sticker_reverse: val = window_sticker_reverse[val_lower]
                                                else: val = "none"
                                            elif field_name in ["phone_type", "secondary_phone_type"]:
                                                val_lower = val.lower()
                                                if val_lower in phone_type_choices: val = val_lower
                                                elif val_lower in phone_type_reverse: val = phone_type_reverse[val_lower]
                                                else: val = "cell"

                                            # Only record a change when the value actually differs.
                                            if getattr(voter, field_name) != val:
                                                setattr(voter, field_name, val)
                                                changed = True
                                                record_updated_fields.add(field_name)

                                        # Optimization: Only perform transformations if related fields are mapped
                                        if is_phone_related or created:
                                            old_p = voter.phone
                                            voter.phone = format_phone_number(voter.phone)
                                            if voter.phone != old_p:
                                                changed = True
                                                record_updated_fields.add("phone")

                                            old_sp = voter.secondary_phone
                                            voter.secondary_phone = format_phone_number(voter.secondary_phone)
                                            if voter.secondary_phone != old_sp:
                                                changed = True
                                                record_updated_fields.add("secondary_phone")

                                        # Truncate longitude to fit the model's precision.
                                        # NOTE(review): latitude is not truncated the same way — confirm intended.
                                        if (is_coords_related or created) and voter.longitude:
                                            try:
                                                new_lon = Decimal(str(voter.longitude)[:12])
                                                if voter.longitude != new_lon:
                                                    voter.longitude = new_lon
                                                    changed = True
                                                    record_updated_fields.add("longitude")
                                            except: pass

                                        # Rebuild the denormalized full address from its parts.
                                        if is_address_related or created:
                                            old_addr = voter.address
                                            parts = [voter.address_street, voter.city, voter.state, voter.zip_code]
                                            voter.address = ", ".join([p for p in parts if p])
                                            if voter.address != old_addr:
                                                changed = True
                                                record_updated_fields.add("address")

                                        if not changed:
                                            skipped_no_change += 1
                                            continue

                                        if created:
                                            to_create.append(voter)
                                            created_count += 1
                                        else:
                                            to_update.append(voter)
                                            batch_updated_fields.update(record_updated_fields)
                                            updated_count += 1

                                        count += 1
                                    except Exception as e:
                                        errors += 1
                                        # Cap the retained error log at 1000 rows.
                                        if len(failed_rows) < 1000:
                                            row_dict = dict(zip(headers, row))
                                            row_dict["Import Error"] = str(e)
                                            failed_rows.append(row_dict)

                                if to_create:
                                    Voter.objects.bulk_create(to_create, batch_size=batch_size)
                                if to_update:
                                    # bulk_update needs the union of fields changed anywhere in the batch.
                                    Voter.objects.bulk_update(to_update, list(batch_updated_fields), batch_size=batch_size)

                            # NOTE(review): progress via print(); consider logger.debug.
                            print(f"DEBUG: Voter import progress: {total_processed} processed. {count} created/updated. Errors: {errors}")

                    # Clean up the spooled temp file once fully consumed.
                    if os.path.exists(file_path):
                        os.remove(file_path)

                    self.message_user(request, f"Import complete: {count} voters created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing ID, {errors} errors)")

                    # Stash failed rows so download_errors can serve them.
                    request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
                    request.session.modified = True
                    if errors > 0:
                        error_url = reverse("admin:voter-download-errors")
                        self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}' download>Download failed records</a>"), level=messages.WARNING)
                    return redirect("..")
                except Exception as e:
                    print(f"DEBUG: Voter import failed: {e}")
                    self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                    return redirect("..")

            else:
                # Step 1: initial upload form POST.
                form = VoterImportForm(request.POST, request.FILES)
                if form.is_valid():
                    csv_file = request.FILES["file"]
                    tenant = form.cleaned_data["tenant"]
                    if not csv_file.name.endswith(".csv"):
                        self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                        return redirect("..")

                    # Spool the upload to disk so later steps can re-read it.
                    # delete=False: the path must outlive this request (removed after import).
                    with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp:
                        for chunk in csv_file.chunks(): tmp.write(chunk)
                        file_path = tmp.name

                    with open(file_path, "r", encoding="utf-8-sig") as f:
                        reader = csv.reader(f)
                        headers = next(reader)

                    context = self.admin_site.each_context(request)
                    context.update({
                        "title": "Map Voter Fields",
                        "headers": headers,
                        "model_fields": VOTER_MAPPABLE_FIELDS,
                        "tenant_id": tenant.id,
                        "file_path": file_path,
                        "action_url": request.path,
                        "opts": self.model._meta,
                    })
                    return render(request, "admin/import_mapping.html", context)
        else:
            form = VoterImportForm()

        # GET, or invalid upload form: render the upload page.
        context = self.admin_site.each_context(request)
        context["form"] = form
        context["title"] = "Import Voters"
        context["opts"] = self.model._meta
        return render(request, "admin/import_csv.html", context)
|
|
@admin.register(Event)
class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    # Event admin with a multi-step CSV import flow (see import_events).
    list_display = ('id', 'name', 'event_type', 'date', 'location_name', 'city', 'state', 'tenant')
    list_filter = ('tenant', 'date', 'event_type', 'city', 'state')
    search_fields = ('name', 'description', 'location_name', 'address', 'city', 'state', 'zip_code')
    change_list_template = "admin/event_change_list.html"
|
|
|
|
def changelist_view(self, request, extra_context=None):
|
|
extra_context = extra_context or {}
|
|
from core.models import Tenant
|
|
extra_context["tenants"] = Tenant.objects.all()
|
|
return super().changelist_view(request, extra_context=extra_context)
|
|
|
|
def get_urls(self):
|
|
urls = super().get_urls()
|
|
my_urls = [
|
|
path('download-errors/', self.admin_site.admin_view(self.download_errors), name='event-download-errors'),
|
|
path('import-events/', self.admin_site.admin_view(self.import_events), name='import-events'),
|
|
]
|
|
return my_urls + urls
|
|
|
|
def import_events(self, request):
|
|
if request.method == "POST":
|
|
if "_preview" in request.POST:
|
|
file_path = request.POST.get('file_path')
|
|
tenant_id = request.POST.get('tenant')
|
|
tenant = Tenant.objects.get(id=tenant_id)
|
|
mapping = {}
|
|
for field_name, _ in EVENT_MAPPABLE_FIELDS:
|
|
mapping[field_name] = request.POST.get(f'map_{field_name}')
|
|
try:
|
|
with open(file_path, 'r', encoding='UTF-8') as f:
|
|
reader = csv.DictReader(f)
|
|
total_count = 0
|
|
create_count = 0
|
|
update_count = 0
|
|
preview_data = []
|
|
for row in reader:
|
|
total_count += 1
|
|
date = row.get(mapping.get('date'))
|
|
event_type_name = row.get(mapping.get('event_type'))
|
|
event_name = row.get(mapping.get('name'))
|
|
exists = False
|
|
if date and event_type_name:
|
|
q = Event.objects.filter(tenant=tenant, date=date, event_type__name=event_type_name)
|
|
if event_name:
|
|
q = q.filter(name=event_name)
|
|
exists = q.exists()
|
|
|
|
if exists:
|
|
update_count += 1
|
|
action = 'update'
|
|
else:
|
|
create_count += 1
|
|
action = 'create'
|
|
|
|
if len(preview_data) < 10:
|
|
preview_data.append({
|
|
'action': action,
|
|
'identifier': f"{event_name or 'No Name'} ({date} - {event_type_name})",
|
|
'details': f"{row.get(mapping.get('city', '')) or ''}, {row.get(mapping.get('state', '')) or ''}"
|
|
})
|
|
context = self.admin_site.each_context(request)
|
|
context.update({
|
|
'title': "Import Preview",
|
|
'total_count': total_count,
|
|
'create_count': create_count,
|
|
'update_count': update_count,
|
|
'preview_data': preview_data,
|
|
'mapping': mapping,
|
|
'file_path': file_path,
|
|
'tenant_id': tenant_id,
|
|
'action_url': request.path,
|
|
'opts': self.model._meta,
|
|
})
|
|
return render(request, "admin/import_preview.html", context)
|
|
except Exception as e:
|
|
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
|
|
return redirect("..")
|
|
|
|
elif "_import" in request.POST:
|
|
file_path = request.POST.get('file_path')
|
|
tenant_id = request.POST.get('tenant')
|
|
tenant = Tenant.objects.get(id=tenant_id)
|
|
|
|
mapping = {}
|
|
for field_name, _ in EVENT_MAPPABLE_FIELDS:
|
|
mapping[field_name] = request.POST.get(f'map_{field_name}')
|
|
|
|
try:
|
|
with open(file_path, 'r', encoding='UTF-8') as f:
|
|
reader = csv.DictReader(f)
|
|
count = 0
|
|
errors = 0
|
|
failed_rows = []
|
|
for row in reader:
|
|
try:
|
|
date = row.get(mapping.get('date')) if mapping.get('date') else None
|
|
event_type_name = row.get(mapping.get('event_type')) if mapping.get('event_type') else None
|
|
description = row.get(mapping.get('description')) if mapping.get('description') else None
|
|
location_name = row.get(mapping.get('location_name')) if mapping.get('location_name') else None
|
|
name = row.get(mapping.get('name')) if mapping.get('name') else None
|
|
start_time = row.get(mapping.get('start_time')) if mapping.get('start_time') else None
|
|
end_time = row.get(mapping.get('end_time')) if mapping.get('end_time') else None
|
|
address = row.get(mapping.get('address')) if mapping.get('address') else None
|
|
city = row.get(mapping.get('city')) if mapping.get('city') else None
|
|
state = row.get(mapping.get('state')) if mapping.get('state') else None
|
|
zip_code = row.get(mapping.get('zip_code')) if mapping.get('zip_code') else None
|
|
latitude = row.get(mapping.get('latitude')) if mapping.get('latitude') else None
|
|
longitude = row.get(mapping.get('longitude')) if mapping.get('longitude') else None
|
|
|
|
if not date or not event_type_name:
|
|
row["Import Error"] = "Missing date or event type"
|
|
failed_rows.append(row)
|
|
errors += 1
|
|
continue
|
|
|
|
event_type, _ = EventType.objects.get_or_create(
|
|
tenant=tenant,
|
|
name=event_type_name
|
|
)
|
|
|
|
defaults = {}
|
|
if description and description.strip():
|
|
defaults['description'] = description
|
|
if location_name and location_name.strip():
|
|
defaults['location_name'] = location_name
|
|
if name and name.strip():
|
|
defaults['name'] = name
|
|
if start_time and start_time.strip():
|
|
defaults['start_time'] = start_time
|
|
if end_time and end_time.strip():
|
|
defaults['end_time'] = end_time
|
|
if address and address.strip():
|
|
defaults['address'] = address
|
|
if city and city.strip():
|
|
defaults['city'] = city
|
|
if state and state.strip():
|
|
defaults['state'] = state
|
|
if zip_code and zip_code.strip():
|
|
defaults['zip_code'] = zip_code
|
|
if latitude and latitude.strip():
|
|
defaults['latitude'] = latitude
|
|
if longitude and longitude.strip():
|
|
defaults['longitude'] = longitude
|
|
|
|
defaults['date'] = date
|
|
defaults['event_type'] = event_type
|
|
Event.objects.update_or_create(
|
|
tenant=tenant,
|
|
name=name or '',
|
|
defaults=defaults
|
|
)
|
|
count += 1
|
|
except Exception as e:
|
|
logger.error(f"Error importing: {e}")
|
|
row["Import Error"] = str(e)
|
|
failed_rows.append(row)
|
|
errors += 1
|
|
|
|
if os.path.exists(file_path):
|
|
os.remove(file_path)
|
|
self.message_user(request, f"Successfully imported {count} events.")
|
|
# Optimization: Limit error log size in session to avoid overflow
|
|
request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
|
|
request.session.modified = True
|
|
logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
|
|
if errors > 0:
|
|
error_url = reverse("admin:event-download-errors")
|
|
self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}' download>Download failed records</a>"), level=messages.WARNING)
|
|
return redirect("..")
|
|
except Exception as e:
|
|
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
|
|
return redirect("..")
|
|
else:
|
|
form = EventImportForm(request.POST, request.FILES)
|
|
if form.is_valid():
|
|
csv_file = request.FILES['file']
|
|
tenant = form.cleaned_data['tenant']
|
|
|
|
if not csv_file.name.endswith('.csv'):
|
|
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
|
|
return redirect("..")
|
|
|
|
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
|
|
for chunk in csv_file.chunks():
|
|
tmp.write(chunk)
|
|
file_path = tmp.name
|
|
|
|
with open(file_path, 'r', encoding='UTF-8') as f:
|
|
reader = csv.reader(f)
|
|
headers = next(reader)
|
|
|
|
context = self.admin_site.each_context(request)
|
|
context.update({
|
|
'title': "Map Event Fields",
|
|
'headers': headers,
|
|
'model_fields': EVENT_MAPPABLE_FIELDS,
|
|
'tenant_id': tenant.id,
|
|
'file_path': file_path,
|
|
'action_url': request.path,
|
|
'opts': self.model._meta,
|
|
})
|
|
return render(request, "admin/import_mapping.html", context)
|
|
else:
|
|
form = EventImportForm()
|
|
|
|
context = self.admin_site.each_context(request)
|
|
context['form'] = form
|
|
context['title'] = "Import Events"
|
|
context['opts'] = self.model._meta
|
|
return render(request, "admin/import_csv.html", context)
|
|
|
|
@admin.register(Volunteer)
class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    # Volunteer admin with a CSV import flow keyed on email (see import_volunteers).
    list_display = ('first_name', 'last_name', 'email', 'phone', 'tenant', 'user')
    ordering = ("last_name", "first_name")
    list_filter = ('tenant',)
    fields = ('tenant', 'user', 'first_name', 'last_name', 'email', 'phone', 'notes', 'interests')
    search_fields = ('first_name', 'last_name', 'email', 'phone')
    inlines = [VolunteerEventInline]
    filter_horizontal = ('interests',)
    change_list_template = "admin/volunteer_change_list.html"
|
|
|
|
def changelist_view(self, request, extra_context=None):
|
|
extra_context = extra_context or {}
|
|
from core.models import Tenant
|
|
extra_context["tenants"] = Tenant.objects.all()
|
|
return super().changelist_view(request, extra_context=extra_context)
|
|
|
|
def get_urls(self):
|
|
urls = super().get_urls()
|
|
my_urls = [
|
|
path('download-errors/', self.admin_site.admin_view(self.download_errors), name='volunteer-download-errors'),
|
|
path('import-volunteers/', self.admin_site.admin_view(self.import_volunteers), name='import-volunteers'),
|
|
]
|
|
return my_urls + urls
|
|
|
|
    def import_volunteers(self, request):
        """Multi-step volunteer CSV import.

        Step 1 (plain POST): validate the upload form, spool the file to a
        temp path, and render the column-mapping screen.
        Step 2 (POST with ``_preview``): full-file dry run with counts and a
        10-row preview.
        Step 3 (POST with ``_import``): upsert volunteers, matched on
        ``(tenant, email)``.
        GET renders the initial upload form.
        """
        if request.method == "POST":
            if "_preview" in request.POST:
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                # CSV column chosen for each importable model field (may be None).
                mapping = {}
                for field_name, _ in VOLUNTEER_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')
                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        total_count = 0
                        create_count = 0
                        update_count = 0
                        preview_data = []
                        for row in reader:
                            total_count += 1
                            email = row.get(mapping.get('email'))
                            # NOTE(review): one query per row — fine for small files,
                            # but slow on large ones.
                            exists = Volunteer.objects.filter(tenant=tenant, email=email).exists()
                            if exists:
                                update_count += 1
                                action = 'update'
                            else:
                                create_count += 1
                                action = 'create'
                            if len(preview_data) < 10:
                                preview_data.append({
                                    'action': action,
                                    'identifier': email,
                                    'details': f"{row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}".strip()
                                })
                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Import Preview",
                        'total_count': total_count,
                        'create_count': create_count,
                        'update_count': update_count,
                        'preview_data': preview_data,
                        'mapping': mapping,
                        'file_path': file_path,
                        'tenant_id': tenant_id,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_preview.html", context)
                except Exception as e:
                    self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                    return redirect("..")

            elif "_import" in request.POST:
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                mapping = {}
                for field_name, _ in VOLUNTEER_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')
                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        count = 0
                        errors = 0
                        failed_rows = []
                        for row in reader:
                            try:
                                email = row.get(mapping.get('email'))
                                if not email:
                                    row["Import Error"] = "Missing email"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue
                                # Copy mapped, non-blank values; email stays the
                                # lookup key and is excluded from defaults.
                                volunteer_data = {}
                                for field_name, csv_col in mapping.items():
                                    if csv_col:
                                        val = row.get(csv_col)
                                        if val is not None and str(val).strip() != '':
                                            if field_name == 'email': continue
                                            volunteer_data[field_name] = val
                                Volunteer.objects.update_or_create(
                                    tenant=tenant,
                                    email=email,
                                    defaults=volunteer_data
                                )
                                count += 1
                            except Exception as e:
                                logger.error(f"Error importing volunteer: {e}")
                                row["Import Error"] = str(e)
                                failed_rows.append(row)
                                errors += 1
                    if os.path.exists(file_path):
                        os.remove(file_path)
                    self.message_user(request, f"Successfully imported {count} volunteers.")
                    # Optimization: Limit error log size in session to avoid overflow
                    request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                    request.session.modified = True
                    if errors > 0:
                        error_url = reverse("admin:volunteer-download-errors")
                        self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}' download>Download failed records</a>"), level=messages.WARNING)
                    return redirect("..")
                except Exception as e:
                    self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                    return redirect("..")

            else:
                # Step 1: initial upload form POST.
                form = VolunteerImportForm(request.POST, request.FILES)
                if form.is_valid():
                    csv_file = request.FILES['file']
                    tenant = form.cleaned_data['tenant']
                    if not csv_file.name.endswith('.csv'):
                        self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                        return redirect("..")
                    # Spool the upload to disk so later steps can re-read it.
                    with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
                        for chunk in csv_file.chunks():
                            tmp.write(chunk)
                        file_path = tmp.name
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.reader(f)
                        headers = next(reader)
                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Map Volunteer Fields",
                        'headers': headers,
                        'model_fields': VOLUNTEER_MAPPABLE_FIELDS,
                        'tenant_id': tenant.id,
                        'file_path': file_path,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_mapping.html", context)
        else:
            form = VolunteerImportForm()
        # GET, or invalid upload form: render the upload page.
        context = self.admin_site.each_context(request)
        context['form'] = form
        context['title'] = "Import Volunteers"
        context['opts'] = self.model._meta
        return render(request, "admin/import_csv.html", context)
|
|
|
|
@admin.register(VolunteerEvent)
class VolunteerEventAdmin(admin.ModelAdmin):
    """Admin for volunteer-to-event assignments (which volunteer fills which role)."""

    list_display = ('volunteer', 'event', 'role_type')
    list_filter = ('event__tenant', 'event', 'role_type')
    # Autocomplete keeps the FK widgets usable when there are many volunteers/events.
    autocomplete_fields = ["volunteer", "event"]
|
|
|
|
@admin.register(EventParticipation)
class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    """Admin for event participations with a multi-step CSV import wizard."""

    list_display = ('voter', 'event', 'participation_status')
    list_filter = ('event__tenant', 'event', 'participation_status')
    autocomplete_fields = ["voter", "event"]
    change_list_template = "admin/eventparticipation_change_list.html"

    def get_urls(self):
        """Prepend the import and failed-row-download routes to the default admin URLs."""
        urls = super().get_urls()
        my_urls = [
            path('download-errors/', self.admin_site.admin_view(self.download_errors), name='eventparticipation-download-errors'),
            path('import-event-participations/', self.admin_site.admin_view(self.import_event_participations), name='import-event-participations'),
        ]
        return my_urls + urls

    def import_event_participations(self, request):
        """CSV import wizard: upload -> column mapping -> preview -> import.

        "_preview" counts would-be creates/updates from the mapped columns;
        "_import" upserts EventParticipation rows keyed on (event, voter),
        stores rows that failed (annotated with "Import Error") in the session
        for later download, and removes the uploaded temp file.
        """
        if request.method == "POST":
            if "_preview" in request.POST:
                # file_path/tenant are hidden fields posted back from the mapping page.
                # NOTE(review): file_path is taken from POST unvalidated; admin-only,
                # but consider keeping the path server-side instead.
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                mapping = {}
                for field_name, _ in EVENT_PARTICIPATION_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')
                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        total_count = 0
                        create_count = 0
                        update_count = 0
                        preview_data = []
                        for row in reader:
                            total_count += 1
                            voter_id = row.get(mapping.get('voter_id'))
                            event_name = row.get(mapping.get('event_name'))

                            # A row counts as an "update" only when the voter exists and
                            # already has a participation for an event with this name.
                            exists = False
                            if voter_id:
                                try:
                                    voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
                                    if event_name:
                                        exists = EventParticipation.objects.filter(voter=voter, event__name=event_name).exists()
                                except Voter.DoesNotExist:
                                    pass

                            if exists:
                                update_count += 1
                                action = 'update'
                            else:
                                create_count += 1
                                action = 'create'

                            # Only the first 10 rows appear in the preview table.
                            if len(preview_data) < 10:
                                preview_data.append({
                                    'action': action,
                                    'identifier': f"Voter: {voter_id}",
                                    'details': f"Participation: {row.get(mapping.get('participation_status', '')) or ''}"
                                })
                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Import Preview",
                        'total_count': total_count,
                        'create_count': create_count,
                        'update_count': update_count,
                        'preview_data': preview_data,
                        'mapping': mapping,
                        'file_path': file_path,
                        'tenant_id': tenant_id,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_preview.html", context)
                except Exception as e:
                    self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                    return redirect("..")

            elif "_import" in request.POST:
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)

                mapping = {}
                for field_name, _ in EVENT_PARTICIPATION_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')

                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        count = 0
                        errors = 0
                        failed_rows = []  # rows annotated with "Import Error" for download
                        for row in reader:
                            try:
                                voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None
                                participation_status_val = row.get(mapping.get('participation_status')) if mapping.get('participation_status') else None

                                if not voter_id:
                                    row["Import Error"] = "Missing voter ID"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                try:
                                    voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
                                except Voter.DoesNotExist:
                                    error_msg = f"Voter with ID {voter_id} not found"
                                    logger.error(error_msg)
                                    row["Import Error"] = error_msg
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                # Events must pre-exist; they are NOT created on demand.
                                event = None
                                event_name = row.get(mapping.get('event_name')) if mapping.get('event_name') else None
                                if event_name:
                                    try:
                                        event = Event.objects.get(tenant=tenant, name=event_name)
                                    except Event.DoesNotExist:
                                        pass

                                if not event:
                                    error_msg = "Event not found (check Event Name)"
                                    logger.error(error_msg)
                                    row["Import Error"] = error_msg
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                # Participation statuses ARE created on demand per tenant.
                                defaults = {}
                                if participation_status_val and participation_status_val.strip():
                                    status_obj, _ = ParticipationStatus.objects.get_or_create(tenant=tenant, name=participation_status_val.strip())
                                    defaults['participation_status'] = status_obj
                                else:
                                    # Default to 'Invited' if not specified
                                    status_obj, _ = ParticipationStatus.objects.get_or_create(tenant=tenant, name='Invited')
                                    defaults['participation_status'] = status_obj
                                # (event, voter) is the upsert identity.
                                EventParticipation.objects.update_or_create(
                                    event=event,
                                    voter=voter,
                                    defaults=defaults
                                )
                                count += 1
                            except Exception as e:
                                logger.error(f"Error importing: {e}")
                                row["Import Error"] = str(e)
                                failed_rows.append(row)
                                errors += 1

                    if os.path.exists(file_path):
                        os.remove(file_path)
                    self.message_user(request, f"Successfully imported {count} participations.")
                    # Optimization: Limit error log size in session to avoid overflow
                    request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                    request.session.modified = True
                    logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                    if errors > 0:
                        error_url = reverse("admin:eventparticipation-download-errors")
                        self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}' download>Download failed records</a>"), level=messages.WARNING)
                    return redirect("..")
                except Exception as e:
                    self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                    return redirect("..")
            else:
                form = EventParticipationImportForm(request.POST, request.FILES)
                if form.is_valid():
                    csv_file = request.FILES['file']
                    tenant = form.cleaned_data['tenant']

                    if not csv_file.name.endswith('.csv'):
                        self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                        return redirect("..")

                    # Persist the upload so later wizard steps can re-open it by path.
                    # NOTE(review): the temp file is only removed on "_import"; it
                    # lingers if the user abandons the wizard mid-way.
                    with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
                        for chunk in csv_file.chunks():
                            tmp.write(chunk)
                        file_path = tmp.name

                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.reader(f)
                        headers = next(reader)

                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Map Participation Fields",
                        'headers': headers,
                        'model_fields': EVENT_PARTICIPATION_MAPPABLE_FIELDS,
                        'tenant_id': tenant.id,
                        'file_path': file_path,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_mapping.html", context)
        else:
            form = EventParticipationImportForm()

        # Fall-through: initial GET (or an invalid upload form) renders the upload page.
        context = self.admin_site.each_context(request)
        context['form'] = form
        context['title'] = "Import Participations"
        context['opts'] = self.model._meta
        return render(request, "admin/import_csv.html", context)
|
|
|
|
@admin.register(Donation)
class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    """Admin for donations with a multi-step CSV import wizard."""

    list_display = ('id', 'voter', 'date', 'amount', 'method')
    list_filter = ('voter__tenant', 'date', 'method')
    search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id')
    autocomplete_fields = ["voter"]
    change_list_template = "admin/donation_change_list.html"

    def get_urls(self):
        """Prepend the import and failed-row-download routes to the default admin URLs."""
        urls = super().get_urls()
        my_urls = [
            path('download-errors/', self.admin_site.admin_view(self.download_errors), name='donation-download-errors'),
            path('import-donations/', self.admin_site.admin_view(self.import_donations), name='import-donations'),
        ]
        return my_urls + urls

    def import_donations(self, request):
        """CSV import wizard for donations.

        "_preview" summarises create/update counts using (voter, date, amount)
        as the identity; "_import" upserts Donation rows on the same key,
        stores failed rows in the session for download, and deletes the temp
        upload file.
        """
        if request.method == "POST":
            if "_preview" in request.POST:
                # Hidden fields posted back from the mapping page.
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                mapping = {}
                for field_name, _ in DONATION_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')
                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        total_count = 0
                        create_count = 0
                        update_count = 0
                        preview_data = []
                        for row in reader:
                            total_count += 1
                            voter_id = row.get(mapping.get('voter_id'))
                            # NOTE(review): this local shadows datetime.date imported at
                            # module top (harmless inside this method). The raw CSV string
                            # is passed to the ORM as-is — presumably ISO format; verify.
                            date = row.get(mapping.get('date'))
                            amount = row.get(mapping.get('amount'))
                            exists = False
                            if voter_id and date and amount:
                                exists = Donation.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, date=date, amount=amount).exists()

                            if exists:
                                update_count += 1
                                action = 'update'
                            else:
                                create_count += 1
                                action = 'create'

                            # Only the first 10 rows appear in the preview table.
                            if len(preview_data) < 10:
                                preview_data.append({
                                    'action': action,
                                    'identifier': f"Voter: {voter_id}",
                                    'details': f"Date: {date}, Amount: {amount}"
                                })
                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Import Preview",
                        'total_count': total_count,
                        'create_count': create_count,
                        'update_count': update_count,
                        'preview_data': preview_data,
                        'mapping': mapping,
                        'file_path': file_path,
                        'tenant_id': tenant_id,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_preview.html", context)
                except Exception as e:
                    self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                    return redirect("..")

            elif "_import" in request.POST:
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)

                mapping = {}
                for field_name, _ in DONATION_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')

                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        count = 0
                        errors = 0
                        failed_rows = []  # rows annotated with "Import Error" for download
                        for row in reader:
                            try:
                                voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None
                                if not voter_id:
                                    row["Import Error"] = "Missing voter ID"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                try:
                                    voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
                                except Voter.DoesNotExist:
                                    row["Import Error"] = f"Voter {voter_id} not found"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                date = row.get(mapping.get('date'))
                                amount = row.get(mapping.get('amount'))
                                method_name = row.get(mapping.get('method'))

                                if not date or not amount:
                                    row["Import Error"] = "Missing date or amount"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                # Donation methods are created on demand per tenant.
                                method = None
                                if method_name and method_name.strip():
                                    method, _ = DonationMethod.objects.get_or_create(
                                        tenant=tenant,
                                        name=method_name
                                    )

                                defaults = {}
                                if method:
                                    defaults['method'] = method

                                # (voter, date, amount) is the upsert identity.
                                Donation.objects.update_or_create(
                                    voter=voter,
                                    date=date,
                                    amount=amount,
                                    defaults=defaults
                                )
                                count += 1
                            except Exception as e:
                                logger.error(f"Error importing: {e}")
                                row["Import Error"] = str(e)
                                failed_rows.append(row)
                                errors += 1

                    if os.path.exists(file_path):
                        os.remove(file_path)
                    self.message_user(request, f"Successfully imported {count} donations.")
                    # Optimization: Limit error log size in session to avoid overflow
                    request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                    request.session.modified = True
                    logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                    if errors > 0:
                        error_url = reverse("admin:donation-download-errors")
                        self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}' download>Download failed records</a>"), level=messages.WARNING)
                    return redirect("..")
                except Exception as e:
                    self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                    return redirect("..")
            else:
                form = DonationImportForm(request.POST, request.FILES)
                if form.is_valid():
                    csv_file = request.FILES['file']
                    tenant = form.cleaned_data['tenant']

                    if not csv_file.name.endswith('.csv'):
                        self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                        return redirect("..")

                    # Persist the upload so later wizard steps can re-open it by path.
                    with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
                        for chunk in csv_file.chunks():
                            tmp.write(chunk)
                        file_path = tmp.name

                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.reader(f)
                        headers = next(reader)

                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Map Donation Fields",
                        'headers': headers,
                        'model_fields': DONATION_MAPPABLE_FIELDS,
                        'tenant_id': tenant.id,
                        'file_path': file_path,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_mapping.html", context)
        else:
            form = DonationImportForm()

        # Fall-through: initial GET (or an invalid upload form) renders the upload page.
        context = self.admin_site.each_context(request)
        context['form'] = form
        context['title'] = "Import Donations"
        context['opts'] = self.model._meta
        return render(request, "admin/import_csv.html", context)
|
|
|
|
@admin.register(Interaction)
class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    """Admin for voter interactions with a multi-step CSV import wizard."""

    list_display = ('id', 'voter', 'volunteer', 'type', 'date', 'description')
    list_filter = ('voter__tenant', 'type', 'date', 'volunteer')
    search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'description', 'volunteer__first_name', 'volunteer__last_name')
    autocomplete_fields = ["voter", "volunteer"]
    change_list_template = "admin/interaction_change_list.html"

    def get_urls(self):
        """Prepend the import and failed-row-download routes to the default admin URLs."""
        urls = super().get_urls()
        my_urls = [
            path('download-errors/', self.admin_site.admin_view(self.download_errors), name='interaction-download-errors'),
            path('import-interactions/', self.admin_site.admin_view(self.import_interactions), name='import-interactions'),
        ]
        return my_urls + urls

    def import_interactions(self, request):
        """CSV import wizard for interactions.

        "_preview" summarises create/update counts using (voter, date) as the
        identity; "_import" upserts Interaction rows on the same key, stores
        failed rows in the session for download, and deletes the temp upload.
        """
        if request.method == "POST":
            if "_preview" in request.POST:
                # Hidden fields posted back from the mapping page.
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                mapping = {}
                for field_name, _ in INTERACTION_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')
                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        total_count = 0
                        create_count = 0
                        update_count = 0
                        preview_data = []
                        for row in reader:
                            total_count += 1
                            voter_id = row.get(mapping.get('voter_id'))
                            # NOTE(review): shadows datetime.date imported at module top.
                            date = row.get(mapping.get('date'))
                            exists = False
                            if voter_id and date:
                                exists = Interaction.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, date=date).exists()

                            if exists:
                                update_count += 1
                                action = 'update'
                            else:
                                create_count += 1
                                action = 'create'

                            # Only the first 10 rows appear in the preview table.
                            if len(preview_data) < 10:
                                preview_data.append({
                                    'action': action,
                                    'identifier': f"Voter: {voter_id}",
                                    'details': f"Date: {date}, Desc: {row.get(mapping.get('description', '')) or ''}"
                                })
                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Import Preview",
                        'total_count': total_count,
                        'create_count': create_count,
                        'update_count': update_count,
                        'preview_data': preview_data,
                        'mapping': mapping,
                        'file_path': file_path,
                        'tenant_id': tenant_id,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_preview.html", context)
                except Exception as e:
                    self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                    return redirect("..")

            elif "_import" in request.POST:
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)

                mapping = {}
                for field_name, _ in INTERACTION_MAPPABLE_FIELDS:
                    mapping[field_name] = request.POST.get(f'map_{field_name}')

                try:
                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.DictReader(f)
                        count = 0
                        errors = 0
                        failed_rows = []  # rows annotated with "Import Error" for download
                        for row in reader:
                            try:
                                voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None
                                if not voter_id:
                                    row["Import Error"] = "Missing voter ID"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                try:
                                    voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
                                except Voter.DoesNotExist:
                                    row["Import Error"] = f"Voter {voter_id} not found"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                date = row.get(mapping.get('date'))
                                type_name = row.get(mapping.get('type'))
                                volunteer_email = row.get(mapping.get('volunteer_email'))
                                description = row.get(mapping.get('description'))
                                notes = row.get(mapping.get('notes'))

                                if not date or not description:
                                    row["Import Error"] = "Missing date or description"
                                    failed_rows.append(row)
                                    errors += 1
                                    continue

                                # Volunteer is optional; an unknown email is silently ignored.
                                volunteer = None
                                if volunteer_email and volunteer_email.strip():
                                    try:
                                        volunteer = Volunteer.objects.get(tenant=tenant, email=volunteer_email.strip())
                                    except Volunteer.DoesNotExist:
                                        pass

                                # Interaction types are created on demand per tenant.
                                interaction_type = None
                                if type_name and type_name.strip():
                                    interaction_type, _ = InteractionType.objects.get_or_create(
                                        tenant=tenant,
                                        name=type_name
                                    )

                                # Only fields that are present in the row are written.
                                defaults = {}
                                if volunteer:
                                    defaults['volunteer'] = volunteer
                                if interaction_type:
                                    defaults['type'] = interaction_type
                                if description and description.strip():
                                    defaults['description'] = description
                                if notes and notes.strip():
                                    defaults['notes'] = notes

                                # (voter, date) is the upsert identity.
                                Interaction.objects.update_or_create(
                                    voter=voter,
                                    date=date,
                                    defaults=defaults
                                )
                                count += 1
                            except Exception as e:
                                logger.error(f"Error importing: {e}")
                                row["Import Error"] = str(e)
                                failed_rows.append(row)
                                errors += 1

                    if os.path.exists(file_path):
                        os.remove(file_path)
                    self.message_user(request, f"Successfully imported {count} interactions.")
                    # Optimization: Limit error log size in session to avoid overflow
                    request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
                    request.session.modified = True
                    logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
                    if errors > 0:
                        error_url = reverse("admin:interaction-download-errors")
                        self.message_user(request, mark_safe(f"Failed to import {errors} rows. <a href='{error_url}' download>Download failed records</a>"), level=messages.WARNING)
                    return redirect("..")
                except Exception as e:
                    self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                    return redirect("..")
            else:
                form = InteractionImportForm(request.POST, request.FILES)
                if form.is_valid():
                    csv_file = request.FILES['file']
                    tenant = form.cleaned_data['tenant']

                    if not csv_file.name.endswith('.csv'):
                        self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                        return redirect("..")

                    # Persist the upload so later wizard steps can re-open it by path.
                    with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
                        for chunk in csv_file.chunks():
                            tmp.write(chunk)
                        file_path = tmp.name

                    with open(file_path, 'r', encoding='UTF-8') as f:
                        reader = csv.reader(f)
                        headers = next(reader)

                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Map Interaction Fields",
                        'headers': headers,
                        'model_fields': INTERACTION_MAPPABLE_FIELDS,
                        'tenant_id': tenant.id,
                        'file_path': file_path,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_mapping.html", context)
        else:
            form = InteractionImportForm()

        # Fall-through: initial GET (or an invalid upload form) renders the upload page.
        context = self.admin_site.each_context(request)
        context['form'] = form
        context['title'] = "Import Interactions"
        context['opts'] = self.model._meta
        return render(request, "admin/import_csv.html", context)
|
|
|
|
@admin.register(VoterLikelihood)
class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
    """Admin for voter turnout likelihoods with a bulk, chunked CSV import."""

    list_display = ('id', 'voter', 'election_type', 'likelihood')
    list_filter = ('voter__tenant', 'election_type', 'likelihood')
    search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id')
    autocomplete_fields = ["voter"]
    change_list_template = "admin/voterlikelihood_change_list.html"

    def get_urls(self):
        """Prepend the import and error-download routes to the default admin URLs."""
        urls = super().get_urls()
        my_urls = [
            path('download-errors/', self.admin_site.admin_view(self.download_errors), name='voterlikelihood-download-errors'),
            path('import-likelihoods/', self.admin_site.admin_view(self.import_likelihoods), name='import-likelihoods'),
        ]
        return my_urls + urls

    def import_likelihoods(self, request):
        """CSV import wizard for likelihoods: upload -> mapping -> preview -> import.

        The "_import" step streams the file in chunks of ``batch_size`` rows,
        resolving voters and election types in bulk and writing changes with
        ``bulk_create``/``bulk_update`` inside one transaction per chunk.

        Fixes vs. previous version: progress is reported via the module
        ``logger`` instead of ``print``, and per-row failures are logged
        instead of being silently swallowed. An unused ``failed_rows`` local
        (this importer has no per-row error download) was removed.
        """
        if request.method == "POST":
            if "_preview" in request.POST:
                # Hidden fields posted back from the mapping page.
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                # Only columns the user actually mapped (non-empty) are kept.
                mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}

                try:
                    with open(file_path, 'r', encoding='utf-8-sig') as f:
                        # First pass counts data rows (minus the header line).
                        total_count = sum(1 for line in f) - 1
                        f.seek(0)
                        reader = csv.DictReader(f)
                        preview_rows = []
                        voter_ids_for_preview = set()
                        election_types_for_preview = set()

                        v_id_col = mapping.get('voter_id')
                        et_col = mapping.get('election_type')

                        if not v_id_col or not et_col:
                            raise ValueError("Missing mapping for Voter ID or Election Type")

                        # Only the first 10 rows are previewed.
                        for i, row in enumerate(reader):
                            if i < 10:
                                preview_rows.append(row)
                                v_id = row.get(v_id_col)
                                et_name = row.get(et_col)
                                if v_id:
                                    voter_ids_for_preview.add(str(v_id).strip())
                                if et_name:
                                    election_types_for_preview.add(str(et_name).strip())
                            else:
                                break

                    # One query decides create-vs-update for the previewed rows.
                    existing_likelihoods = set(VoterLikelihood.objects.filter(
                        voter__tenant=tenant,
                        voter__voter_id__in=voter_ids_for_preview,
                        election_type__name__in=election_types_for_preview
                    ).values_list("voter__voter_id", "election_type__name"))

                    preview_data = []
                    for row in preview_rows:
                        v_id = str(row.get(v_id_col, '')).strip()
                        et_name = str(row.get(et_col, '')).strip()
                        action = "update" if (v_id, et_name) in existing_likelihoods else "create"
                        preview_data.append({
                            "action": action,
                            "identifier": f"Voter: {v_id}, Election: {et_name}",
                            "details": f"Likelihood: {row.get(mapping.get('likelihood', '')) or ''}"
                        })

                    context = self.admin_site.each_context(request)
                    context.update({
                        "title": "Import Preview",
                        "total_count": total_count,
                        # Exact counts would require a full-file scan, so N/A here.
                        "create_count": "N/A",
                        "update_count": "N/A",
                        "preview_data": preview_data,
                        "mapping": mapping,
                        "file_path": file_path,
                        "tenant_id": tenant_id,
                        "action_url": request.path,
                        "opts": self.model._meta,
                    })
                    return render(request, "admin/import_preview.html", context)
                except Exception as e:
                    self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
                    return redirect("..")

            elif "_import" in request.POST:
                file_path = request.POST.get('file_path')
                tenant_id = request.POST.get('tenant')
                tenant = Tenant.objects.get(id=tenant_id)
                mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}

                try:
                    count = 0
                    created_count = 0
                    updated_count = 0
                    skipped_no_change = 0
                    skipped_no_id = 0
                    errors = 0
                    batch_size = 2000

                    # Accept either a stored choice key or its display label
                    # (case-insensitive) as the CSV likelihood value.
                    likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES)
                    likelihood_reverse = {v.lower(): k for k, v in likelihood_choices.items()}
                    # Pre-load this tenant's election types; extended on demand below.
                    election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)}

                    with open(file_path, "r", encoding="utf-8-sig") as f:
                        raw_reader = csv.reader(f)
                        headers = next(raw_reader)
                        h_idx = {h: i for i, h in enumerate(headers)}

                        v_id_col = mapping.get("voter_id")
                        et_col = mapping.get("election_type")
                        l_col = mapping.get("likelihood")

                        if not v_id_col or not et_col or not l_col:
                            raise ValueError("Missing mapping for Voter ID, Election Type, or Likelihood")

                        v_idx = h_idx[v_id_col]
                        e_idx = h_idx[et_col]
                        l_idx = h_idx[l_col]

                        total_processed = 0
                        # chunk_reader (from BaseImportAdminMixin) yields lists of rows.
                        for chunk in self.chunk_reader(raw_reader, batch_size):
                            with transaction.atomic():
                                voter_ids = []
                                chunk_data = []
                                for row in chunk:
                                    # Skip short rows that cannot hold every mapped column.
                                    if len(row) <= max(v_idx, e_idx, l_idx):
                                        continue
                                    v_id = row[v_idx].strip()
                                    et_name = row[e_idx].strip()
                                    l_val = row[l_idx].strip()
                                    if v_id and et_name and l_val:
                                        voter_ids.append(v_id)
                                        chunk_data.append((v_id, et_name, l_val, row))
                                    else:
                                        skipped_no_id += 1

                                # Bulk-resolve voters and existing likelihoods for the chunk.
                                voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")}
                                et_names = [d[1] for d in chunk_data]
                                existing_likelihoods = {
                                    (vl.voter.voter_id, vl.election_type.name): vl
                                    for vl in VoterLikelihood.objects.filter(
                                        voter__tenant=tenant,
                                        voter__voter_id__in=voter_ids,
                                        election_type__name__in=et_names
                                    ).only("id", "likelihood", "voter__voter_id", "election_type__name").select_related("voter", "election_type")
                                }

                                to_create = []
                                to_update = []
                                # Guards against duplicate (voter, election) pairs within a
                                # chunk, which would otherwise bulk_create duplicates.
                                processed_in_batch = set()

                                for v_id, et_name, l_val, row in chunk_data:
                                    total_processed += 1
                                    try:
                                        if (v_id, et_name) in processed_in_batch:
                                            continue
                                        processed_in_batch.add((v_id, et_name))

                                        voter = voters.get(v_id)
                                        if not voter:
                                            errors += 1
                                            continue

                                        # Election types are created on demand and cached.
                                        if et_name not in election_types:
                                            election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=et_name)
                                            election_types[et_name] = election_type
                                        election_type = election_types[et_name]

                                        # Normalise the CSV value to a choice key: try the key
                                        # form first, then the label lookup, then a direct
                                        # case-insensitive label comparison.
                                        normalized_l = None
                                        l_val_lower = l_val.lower().replace(' ', '_')
                                        if l_val_lower in likelihood_choices:
                                            normalized_l = l_val_lower
                                        elif l_val_lower in likelihood_reverse:
                                            normalized_l = likelihood_reverse[l_val_lower]
                                        else:
                                            for k, v in likelihood_choices.items():
                                                if v.lower() == l_val.lower():
                                                    normalized_l = k
                                                    break

                                        if not normalized_l:
                                            errors += 1
                                            continue

                                        vl = existing_likelihoods.get((v_id, et_name))
                                        if not vl:
                                            to_create.append(VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l))
                                            created_count += 1
                                        elif vl.likelihood != normalized_l:
                                            vl.likelihood = normalized_l
                                            to_update.append(vl)
                                            updated_count += 1
                                        else:
                                            skipped_no_change += 1

                                        count += 1
                                    except Exception as e:
                                        # Fix: row-level failures were silently swallowed.
                                        logger.error("Error importing likelihood row: %s", e)
                                        errors += 1

                                if to_create:
                                    VoterLikelihood.objects.bulk_create(to_create, batch_size=batch_size)
                                if to_update:
                                    VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=batch_size)

                            # Fix: was a bare print(); report per-chunk progress via logger.
                            logger.debug("Likelihood import progress: %s processed. %s created/updated.", total_processed, count)

                    if os.path.exists(file_path):
                        os.remove(file_path)

                    self.message_user(request, f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped, {errors} errors)")
                    return redirect("..")
                except Exception as e:
                    self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
                    return redirect("..")
            else:
                form = VoterLikelihoodImportForm(request.POST, request.FILES)
                if form.is_valid():
                    csv_file = request.FILES['file']
                    tenant = form.cleaned_data['tenant']
                    if not csv_file.name.endswith('.csv'):
                        self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
                        return redirect("..")
                    # Persist the upload so later wizard steps can re-open it by path.
                    with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
                        for chunk in csv_file.chunks():
                            tmp.write(chunk)
                        file_path = tmp.name
                    with open(file_path, 'r', encoding='utf-8-sig') as f:
                        reader = csv.reader(f)
                        headers = next(reader)
                    context = self.admin_site.each_context(request)
                    context.update({
                        'title': "Map Likelihood Fields",
                        'headers': headers,
                        'model_fields': VOTER_LIKELIHOOD_MAPPABLE_FIELDS,
                        'tenant_id': tenant.id,
                        'file_path': file_path,
                        'action_url': request.path,
                        'opts': self.model._meta,
                    })
                    return render(request, "admin/import_mapping.html", context)
        else:
            form = VoterLikelihoodImportForm()

        # Fall-through: initial GET (or an invalid upload form) renders the upload page.
        context = self.admin_site.each_context(request)
        context['form'] = form
        context['title'] = "Import Likelihoods"
        context['opts'] = self.model._meta
        return render(request, "admin/import_csv.html", context)
|
|
|
|
@admin.register(CampaignSettings)
class CampaignSettingsAdmin(admin.ModelAdmin):
    """Admin for per-tenant campaign settings (donation goal, Twilio credentials, timezone)."""

    fields = (
        'tenant',
        'donation_goal',
        'twilio_account_sid',
        'twilio_auth_token',
        'twilio_from_number',
        'timezone',
    )
    list_display = ('tenant', 'donation_goal', 'twilio_from_number', 'timezone')
    list_filter = ('tenant',)
|
|
|
|
@admin.register(VotingRecord)
|
|
class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
|
|
list_display = ('voter', 'election_date', 'election_description', 'primary_party')
|
|
list_filter = ('voter__tenant', 'election_date', 'primary_party')
|
|
search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'election_description')
|
|
autocomplete_fields = ["voter"]
|
|
change_list_template = "admin/votingrecord_change_list.html"
|
|
|
|
def changelist_view(self, request, extra_context=None):
|
|
extra_context = extra_context or {}
|
|
from core.models import Tenant
|
|
extra_context["tenants"] = Tenant.objects.all()
|
|
return super().changelist_view(request, extra_context=extra_context)
|
|
|
|
def get_urls(self):
|
|
urls = super().get_urls()
|
|
my_urls = [
|
|
path('download-errors/', self.admin_site.admin_view(self.download_errors), name='votingrecord-download-errors'),
|
|
path('import-voting-records/', self.admin_site.admin_view(self.import_voting_records), name='import-voting-records'),
|
|
]
|
|
return my_urls + urls
|
|
|
|
|
|
def import_voting_records(self, request):
|
|
if request.method == "POST":
|
|
if "_preview" in request.POST:
|
|
file_path = request.POST.get('file_path')
|
|
tenant_id = request.POST.get('tenant')
|
|
tenant = Tenant.objects.get(id=tenant_id)
|
|
mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTING_RECORD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
|
|
|
|
try:
|
|
with open(file_path, 'r', encoding='utf-8-sig') as f:
|
|
total_count = sum(1 for line in f) - 1
|
|
f.seek(0)
|
|
reader = csv.DictReader(f)
|
|
preview_rows = []
|
|
voter_ids_for_preview = set()
|
|
|
|
v_id_col = mapping.get('voter_id')
|
|
ed_col = mapping.get('election_date')
|
|
desc_col = mapping.get('election_description')
|
|
|
|
if not v_id_col or not ed_col or not desc_col:
|
|
raise ValueError("Missing mapping for Voter ID, Election Date, or Description")
|
|
|
|
for i, row in enumerate(reader):
|
|
if i < 10:
|
|
preview_rows.append(row)
|
|
v_id = row.get(v_id_col)
|
|
if v_id: voter_ids_for_preview.add(str(v_id).strip())
|
|
else:
|
|
break
|
|
|
|
existing_records = set(VotingRecord.objects.filter(
|
|
voter__tenant=tenant,
|
|
voter__voter_id__in=voter_ids_for_preview
|
|
).values_list("voter__voter_id", "election_date", "election_description"))
|
|
|
|
preview_data = []
|
|
for row in preview_rows:
|
|
v_id = str(row.get(v_id_col, '')).strip()
|
|
e_date_raw = row.get(ed_col)
|
|
e_desc = str(row.get(desc_col, '')).strip()
|
|
|
|
e_date = None
|
|
if e_date_raw:
|
|
for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
|
|
try:
|
|
e_date = datetime.strptime(str(e_date_raw).strip(), fmt).date()
|
|
break
|
|
except: continue
|
|
|
|
action = "update" if (v_id, e_date, e_desc) in existing_records else "create"
|
|
preview_data.append({
|
|
"action": action,
|
|
"identifier": f"Voter: {v_id}, Election: {e_desc}",
|
|
"details": f"Date: {e_date or e_date_raw}"
|
|
})
|
|
|
|
context = self.admin_site.each_context(request)
|
|
context.update({
|
|
"title": "Import Preview",
|
|
"total_count": total_count,
|
|
"create_count": "N/A",
|
|
"update_count": "N/A",
|
|
"preview_data": preview_data,
|
|
"mapping": mapping,
|
|
"file_path": file_path,
|
|
"tenant_id": tenant_id,
|
|
"action_url": request.path,
|
|
"opts": self.model._meta,
|
|
})
|
|
return render(request, "admin/import_preview.html", context)
|
|
except Exception as e:
|
|
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
|
|
return redirect("..")
|
|
|
|
elif "_import" in request.POST:
|
|
file_path = request.POST.get('file_path')
|
|
tenant_id = request.POST.get('tenant')
|
|
tenant = Tenant.objects.get(id=tenant_id)
|
|
mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTING_RECORD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
|
|
|
|
try:
|
|
count = 0
|
|
created_count = 0
|
|
updated_count = 0
|
|
skipped_no_change = 0
|
|
errors = 0
|
|
batch_size = 2000
|
|
|
|
with open(file_path, "r", encoding="utf-8-sig") as f:
|
|
raw_reader = csv.reader(f)
|
|
headers = next(raw_reader)
|
|
h_idx = {h: i for i, h in enumerate(headers)}
|
|
|
|
v_id_col = mapping.get("voter_id")
|
|
ed_col = mapping.get("election_date")
|
|
desc_col = mapping.get("election_description")
|
|
party_col = mapping.get("primary_party")
|
|
|
|
if not v_id_col or not ed_col or not desc_col:
|
|
raise ValueError("Missing mapping for Voter ID, Election Date, or Description")
|
|
|
|
v_idx = h_idx[v_id_col]
|
|
ed_idx = h_idx[ed_col]
|
|
desc_idx = h_idx[desc_col]
|
|
p_idx = h_idx.get(party_col)
|
|
|
|
total_processed = 0
|
|
for chunk in self.chunk_reader(raw_reader, batch_size):
|
|
with transaction.atomic():
|
|
voter_ids = [row[v_idx].strip() for row in chunk if len(row) > v_idx and row[v_idx].strip()]
|
|
voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")}
|
|
|
|
existing_records = {
|
|
(vr.voter.voter_id, vr.election_date, vr.election_description): vr
|
|
for vr in VotingRecord.objects.filter(
|
|
voter__tenant=tenant,
|
|
voter__voter_id__in=voter_ids
|
|
).only("id", "election_date", "election_description", "voter__voter_id").select_related("voter")
|
|
}
|
|
|
|
to_create = []
|
|
to_update = []
|
|
processed_in_batch = set()
|
|
|
|
for row in chunk:
|
|
total_processed += 1
|
|
try:
|
|
if len(row) <= max(v_idx, ed_idx, desc_idx): continue
|
|
v_id = row[v_idx].strip()
|
|
raw_ed = row[ed_idx].strip()
|
|
desc = row[desc_idx].strip()
|
|
party = row[p_idx].strip() if p_idx is not None and len(row) > p_idx else ""
|
|
|
|
if not v_id or not raw_ed or not desc: continue
|
|
|
|
if (v_id, raw_ed, desc) in processed_in_batch: continue
|
|
processed_in_batch.add((v_id, raw_ed, desc))
|
|
|
|
voter = voters.get(v_id)
|
|
if not voter:
|
|
errors += 1
|
|
continue
|
|
|
|
e_date = None
|
|
for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
|
|
try:
|
|
e_date = datetime.strptime(raw_ed, fmt).date()
|
|
break
|
|
except: continue
|
|
|
|
if not e_date:
|
|
errors += 1
|
|
continue
|
|
|
|
vr = existing_records.get((v_id, e_date, desc))
|
|
if not vr:
|
|
to_create.append(VotingRecord(voter=voter, election_date=e_date, election_description=desc, primary_party=party))
|
|
created_count += 1
|
|
elif vr.primary_party != party:
|
|
vr.primary_party = party
|
|
to_update.append(vr)
|
|
updated_count += 1
|
|
else:
|
|
skipped_no_change += 1
|
|
|
|
count += 1
|
|
except Exception as e:
|
|
errors += 1
|
|
|
|
if to_create: VotingRecord.objects.bulk_create(to_create, batch_size=batch_size)
|
|
if to_update: VotingRecord.objects.bulk_update(to_update, ["primary_party"], batch_size=batch_size)
|
|
|
|
print(f"DEBUG: Voting record import progress: {total_processed} processed. {count} created/updated.")
|
|
|
|
if os.path.exists(file_path):
|
|
os.remove(file_path)
|
|
|
|
self.message_user(request, f"Import complete: {count} voting records created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped, {errors} errors)")
|
|
return redirect("..")
|
|
except Exception as e:
|
|
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
|
|
return redirect("..")
|
|
else:
|
|
form = VotingRecordImportForm(request.POST, request.FILES)
|
|
if form.is_valid():
|
|
csv_file = request.FILES['file']
|
|
tenant = form.cleaned_data['tenant']
|
|
if not csv_file.name.endswith('.csv'):
|
|
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
|
|
return redirect("..")
|
|
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
|
|
for chunk in csv_file.chunks(): tmp.write(chunk)
|
|
file_path = tmp.name
|
|
with open(file_path, 'r', encoding='utf-8-sig') as f:
|
|
reader = csv.reader(f)
|
|
headers = next(reader)
|
|
context = self.admin_site.each_context(request)
|
|
context.update({
|
|
'title': "Map Voting Record Fields",
|
|
'headers': headers,
|
|
'model_fields': VOTING_RECORD_MAPPABLE_FIELDS,
|
|
'tenant_id': tenant.id,
|
|
'file_path': file_path,
|
|
'action_url': request.path,
|
|
'opts': self.model._meta,
|
|
})
|
|
return render(request, "admin/import_mapping.html", context)
|
|
else:
|
|
form = VotingRecordImportForm()
|
|
context = self.admin_site.each_context(request)
|
|
context['form'] = form
|
|
context['title'] = "Import Voting Records"
|
|
context['opts'] = self.model._meta
|
|
return render(request, "admin/import_csv.html", context)
|