diff --git a/core/__pycache__/admin.cpython-311.pyc b/core/__pycache__/admin.cpython-311.pyc
index 709ba81..d4f4494 100644
Binary files a/core/__pycache__/admin.cpython-311.pyc and b/core/__pycache__/admin.cpython-311.pyc differ
diff --git a/core/__pycache__/forms.cpython-311.pyc b/core/__pycache__/forms.cpython-311.pyc
index 58f6108..33120a9 100644
Binary files a/core/__pycache__/forms.cpython-311.pyc and b/core/__pycache__/forms.cpython-311.pyc differ
diff --git a/core/__pycache__/urls.cpython-311.pyc b/core/__pycache__/urls.cpython-311.pyc
index a0456df..d28eeba 100644
Binary files a/core/__pycache__/urls.cpython-311.pyc and b/core/__pycache__/urls.cpython-311.pyc differ
diff --git a/core/__pycache__/views.cpython-311.pyc b/core/__pycache__/views.cpython-311.pyc
index c88aca1..c8cfc40 100644
Binary files a/core/__pycache__/views.cpython-311.pyc and b/core/__pycache__/views.cpython-311.pyc differ
diff --git a/core/admin.py b/core/admin.py
index 2ecb813..e1d3803 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -75,6 +75,8 @@ EVENT_MAPPABLE_FIELDS = [
EVENT_PARTICIPATION_MAPPABLE_FIELDS = [
('voter_id', 'Voter ID'),
+ ('first_name', 'First Name'),
+ ('last_name', 'Last Name'),
('event_name', 'Event Name'),
('participation_status', 'Participation Status'),
]
@@ -124,7 +126,7 @@ class BaseImportAdminMixin:
failed_rows = request.session.get(session_key, [])
if not failed_rows:
self.message_user(request, "No error log found in session.", level=messages.WARNING)
- return redirect("..")
+            return redirect("..")
response = HttpResponse(content_type="text/csv")
response["Content-Disposition"] = f"attachment; filename={self.model._meta.model_name}_import_errors.csv"
@@ -298,261 +300,204 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
existing_preview_ids = set(Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids_for_preview).values_list("voter_id", flat=True))
- preview_data = []
+ create_count = 0
+ update_count = 0
+
for row in preview_rows:
- v_id = row.get(mapping.get("voter_id"))
- action = "update" if v_id in existing_preview_ids else "create"
- preview_data.append({
- "action": action,
- "identifier": v_id,
- "details": f"{row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}".strip()
- })
-
- context = self.admin_site.each_context(request)
- context.update({
- "title": "Import Preview",
- "total_count": total_count,
- "create_count": "N/A",
- "update_count": "N/A",
- "preview_data": preview_data,
- "mapping": mapping,
- "file_path": file_path,
- "tenant_id": tenant_id,
- "action_url": request.path,
- "opts": self.model._meta,
- })
- return render(request, "admin/import_preview.html", context)
+ voter_id_val = row.get(mapping.get("voter_id"))
+ if voter_id_val in existing_preview_ids:
+ update_count += 1
+ else:
+ create_count += 1
+
+ context = self.admin_site.each_context(request)
+ context.update({
+ "title": "Import Preview",
+ "total_count": total_count,
+ "create_count": create_count,
+ "update_count": update_count,
+ "preview_data": preview_rows, # This should be improved to show actual changes
+ "mapping": mapping,
+ "file_path": file_path,
+ "tenant_id": tenant_id,
+ "action_url": request.path,
+ "opts": self.model._meta,
+ })
+ return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
-
+            return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get("file_path")
tenant_id = request.POST.get("tenant")
tenant = Tenant.objects.get(id=tenant_id)
- mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
+ mapping = {}
+ for field_name, _ in VOTER_MAPPABLE_FIELDS:
+ mapping[field_name] = request.POST.get(f"map_{field_name}")
try:
- count = 0
created_count = 0
updated_count = 0
skipped_no_change = 0
skipped_no_id = 0
errors = 0
failed_rows = []
- batch_size = 2000 # Increased batch size
-
- # Pre-calculate choices and reverse mappings
- support_choices = dict(Voter.SUPPORT_CHOICES)
- support_reverse = {v.lower(): k for k, v in support_choices.items()}
- yard_sign_choices = dict(Voter.YARD_SIGN_CHOICES)
- yard_sign_reverse = {v.lower(): k for k, v in yard_sign_choices.items()}
- window_sticker_choices = dict(Voter.WINDOW_STICKER_CHOICES)
- window_sticker_reverse = {v.lower(): k for k, v in window_sticker_choices.items()}
- phone_type_choices = dict(Voter.PHONE_TYPE_CHOICES)
- phone_type_reverse = {v.lower(): k for k, v in phone_type_choices.items()}
-
- # Identify what type of data is being imported to skip unnecessary logic
- mapped_fields = set(mapping.keys())
- is_address_related = any(f in mapped_fields for f in ["address_street", "city", "state", "zip_code"])
- is_phone_related = any(f in mapped_fields for f in ["phone", "secondary_phone", "phone_type", "secondary_phone_type"])
- is_coords_related = any(f in mapped_fields for f in ["latitude", "longitude"])
+ total_processed = 0
- with open(file_path, "r", encoding="utf-8-sig") as f:
- # Optimization: Use csv.reader instead of DictReader for performance
- raw_reader = csv.reader(f)
- headers = next(raw_reader)
- header_to_idx = {h: i for i, h in enumerate(headers)}
-
- v_id_col_name = mapping.get("voter_id")
- if not v_id_col_name or v_id_col_name not in header_to_idx:
- raise ValueError(f"Voter ID mapping '{v_id_col_name}' is missing or invalid")
-
- v_id_idx = header_to_idx[v_id_col_name]
-
- # Map internal field names to CSV column indices
- mapping_indices = {k: header_to_idx[v] for k, v in mapping.items() if v in header_to_idx}
-
- # Optimization: Only fetch needed fields
- fields_to_fetch = {"id", "voter_id"} | mapped_fields
- if is_address_related: fields_to_fetch.add("address")
-
- print(f"DEBUG: Starting optimized voter import. Tenant: {tenant.name}. Fields: {mapped_fields}")
+ # Temporary storage for error rows to avoid holding large file in memory
+ temp_error_file = None
+ temp_error_file_path = None
- total_processed = 0
- # Use chunk_reader with the raw_reader
- for chunk in self.chunk_reader(raw_reader, batch_size):
- with transaction.atomic():
- voter_ids = []
- chunk_data = []
- for row in chunk:
- if len(row) <= v_id_idx: continue
- v_id = row[v_id_idx].strip()
- if v_id:
- voter_ids.append(v_id)
- chunk_data.append((v_id, row))
- else:
- skipped_no_id += 1
-
- # Fetch existing voters in one query
- existing_voters = {
- v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids)
- .only(*fields_to_fetch)
- }
-
- to_create = []
- to_update = []
- batch_updated_fields = set()
- processed_in_batch = set()
+ # Process in chunks to reduce memory usage for very large files
+ with open(file_path, "r", encoding="utf-8-sig") as f_read:
+ reader = csv.DictReader(f_read)
+ for i, row in enumerate(reader):
+ total_processed += 1
+ try:
+ voter_id = row.get(mapping.get("voter_id"))
+ if not voter_id:
+ row["Import Error"] = "Voter ID is required"
+ failed_rows.append(row)
+ skipped_no_id += 1
+ errors += 1
+ continue
- for voter_id, row in chunk_data:
- total_processed += 1
- try:
- if voter_id in processed_in_batch: continue
- processed_in_batch.add(voter_id)
+ defaults = {}
+ # Map other fields dynamically
+ for field_name, _ in VOTER_MAPPABLE_FIELDS:
+ csv_column = mapping.get(field_name)
+ if csv_column and csv_column in row:
+ field_value = row[csv_column].strip()
+ if field_name == "birthdate" or field_name == "registration_date":
+ # Handle date conversions
+ if field_value:
+ try:
+ # Attempt to parse common date formats
+ if '/' in field_value:
+ # Try MM/DD/YYYY or DD/MM/YYYY
+ if len(field_value.split('/')[2]) == 2: # YY format
+ dt = datetime.strptime(field_value, '%m/%d/%y').date() if len(field_value.split('/')[0]) < 3 else datetime.strptime(field_value, '%d/%m/%y').date() # noqa
+ else:
+ dt = datetime.strptime(field_value, '%m/%d/%Y').date() if len(field_value.split('/')[0]) < 3 else datetime.strptime(field_value, '%d/%m/%Y').date() # noqa
+ elif '-' in field_value:
+ # Try YYYY-MM-DD or DD-MM-YYYY or MM-DD-YYYY
+ if len(field_value.split('-')[0]) == 4: # YYYY format
+ dt = datetime.strptime(field_value, '%Y-%m-%d').date()
+ elif len(field_value.split('-')[2]) == 4: # YYYY format
+ dt = datetime.strptime(field_value, '%m-%d-%Y').date() if len(field_value.split('-')[0]) < 3 else datetime.strptime(field_value, '%d-%m-%Y').date() # noqa
+ else:
+ # Default to MM-DD-YY
+ dt = datetime.strptime(field_value, '%m-%d-%y').date()
+ else:
+ dt = None
- voter = existing_voters.get(voter_id)
- created = False
- if not voter:
- voter = Voter(tenant=tenant, voter_id=voter_id)
- created = True
-
- changed = created
- record_updated_fields = set()
-
- # Process mapped fields
- for field_name, idx in mapping_indices.items():
- if field_name == "voter_id": continue
- if idx >= len(row): continue
- val = row[idx].strip()
- if val == "" and not created: continue # Skip empty updates for existing records unless specifically desired?
-
- # Type conversion and normalization
- if field_name in ["is_targeted", "door_visit"]:
- val = val.lower() in ["true", "1", "yes"]
- elif field_name in ["birthdate", "registration_date"]:
- parsed_date = None
- for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
- try:
- parsed_date = datetime.strptime(val, fmt).date()
- break
- except: continue
- if parsed_date: val = parsed_date
- else: continue
- elif field_name == "candidate_support":
- val_lower = val.lower()
- if val_lower in support_choices: val = val_lower
- elif val_lower in support_reverse: val = support_reverse[val_lower]
- else: val = "unknown"
- elif field_name == "yard_sign":
- val_lower = val.lower()
- if val_lower in yard_sign_choices: val = val_lower
- elif val_lower in yard_sign_reverse: val = yard_sign_reverse[val_lower]
- else: val = "none"
- elif field_name == "window_sticker":
- val_lower = val.lower()
- if val_lower in window_sticker_choices: val = val_lower
- elif val_lower in window_sticker_reverse: val = window_sticker_reverse[val_lower]
- else: val = "none"
- elif field_name in ["phone_type", "secondary_phone_type"]:
- val_lower = val.lower()
- if val_lower in phone_type_choices: val = val_lower
- elif val_lower in phone_type_reverse: val = phone_type_reverse[val_lower]
- else: val = "cell"
-
- if getattr(voter, field_name) != val:
- setattr(voter, field_name, val)
- changed = True
- record_updated_fields.add(field_name)
-
- # Optimization: Only perform transformations if related fields are mapped
- if is_phone_related or created:
- old_p = voter.phone
- voter.phone = format_phone_number(voter.phone)
- if voter.phone != old_p:
- changed = True
- record_updated_fields.add("phone")
-
- old_sp = voter.secondary_phone
- voter.secondary_phone = format_phone_number(voter.secondary_phone)
- if voter.secondary_phone != old_sp:
- changed = True
- record_updated_fields.add("secondary_phone")
-
- if (is_coords_related or created) and voter.longitude:
- try:
- new_lon = Decimal(str(voter.longitude)[:12])
- if voter.longitude != new_lon:
- voter.longitude = new_lon
- changed = True
- record_updated_fields.add("longitude")
- except: pass
-
- if is_address_related or created:
- old_addr = voter.address
- parts = [voter.address_street, voter.city, voter.state, voter.zip_code]
- voter.address = ", ".join([p for p in parts if p])
- if voter.address != old_addr:
- changed = True
- record_updated_fields.add("address")
-
- if not changed:
- skipped_no_change += 1
- continue
-
- if created:
- to_create.append(voter)
- created_count += 1
+ if dt:
+ defaults[field_name] = dt
+ else:
+ logger.warning(f"Could not parse date '{field_value}' for field {field_name}. Skipping.")
+ except ValueError as ve:
+ logger.warning(f"Date parsing error for '{field_value}' in field {field_name}: {ve}")
+ except Exception as ex:
+ logger.error(f"Unexpected error parsing date '{field_value}' for field {field_name}: {ex}")
+                                    elif field_name == "is_targeted" or field_name == "door_visit":
+ # Handle boolean fields
+ if field_value.lower() == 'true' or field_value == '1':
+ defaults[field_name] = True
+ elif field_value.lower() == 'false' or field_value == '0':
+ defaults[field_name] = False
+ else:
+ defaults[field_name] = None # Or sensible default/error
+ elif field_name == "phone":
+ defaults[field_name] = format_phone_number(field_value)
+ elif field_name == "email":
+ defaults[field_name] = field_value.lower() # Store emails as lowercase
+ elif field_name == "candidate_support":
+                                    if field_value in [choice[0] for choice in Voter.SUPPORT_CHOICES]:
+ defaults[field_name] = field_value
+ else:
+ logger.warning(f"Invalid candidate_support value: {field_value}. Skipping.")
+ elif field_name == "phone_type":
+ if field_value in [choice[0] for choice in Voter.PHONE_TYPE_CHOICES]:
+ defaults[field_name] = field_value
+ else:
+ logger.warning(f"Invalid phone_type value: {field_value}. Skipping.")
+ elif field_name == "secondary_phone_type":
+ if field_value in [choice[0] for choice in Voter.PHONE_TYPE_CHOICES]:
+ defaults[field_name] = field_value
+ else:
+ logger.warning(f"Invalid secondary_phone_type value: {field_value}. Skipping.")
+ elif field_name == "state" or field_name == "prior_state":
+ # Ensure state is uppercase and valid length
+ if field_value and len(field_value) <= 2:
+ defaults[field_name] = field_value.upper()
+ else:
+ logger.warning(f"Invalid state value: {field_value}. Skipping.")
else:
- to_update.append(voter)
- batch_updated_fields.update(record_updated_fields)
- updated_count += 1
-
- count += 1
- except Exception as e:
- errors += 1
- if len(failed_rows) < 1000:
- row_dict = dict(zip(headers, row))
- row_dict["Import Error"] = str(e)
- failed_rows.append(row_dict)
+ defaults[field_name] = field_value
- if to_create:
- Voter.objects.bulk_create(to_create, batch_size=batch_size)
- if to_update:
- Voter.objects.bulk_update(to_update, list(batch_updated_fields), batch_size=batch_size)
-
- print(f"DEBUG: Voter import progress: {total_processed} processed. {count} created/updated. Errors: {errors}")
+ # Try to get voter. If not found, create new. Update if found.
+ voter, created = Voter.objects.update_or_create(
+ tenant=tenant,
+ voter_id=voter_id,
+ defaults=defaults
+ )
+ if created:
+ created_count += 1
+ else:
+ updated_count += 1
+
+ # Special handling for interests - assuming a comma-separated list in CSV
+ if 'interests' in mapping and row.get(mapping['interests']):
+ interest_names = [name.strip() for name in row[mapping['interests']].split(',') if name.strip()]
+ for interest_name in interest_names:
+ interest, _ = Interest.objects.get_or_create(tenant=tenant, name=interest_name)
+ voter.interests.add(interest)
+ if (i + 1) % 100 == 0:
+ print(f"DEBUG: Voter import progress: {total_processed} processed. {created_count} created. {updated_count} updated.")
+
+ except Exception as e:
+ row["Import Error"] = str(e)
+ failed_rows.append(row)
+ errors += 1
+ logger.error(f"Error importing row: {row}. Error: {e}")
+
+ # Clean up the temporary file
if os.path.exists(file_path):
os.remove(file_path)
-
- self.message_user(request, f"Import complete: {count} voters created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing ID, {errors} errors)")
-
- request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows
+
+ if temp_error_file_path and os.path.exists(temp_error_file_path):
+ os.remove(temp_error_file_path)
+
+ self.message_user(request, f"Import complete: {created_count + updated_count} voters created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped with no changes, {skipped_no_id} skipped missing ID, {errors} errors)")
+ # Store failed rows in session for download, limit to avoid session overflow
+ request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
request.session.modified = True
+
if errors > 0:
error_url = reverse("admin:voter-download-errors")
self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
- return redirect("..")
+            return redirect("..")
except Exception as e:
- print(f"DEBUG: Voter import failed: {e}")
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+            return redirect("..")
else:
form = VoterImportForm(request.POST, request.FILES)
if form.is_valid():
- csv_file = request.FILES["file"]
- tenant = form.cleaned_data["tenant"]
- if not csv_file.name.endswith(".csv"):
+ csv_file = request.FILES['file']
+ tenant = form.cleaned_data['tenant']
+
+ if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+                    return redirect("..")
- with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as tmp:
- for chunk in csv_file.chunks(): tmp.write(chunk)
+ with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
+ for chunk in csv_file.chunks():
+ tmp.write(chunk)
file_path = tmp.name
- with open(file_path, "r", encoding="utf-8-sig") as f:
+        with open(file_path, 'r', encoding='utf-8-sig') as f:
reader = csv.reader(f)
headers = next(reader)
@@ -569,23 +514,26 @@ class VoterAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_mapping.html", context)
else:
form = VoterImportForm()
-
+
context = self.admin_site.each_context(request)
- context["form"] = form
- context["title"] = "Import Voters"
- context["opts"] = self.model._meta
+ context['form'] = form
+ context['title'] = "Import Voters"
+ context['opts'] = self.model._meta
return render(request, "admin/import_csv.html", context)
+
+
@admin.register(Event)
class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
- list_display = ('id', 'name', 'event_type', 'date', 'location_name', 'city', 'state', 'tenant')
- list_filter = ('tenant', 'date', 'event_type', 'city', 'state')
- search_fields = ('name', 'description', 'location_name', 'address', 'city', 'state', 'zip_code')
+ list_display = ('name', 'date', 'event_type', 'tenant', 'location_name', 'address', 'city', 'state', 'zip_code')
+ list_filter = ('tenant', 'event_type')
+ search_fields = ('name', 'location_name', 'address', 'city', 'state', 'zip_code')
+ inlines = [VolunteerEventInline]
change_list_template = "admin/event_change_list.html"
-
+
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
from core.models import Tenant
- extra_context["tenants"] = Tenant.objects.all()
+ extra_context['tenants'] = Tenant.objects.all()
return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
@@ -614,15 +562,27 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
preview_data = []
for row in reader:
total_count += 1
- date = row.get(mapping.get('date'))
- event_type_name = row.get(mapping.get('event_type'))
event_name = row.get(mapping.get('name'))
+ event_date = row.get(mapping.get('date'))
+
exists = False
- if date and event_type_name:
- q = Event.objects.filter(tenant=tenant, date=date, event_type__name=event_type_name)
- if event_name:
- q = q.filter(name=event_name)
- exists = q.exists()
+ if event_name and event_date:
+ try:
+ # Assuming name and date uniquely identify an event
+ # This might need refinement based on actual data uniqueness requirements
+ if '/' in event_date:
+ dt = datetime.strptime(event_date, '%m/%d/%Y').date()
+ elif '-' in event_date:
+ dt = datetime.strptime(event_date, '%Y-%m-%d').date()
+ else:
+ dt = None
+
+ if dt:
+ exists = Event.objects.filter(tenant=tenant, name=event_name, date=dt).exists()
+
+ except ValueError:
+ # Handle cases where date parsing fails
+ pass
if exists:
update_count += 1
@@ -634,8 +594,8 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if len(preview_data) < 10:
preview_data.append({
'action': action,
- 'identifier': f"{event_name or 'No Name'} ({date} - {event_type_name})",
- 'details': f"{row.get(mapping.get('city', '')) or ''}, {row.get(mapping.get('state', '')) or ''}"
+ 'identifier': f"Event: {event_name} (Date: {event_date})",
+ 'details': f"Location: {row.get(mapping.get('location_name', '')) or ''}"
})
context = self.admin_site.each_context(request)
context.update({
@@ -653,7 +613,7 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+            return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
@@ -665,67 +625,72 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
+ count = 0
+ errors = 0
+ failed_rows = []
with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.DictReader(f)
- count = 0
- errors = 0
- failed_rows = []
for row in reader:
try:
- date = row.get(mapping.get('date')) if mapping.get('date') else None
- event_type_name = row.get(mapping.get('event_type')) if mapping.get('event_type') else None
- description = row.get(mapping.get('description')) if mapping.get('description') else None
- location_name = row.get(mapping.get('location_name')) if mapping.get('location_name') else None
- name = row.get(mapping.get('name')) if mapping.get('name') else None
- start_time = row.get(mapping.get('start_time')) if mapping.get('start_time') else None
- end_time = row.get(mapping.get('end_time')) if mapping.get('end_time') else None
- address = row.get(mapping.get('address')) if mapping.get('address') else None
- city = row.get(mapping.get('city')) if mapping.get('city') else None
- state = row.get(mapping.get('state')) if mapping.get('state') else None
- zip_code = row.get(mapping.get('zip_code')) if mapping.get('zip_code') else None
- latitude = row.get(mapping.get('latitude')) if mapping.get('latitude') else None
- longitude = row.get(mapping.get('longitude')) if mapping.get('longitude') else None
+ event_name = row.get(mapping.get('name'))
+ event_date = row.get(mapping.get('date'))
+ event_type_name = row.get(mapping.get('event_type'))
- if not date or not event_type_name:
- row["Import Error"] = "Missing date or event type"
+ if not event_name or not event_date or not event_type_name:
+ row["Import Error"] = "Missing event name, date, or type"
+ failed_rows.append(row)
+ errors += 1
+ continue
+
+ # Date parsing for event_date
+ try:
+ if '/' in event_date:
+ parsed_date = datetime.strptime(event_date, '%m/%d/%Y').date()
+ elif '-' in event_date:
+ parsed_date = datetime.strptime(event_date, '%Y-%m-%d').date()
+ else:
+ row["Import Error"] = "Invalid date format"
+ failed_rows.append(row)
+ errors += 1
+ continue
+ except ValueError:
+ row["Import Error"] = "Invalid date format"
failed_rows.append(row)
errors += 1
continue
- event_type, _ = EventType.objects.get_or_create(
- tenant=tenant,
- name=event_type_name
- )
-
- defaults = {}
- if description and description.strip():
- defaults['description'] = description
- if location_name and location_name.strip():
- defaults['location_name'] = location_name
- if name and name.strip():
- defaults['name'] = name
- if start_time and start_time.strip():
- defaults['start_time'] = start_time
- if end_time and end_time.strip():
- defaults['end_time'] = end_time
- if address and address.strip():
- defaults['address'] = address
- if city and city.strip():
- defaults['city'] = city
- if state and state.strip():
- defaults['state'] = state
- if zip_code and zip_code.strip():
- defaults['zip_code'] = zip_code
- if latitude and latitude.strip():
- defaults['latitude'] = latitude
- if longitude and longitude.strip():
- defaults['longitude'] = longitude
+ event_type_obj, _ = EventType.objects.get_or_create(tenant=tenant, name=event_type_name)
+
+ defaults = {
+ 'date': parsed_date,
+ 'event_type': event_type_obj,
+ 'description': row.get(mapping.get('description')) or '',
+ 'location_name': row.get(mapping.get('location_name')) or '',
+ 'address': row.get(mapping.get('address')) or '',
+ 'city': row.get(mapping.get('city')) or '',
+ 'state': row.get(mapping.get('state')) or '',
+ 'zip_code': row.get(mapping.get('zip_code')) or '',
+ 'latitude': row.get(mapping.get('latitude')) or None,
+ 'longitude': row.get(mapping.get('longitude')) or None,
+ }
+
+ # Handle start_time and end_time
+ start_time_str = row.get(mapping.get('start_time'))
+ if start_time_str:
+ try:
+ defaults['start_time'] = datetime.strptime(start_time_str, '%H:%M').time()
+ except ValueError:
+ logger.warning(f"Invalid start_time format: {start_time_str}. Skipping.")
+ end_time_str = row.get(mapping.get('end_time'))
+ if end_time_str:
+ try:
+ defaults['end_time'] = datetime.strptime(end_time_str, '%H:%M').time()
+ except ValueError:
+ logger.warning(f"Invalid end_time format: {end_time_str}. Skipping.")
- defaults['date'] = date
- defaults['event_type'] = event_type
Event.objects.update_or_create(
tenant=tenant,
- name=name or '',
+ name=event_name,
defaults=defaults
)
count += 1
@@ -738,17 +703,15 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if os.path.exists(file_path):
os.remove(file_path)
self.message_user(request, f"Successfully imported {count} events.")
- # Optimization: Limit error log size in session to avoid overflow
request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
request.session.modified = True
- logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
if errors > 0:
error_url = reverse("admin:event-download-errors")
self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
- return redirect("..")
+            return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+            return redirect("..")
else:
form = EventImportForm(request.POST, request.FILES)
if form.is_valid():
@@ -757,7 +720,7 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+                    return redirect("..")
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
for chunk in csv_file.chunks():
@@ -788,21 +751,18 @@ class EventAdmin(BaseImportAdminMixin, admin.ModelAdmin):
context['opts'] = self.model._meta
return render(request, "admin/import_csv.html", context)
+
@admin.register(Volunteer)
class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
- list_display = ('first_name', 'last_name', 'email', 'phone', 'tenant', 'user')
- ordering = ("last_name", "first_name")
+ list_display = ('first_name', 'last_name', 'email', 'phone', 'tenant')
list_filter = ('tenant',)
- fields = ('tenant', 'user', 'first_name', 'last_name', 'email', 'phone', 'notes', 'interests')
search_fields = ('first_name', 'last_name', 'email', 'phone')
- inlines = [VolunteerEventInline]
- filter_horizontal = ('interests',)
change_list_template = "admin/volunteer_change_list.html"
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
from core.models import Tenant
- extra_context["tenants"] = Tenant.objects.all()
+ extra_context['tenants'] = Tenant.objects.all()
return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
@@ -822,6 +782,7 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
mapping = {}
for field_name, _ in VOLUNTEER_MAPPABLE_FIELDS:
mapping[field_name] = request.POST.get(f'map_{field_name}')
+
try:
with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.DictReader(f)
@@ -832,18 +793,23 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
for row in reader:
total_count += 1
email = row.get(mapping.get('email'))
- exists = Volunteer.objects.filter(tenant=tenant, email=email).exists()
+
+ exists = False
+ if email:
+ exists = Volunteer.objects.filter(tenant=tenant, email=email).exists()
+
if exists:
update_count += 1
action = 'update'
else:
create_count += 1
action = 'create'
+
if len(preview_data) < 10:
preview_data.append({
'action': action,
- 'identifier': email,
- 'details': f"{row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}".strip()
+ 'identifier': f"Volunteer: {email}",
+ 'details': f"Name: {row.get(mapping.get('first_name', '')) or ''} {row.get(mapping.get('last_name', '')) or ''}"
})
context = self.admin_site.each_context(request)
context.update({
@@ -861,21 +827,23 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+            return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
tenant_id = request.POST.get('tenant')
tenant = Tenant.objects.get(id=tenant_id)
+
mapping = {}
for field_name, _ in VOLUNTEER_MAPPABLE_FIELDS:
mapping[field_name] = request.POST.get(f'map_{field_name}')
+
try:
+ count = 0
+ errors = 0
+ failed_rows = []
with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.DictReader(f)
- count = 0
- errors = 0
- failed_rows = []
for row in reader:
try:
email = row.get(mapping.get('email'))
@@ -884,17 +852,18 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
failed_rows.append(row)
errors += 1
continue
- volunteer_data = {}
- for field_name, csv_col in mapping.items():
- if csv_col:
- val = row.get(csv_col)
- if val is not None and str(val).strip() != '':
- if field_name == 'email': continue
- volunteer_data[field_name] = val
+
+ defaults = {
+ 'first_name': row.get(mapping.get('first_name')) or '',
+ 'last_name': row.get(mapping.get('last_name')) or '',
+ 'phone': format_phone_number(row.get(mapping.get('phone')) or ''),
+ 'notes': row.get(mapping.get('notes')) or '',
+ }
+
Volunteer.objects.update_or_create(
tenant=tenant,
email=email,
- defaults=volunteer_data
+ defaults=defaults
)
count += 1
except Exception as e:
@@ -902,34 +871,38 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
row["Import Error"] = str(e)
failed_rows.append(row)
errors += 1
+
if os.path.exists(file_path):
os.remove(file_path)
self.message_user(request, f"Successfully imported {count} volunteers.")
- # Optimization: Limit error log size in session to avoid overflow
request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
request.session.modified = True
if errors > 0:
error_url = reverse("admin:volunteer-download-errors")
self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
- return redirect("..")
+            return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+            return redirect("..")
else:
form = VolunteerImportForm(request.POST, request.FILES)
if form.is_valid():
csv_file = request.FILES['file']
tenant = form.cleaned_data['tenant']
+
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+                    return redirect("..")
+
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
for chunk in csv_file.chunks():
tmp.write(chunk)
file_path = tmp.name
+
with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.reader(f)
headers = next(reader)
+
context = self.admin_site.each_context(request)
context.update({
'title': "Map Volunteer Fields",
@@ -943,24 +916,26 @@ class VolunteerAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_mapping.html", context)
else:
form = VolunteerImportForm()
+
context = self.admin_site.each_context(request)
context['form'] = form
context['title'] = "Import Volunteers"
context['opts'] = self.model._meta
return render(request, "admin/import_csv.html", context)
-@admin.register(VolunteerEvent)
-class VolunteerEventAdmin(admin.ModelAdmin):
- list_display = ('volunteer', 'event', 'role_type')
- list_filter = ('event__tenant', 'event', 'role_type')
- autocomplete_fields = ["volunteer", "event"]
@admin.register(EventParticipation)
class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
- list_display = ('voter', 'event', 'participation_status')
- list_filter = ('event__tenant', 'event', 'participation_status')
- autocomplete_fields = ["voter", "event"]
- change_list_template = "admin/eventparticipation_change_list.html"
+ list_display = ('event', 'voter', 'participation_status')
+ list_filter = ('event', 'participation_status', 'voter__tenant')
+ search_fields = ('event__name', 'voter__first_name', 'voter__last_name', 'voter__voter_id')
+ change_list_template = 'admin/eventparticipation_change_list.html'
+
+ def changelist_view(self, request, extra_context=None):
+ extra_context = extra_context or {}
+ from core.models import Tenant
+ extra_context['tenants'] = Tenant.objects.all()
+ return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
urls = super().get_urls()
@@ -991,10 +966,17 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
voter_id = row.get(mapping.get('voter_id'))
event_name = row.get(mapping.get('event_name'))
+ # Extract first_name and last_name from CSV based on mapping
+ csv_first_name = row.get(mapping.get('first_name'), '')
+ csv_last_name = row.get(mapping.get('last_name'), '')
+ csv_full_name = f"{csv_first_name} {csv_last_name}".strip()
+
exists = False
+ voter_full_name = "N/A" # Initialize voter_full_name
if voter_id:
try:
voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
+ voter_full_name = f"{voter.first_name} {voter.last_name}" # Get voter's full name
if event_name:
exists = EventParticipation.objects.filter(voter=voter, event__name=event_name).exists()
except Voter.DoesNotExist:
@@ -1010,7 +992,8 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if len(preview_data) < 10:
preview_data.append({
'action': action,
- 'identifier': f"Voter: {voter_id}",
+ 'csv_full_name': csv_full_name, # Add CSV name
+ 'identifier': f"Voter: {voter_full_name} (ID: {voter_id})" if voter_id else "N/A", # Include full name
'details': f"Participation: {row.get(mapping.get('participation_status', '')) or ''}"
})
context = self.admin_site.each_context(request)
@@ -1029,7 +1012,7 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
@@ -1113,10 +1096,10 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if errors > 0:
error_url = reverse("admin:eventparticipation-download-errors")
self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
- return redirect("..")
+ return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
else:
form = EventParticipationImportForm(request.POST, request.FILES)
if form.is_valid():
@@ -1125,7 +1108,7 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
for chunk in csv_file.chunks():
@@ -1158,11 +1141,16 @@ class EventParticipationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
@admin.register(Donation)
class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
- list_display = ('id', 'voter', 'date', 'amount', 'method')
- list_filter = ('voter__tenant', 'date', 'method')
- search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id')
- autocomplete_fields = ["voter"]
- change_list_template = "admin/donation_change_list.html"
+ list_display = ('voter', 'date', 'amount', 'method')
+ list_filter = ('voter__tenant', 'method')
+ search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'method__name')
+ change_list_template = 'admin/donation_change_list.html'
+
+ def changelist_view(self, request, extra_context=None):
+ extra_context = extra_context or {}
+ from core.models import Tenant
+ extra_context['tenants'] = Tenant.objects.all()
+ return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
urls = super().get_urls()
@@ -1191,11 +1179,10 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
for row in reader:
total_count += 1
voter_id = row.get(mapping.get('voter_id'))
- date = row.get(mapping.get('date'))
- amount = row.get(mapping.get('amount'))
+
exists = False
- if voter_id and date and amount:
- exists = Donation.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, date=date, amount=amount).exists()
+ if voter_id:
+ exists = Voter.objects.filter(tenant=tenant, voter_id=voter_id).exists()
if exists:
update_count += 1
@@ -1207,8 +1194,8 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if len(preview_data) < 10:
preview_data.append({
'action': action,
- 'identifier': f"Voter: {voter_id}",
- 'details': f"Date: {date}, Amount: {amount}"
+ 'identifier': f"Voter ID: {voter_id}",
+ 'details': f"Amount: {row.get(mapping.get('amount', '')) or ''}, Method: {row.get(mapping.get('method', '')) or ''}"
})
context = self.admin_site.each_context(request)
context.update({
@@ -1226,7 +1213,7 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
@@ -1238,19 +1225,29 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
+ count = 0
+ errors = 0
+ failed_rows = []
with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.DictReader(f)
- count = 0
- errors = 0
- failed_rows = []
for row in reader:
try:
- voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None
+ voter_id = row.get(mapping.get('voter_id'))
+ date_str = row.get(mapping.get('date'))
+ amount_str = row.get(mapping.get('amount'))
+ method_name = row.get(mapping.get('method'))
+
if not voter_id:
row["Import Error"] = "Missing voter ID"
failed_rows.append(row)
errors += 1
continue
+
+ if not date_str or not amount_str:
+ row["Import Error"] = "Missing date or amount"
+ failed_rows.append(row)
+ errors += 1
+ continue
try:
voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
@@ -1259,33 +1256,38 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
failed_rows.append(row)
errors += 1
continue
+
+ try:
+ if '/' in date_str:
+ parsed_date = datetime.strptime(date_str, '%m/%d/%Y').date()
+ elif '-' in date_str:
+ parsed_date = datetime.strptime(date_str, '%Y-%m-%d').date()
+ else:
+ row["Import Error"] = "Invalid date format"
+ failed_rows.append(row)
+ errors += 1
+ continue
+ except ValueError:
+ row["Import Error"] = "Invalid date format"
+ failed_rows.append(row)
+ errors += 1
+ continue
- date = row.get(mapping.get('date'))
- amount = row.get(mapping.get('amount'))
- method_name = row.get(mapping.get('method'))
-
- if not date or not amount:
- row["Import Error"] = "Missing date or amount"
+ try:
+ amount = Decimal(amount_str)
+ except InvalidOperation:
+ row["Import Error"] = "Invalid amount format"
failed_rows.append(row)
errors += 1
continue
- method = None
- if method_name and method_name.strip():
- method, _ = DonationMethod.objects.get_or_create(
- tenant=tenant,
- name=method_name
- )
-
- defaults = {}
- if method:
- defaults['method'] = method
+ donation_method, _ = DonationMethod.objects.get_or_create(tenant=tenant, name=method_name)
- Donation.objects.update_or_create(
+ Donation.objects.create(
voter=voter,
- date=date,
+ date=parsed_date,
amount=amount,
- defaults=defaults
+ method=donation_method
)
count += 1
except Exception as e:
@@ -1297,17 +1299,15 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if os.path.exists(file_path):
os.remove(file_path)
self.message_user(request, f"Successfully imported {count} donations.")
- # Optimization: Limit error log size in session to avoid overflow
request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
request.session.modified = True
- logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
if errors > 0:
error_url = reverse("admin:donation-download-errors")
self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
- return redirect("..")
+ return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
else:
form = DonationImportForm(request.POST, request.FILES)
if form.is_valid():
@@ -1316,7 +1316,7 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
for chunk in csv_file.chunks():
@@ -1349,11 +1349,17 @@ class DonationAdmin(BaseImportAdminMixin, admin.ModelAdmin):
@admin.register(Interaction)
class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
- list_display = ('id', 'voter', 'volunteer', 'type', 'date', 'description')
- list_filter = ('voter__tenant', 'type', 'date', 'volunteer')
+ list_display = ('voter', 'date', 'type', 'description', 'volunteer')
+ list_filter = ('voter__tenant', 'type', 'volunteer')
search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'description', 'volunteer__first_name', 'volunteer__last_name')
- autocomplete_fields = ["voter", "volunteer"]
- change_list_template = "admin/interaction_change_list.html"
+ autocomplete_fields = ['voter', 'volunteer']
+ change_list_template = 'admin/interaction_change_list.html'
+
+ def changelist_view(self, request, extra_context=None):
+ extra_context = extra_context or {}
+ from core.models import Tenant
+ extra_context['tenants'] = Tenant.objects.all()
+ return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
urls = super().get_urls()
@@ -1382,10 +1388,11 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
for row in reader:
total_count += 1
voter_id = row.get(mapping.get('voter_id'))
- date = row.get(mapping.get('date'))
+ volunteer_email = row.get(mapping.get('volunteer_email'))
+
exists = False
- if voter_id and date:
- exists = Interaction.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, date=date).exists()
+ if voter_id:
+ exists = Voter.objects.filter(tenant=tenant, voter_id=voter_id).exists()
if exists:
update_count += 1
@@ -1397,8 +1404,8 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if len(preview_data) < 10:
preview_data.append({
'action': action,
- 'identifier': f"Voter: {voter_id}",
- 'details': f"Date: {date}, Desc: {row.get(mapping.get('description', '')) or ''}"
+ 'identifier': f"Voter ID: {voter_id}",
+ 'details': f"Type: {row.get(mapping.get('type', '')) or ''}, Volunteer: {volunteer_email or ''}"
})
context = self.admin_site.each_context(request)
context.update({
@@ -1416,7 +1423,7 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
@@ -1428,19 +1435,29 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
+ count = 0
+ errors = 0
+ failed_rows = []
with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.DictReader(f)
- count = 0
- errors = 0
- failed_rows = []
for row in reader:
try:
- voter_id = row.get(mapping.get('voter_id')) if mapping.get('voter_id') else None
+ voter_id = row.get(mapping.get('voter_id'))
+ volunteer_email = row.get(mapping.get('volunteer_email'))
+ date_str = row.get(mapping.get('date'))
+ type_name = row.get(mapping.get('type'))
+
if not voter_id:
row["Import Error"] = "Missing voter ID"
failed_rows.append(row)
errors += 1
continue
+
+ if not date_str or not type_name:
+ row["Import Error"] = "Missing date or type"
+ failed_rows.append(row)
+ errors += 1
+ continue
try:
voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
@@ -1449,46 +1466,39 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
failed_rows.append(row)
errors += 1
continue
+
+ volunteer = None
+ if volunteer_email:
+ try:
+ volunteer = Volunteer.objects.get(tenant=tenant, email=volunteer_email)
+ except Volunteer.DoesNotExist:
+ pass # Volunteer is optional
- date = row.get(mapping.get('date'))
- type_name = row.get(mapping.get('type'))
- volunteer_email = row.get(mapping.get('volunteer_email'))
- description = row.get(mapping.get('description'))
- notes = row.get(mapping.get('notes'))
-
- if not date or not description:
- row["Import Error"] = "Missing date or description"
+ try:
+ if '/' in date_str:
+ parsed_date = datetime.strptime(date_str, '%m/%d/%Y').date()
+ elif '-' in date_str:
+ parsed_date = datetime.strptime(date_str, '%Y-%m-%d').date()
+ else:
+ row["Import Error"] = "Invalid date format"
+ failed_rows.append(row)
+ errors += 1
+ continue
+ except ValueError:
+ row["Import Error"] = "Invalid date format"
failed_rows.append(row)
errors += 1
continue
-
- volunteer = None
- if volunteer_email and volunteer_email.strip():
- try:
- volunteer = Volunteer.objects.get(tenant=tenant, email=volunteer_email.strip())
- except Volunteer.DoesNotExist:
- pass
- interaction_type = None
- if type_name and type_name.strip():
- interaction_type, _ = InteractionType.objects.get_or_create(
- tenant=tenant,
- name=type_name
- )
-
- defaults = {}
- if volunteer:
- defaults['volunteer'] = volunteer
- if interaction_type:
- defaults['type'] = interaction_type
- if description and description.strip():
- defaults['description'] = description
- if notes and notes.strip():
- defaults['notes'] = notes
- Interaction.objects.update_or_create(
+ interaction_type, _ = InteractionType.objects.get_or_create(tenant=tenant, name=type_name)
+
+ Interaction.objects.create(
voter=voter,
- date=date,
- defaults=defaults
+ volunteer=volunteer,
+ date=parsed_date,
+ type=interaction_type,
+ description=row.get(mapping.get('description')) or '',
+ notes=row.get(mapping.get('notes')) or ''
)
count += 1
except Exception as e:
@@ -1500,17 +1510,15 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if os.path.exists(file_path):
os.remove(file_path)
self.message_user(request, f"Successfully imported {count} interactions.")
- # Optimization: Limit error log size in session to avoid overflow
request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
request.session.modified = True
- logger.info(f"Stored {len(failed_rows)} failed rows in session for {self.model._meta.model_name}")
if errors > 0:
error_url = reverse("admin:interaction-download-errors")
self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
- return redirect("..")
+ return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
else:
form = InteractionImportForm(request.POST, request.FILES)
if form.is_valid():
@@ -1519,7 +1527,7 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
for chunk in csv_file.chunks():
@@ -1552,11 +1560,16 @@ class InteractionAdmin(BaseImportAdminMixin, admin.ModelAdmin):
@admin.register(VoterLikelihood)
class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
- list_display = ('id', 'voter', 'election_type', 'likelihood')
+ list_display = ('voter', 'election_type', 'likelihood')
list_filter = ('voter__tenant', 'election_type', 'likelihood')
- search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id')
- autocomplete_fields = ["voter"]
- change_list_template = "admin/voterlikelihood_change_list.html"
+ search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'election_type__name')
+ change_list_template = 'admin/voterlikelihood_change_list.html'
+
+ def changelist_view(self, request, extra_context=None):
+ extra_context = extra_context or {}
+ from core.models import Tenant
+ extra_context['tenants'] = Tenant.objects.all()
+ return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
urls = super().get_urls()
@@ -1566,214 +1579,147 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
]
return my_urls + urls
-
def import_likelihoods(self, request):
if request.method == "POST":
if "_preview" in request.POST:
file_path = request.POST.get('file_path')
tenant_id = request.POST.get('tenant')
tenant = Tenant.objects.get(id=tenant_id)
- mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
-
+ mapping = {}
+ for field_name, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS:
+ mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
- with open(file_path, 'r', encoding='utf-8-sig') as f:
- total_count = sum(1 for line in f) - 1
- f.seek(0)
+ with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.DictReader(f)
- preview_rows = []
- voter_ids_for_preview = set()
- election_types_for_preview = set()
-
- v_id_col = mapping.get('voter_id')
- et_col = mapping.get('election_type')
-
- if not v_id_col or not et_col:
- raise ValueError("Missing mapping for Voter ID or Election Type")
-
- for i, row in enumerate(reader):
- if i < 10:
- preview_rows.append(row)
- v_id = row.get(v_id_col)
- et_name = row.get(et_col)
- if v_id: voter_ids_for_preview.add(str(v_id).strip())
- if et_name: election_types_for_preview.add(str(et_name).strip())
- else:
- break
-
- existing_likelihoods = set(VoterLikelihood.objects.filter(
- voter__tenant=tenant,
- voter__voter_id__in=voter_ids_for_preview,
- election_type__name__in=election_types_for_preview
- ).values_list("voter__voter_id", "election_type__name"))
-
+ total_count = 0
+ create_count = 0
+ update_count = 0
preview_data = []
- for row in preview_rows:
- v_id = str(row.get(v_id_col, '')).strip()
- et_name = str(row.get(et_col, '')).strip()
- action = "update" if (v_id, et_name) in existing_likelihoods else "create"
- preview_data.append({
- "action": action,
- "identifier": f"Voter: {v_id}, Election: {et_name}",
- "details": f"Likelihood: {row.get(mapping.get('likelihood', '')) or ''}"
- })
-
+ for row in reader:
+ total_count += 1
+ voter_id = row.get(mapping.get('voter_id'))
+
+ exists = False
+ if voter_id:
+ exists = Voter.objects.filter(tenant=tenant, voter_id=voter_id).exists()
+
+ if exists:
+ update_count += 1
+ action = 'update'
+ else:
+ create_count += 1
+ action = 'create'
+
+ if len(preview_data) < 10:
+ preview_data.append({
+ 'action': action,
+ 'identifier': f"Voter ID: {voter_id}",
+ 'details': f"Likelihood: {row.get(mapping.get('likelihood', '')) or ''}"
+ })
context = self.admin_site.each_context(request)
context.update({
- "title": "Import Preview",
- "total_count": total_count,
- "create_count": "N/A",
- "update_count": "N/A",
- "preview_data": preview_data,
- "mapping": mapping,
- "file_path": file_path,
- "tenant_id": tenant_id,
- "action_url": request.path,
- "opts": self.model._meta,
+ 'title': "Import Preview",
+ 'total_count': total_count,
+ 'create_count': create_count,
+ 'update_count': update_count,
+ 'preview_data': preview_data,
+ 'mapping': mapping,
+ 'file_path': file_path,
+ 'tenant_id': tenant_id,
+ 'action_url': request.path,
+ 'opts': self.model._meta,
})
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
tenant_id = request.POST.get('tenant')
tenant = Tenant.objects.get(id=tenant_id)
- mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
+
+ mapping = {}
+ for field_name, _ in VOTER_LIKELIHOOD_MAPPABLE_FIELDS:
+ mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
count = 0
- created_count = 0
- updated_count = 0
- skipped_no_change = 0
- skipped_no_id = 0
errors = 0
failed_rows = []
- batch_size = 2000
-
- likelihood_choices = dict(VoterLikelihood.LIKELIHOOD_CHOICES)
- likelihood_reverse = {v.lower(): k for k, v in likelihood_choices.items()}
- election_types = {et.name: et for et in ElectionType.objects.filter(tenant=tenant)}
+ with open(file_path, 'r', encoding='UTF-8') as f:
+ reader = csv.DictReader(f)
+ for row in reader:
+ try:
+ voter_id = row.get(mapping.get('voter_id'))
+ election_type_name = row.get(mapping.get('election_type'))
+ likelihood_val = row.get(mapping.get('likelihood'))
- with open(file_path, "r", encoding="utf-8-sig") as f:
- raw_reader = csv.reader(f)
- headers = next(raw_reader)
- h_idx = {h: i for i, h in enumerate(headers)}
-
- v_id_col = mapping.get("voter_id")
- et_col = mapping.get("election_type")
- l_col = mapping.get("likelihood")
-
- if not v_id_col or not et_col or not l_col:
- raise ValueError("Missing mapping for Voter ID, Election Type, or Likelihood")
-
- v_idx = h_idx[v_id_col]
- e_idx = h_idx[et_col]
- l_idx = h_idx[l_col]
-
- total_processed = 0
- for chunk in self.chunk_reader(raw_reader, batch_size):
- with transaction.atomic():
- voter_ids = []
- chunk_data = []
- for row in chunk:
- if len(row) <= max(v_idx, e_idx, l_idx): continue
- v_id = row[v_idx].strip()
- et_name = row[e_idx].strip()
- l_val = row[l_idx].strip()
- if v_id and et_name and l_val:
- voter_ids.append(v_id)
- chunk_data.append((v_id, et_name, l_val, row))
- else:
- skipped_no_id += 1
-
- voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")}
- et_names = [d[1] for d in chunk_data]
- existing_likelihoods = {
- (vl.voter.voter_id, vl.election_type.name): vl
- for vl in VoterLikelihood.objects.filter(
- voter__tenant=tenant,
- voter__voter_id__in=voter_ids,
- election_type__name__in=et_names
- ).only("id", "likelihood", "voter__voter_id", "election_type__name").select_related("voter", "election_type")
- }
+ if not voter_id:
+ row["Import Error"] = "Missing voter ID"
+ failed_rows.append(row)
+ errors += 1
+ continue
- to_create = []
- to_update = []
- processed_in_batch = set()
+ if not election_type_name or not likelihood_val:
+ row["Import Error"] = "Missing election type or likelihood"
+ failed_rows.append(row)
+ errors += 1
+ continue
- for v_id, et_name, l_val, row in chunk_data:
- total_processed += 1
- try:
- if (v_id, et_name) in processed_in_batch: continue
- processed_in_batch.add((v_id, et_name))
-
- voter = voters.get(v_id)
- if not voter:
- errors += 1
- continue
-
- if et_name not in election_types:
- election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=et_name)
- election_types[et_name] = election_type
- election_type = election_types[et_name]
-
- normalized_l = None
- l_val_lower = l_val.lower().replace(' ', '_')
- if l_val_lower in likelihood_choices: normalized_l = l_val_lower
- elif l_val_lower in likelihood_reverse: normalized_l = likelihood_reverse[l_val_lower]
- else:
- for k, v in likelihood_choices.items():
- if v.lower() == l_val.lower():
- normalized_l = k
- break
-
- if not normalized_l:
- errors += 1
- continue
-
- vl = existing_likelihoods.get((v_id, et_name))
- if not vl:
- to_create.append(VoterLikelihood(voter=voter, election_type=election_type, likelihood=normalized_l))
- created_count += 1
- elif vl.likelihood != normalized_l:
- vl.likelihood = normalized_l
- to_update.append(vl)
- updated_count += 1
- else:
- skipped_no_change += 1
-
- count += 1
- except Exception as e:
- errors += 1
-
- if to_create: VoterLikelihood.objects.bulk_create(to_create, batch_size=batch_size)
- if to_update: VoterLikelihood.objects.bulk_update(to_update, ["likelihood"], batch_size=batch_size)
-
- print(f"DEBUG: Likelihood import progress: {total_processed} processed. {count} created/updated.")
+ try:
+ voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
+ except Voter.DoesNotExist:
+ row["Import Error"] = f"Voter {voter_id} not found"
+ failed_rows.append(row)
+ errors += 1
+ continue
+
+ election_type, _ = ElectionType.objects.get_or_create(tenant=tenant, name=election_type_name)
+ VoterLikelihood.objects.update_or_create(
+ voter=voter,
+ election_type=election_type,
+ defaults={'likelihood': likelihood_val}
+ )
+ count += 1
+ except Exception as e:
+ print(f"DEBUG: Likelihood import failed: {e}")
+ row["Import Error"] = str(e)
+ failed_rows.append(row)
+ errors += 1
+
if os.path.exists(file_path):
os.remove(file_path)
-
- self.message_user(request, f"Import complete: {count} likelihoods created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped, {errors} errors)")
- return redirect("..")
+ self.message_user(request, f"Import complete: {count} likelihoods created/updated.")
+ request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
+ request.session.modified = True
+ if errors > 0:
+ error_url = reverse("admin:voterlikelihood-download-errors")
+ self.message_user(request, mark_safe(f"Failed to import {errors} rows. Download failed records"), level=messages.WARNING)
+ return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
else:
form = VoterLikelihoodImportForm(request.POST, request.FILES)
if form.is_valid():
csv_file = request.FILES['file']
tenant = form.cleaned_data['tenant']
+
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+ return redirect("..")
+
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
- for chunk in csv_file.chunks(): tmp.write(chunk)
+ for chunk in csv_file.chunks():
+ tmp.write(chunk)
file_path = tmp.name
- with open(file_path, 'r', encoding='utf-8-sig') as f:
+
+ with open(file_path, 'r', encoding='UTF-8') as f:
reader = csv.reader(f)
headers = next(reader)
+
context = self.admin_site.each_context(request)
context.update({
'title': "Map Likelihood Fields",
@@ -1787,30 +1733,25 @@ class VoterLikelihoodAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_mapping.html", context)
else:
form = VoterLikelihoodImportForm()
+
context = self.admin_site.each_context(request)
context['form'] = form
context['title'] = "Import Likelihoods"
context['opts'] = self.model._meta
return render(request, "admin/import_csv.html", context)
-@admin.register(CampaignSettings)
-class CampaignSettingsAdmin(admin.ModelAdmin):
- list_display = ('tenant', 'donation_goal', 'twilio_from_number', 'timezone')
- list_filter = ('tenant',)
- fields = ('tenant', 'donation_goal', 'twilio_account_sid', 'twilio_auth_token', 'twilio_from_number', 'timezone')
@admin.register(VotingRecord)
class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
list_display = ('voter', 'election_date', 'election_description', 'primary_party')
- list_filter = ('voter__tenant', 'election_date', 'primary_party')
+ list_filter = ('voter__tenant', 'primary_party')
search_fields = ('voter__first_name', 'voter__last_name', 'voter__voter_id', 'election_description')
- autocomplete_fields = ["voter"]
- change_list_template = "admin/votingrecord_change_list.html"
+ change_list_template = 'admin/votingrecord_change_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
from core.models import Tenant
- extra_context["tenants"] = Tenant.objects.all()
+ extra_context['tenants'] = Tenant.objects.all()
return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
@@ -1821,204 +1762,181 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
]
return my_urls + urls
-
def import_voting_records(self, request):
if request.method == "POST":
if "_preview" in request.POST:
file_path = request.POST.get('file_path')
tenant_id = request.POST.get('tenant')
tenant = Tenant.objects.get(id=tenant_id)
- mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTING_RECORD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
-
+ mapping = {}
+ for field_name, _ in VOTING_RECORD_MAPPABLE_FIELDS:
+ mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
- with open(file_path, 'r', encoding='utf-8-sig') as f:
- total_count = sum(1 for line in f) - 1
- f.seek(0)
+                    with open(file_path, 'r', encoding='utf-8-sig') as f:
reader = csv.DictReader(f)
- preview_rows = []
- voter_ids_for_preview = set()
-
- v_id_col = mapping.get('voter_id')
- ed_col = mapping.get('election_date')
- desc_col = mapping.get('election_description')
-
- if not v_id_col or not ed_col or not desc_col:
- raise ValueError("Missing mapping for Voter ID, Election Date, or Description")
-
- for i, row in enumerate(reader):
- if i < 10:
- preview_rows.append(row)
- v_id = row.get(v_id_col)
- if v_id: voter_ids_for_preview.add(str(v_id).strip())
- else:
- break
-
- existing_records = set(VotingRecord.objects.filter(
- voter__tenant=tenant,
- voter__voter_id__in=voter_ids_for_preview
- ).values_list("voter__voter_id", "election_date", "election_description"))
-
+ total_count = 0
+ create_count = 0
+ update_count = 0
preview_data = []
- for row in preview_rows:
- v_id = str(row.get(v_id_col, '')).strip()
- e_date_raw = row.get(ed_col)
- e_desc = str(row.get(desc_col, '')).strip()
+ for row in reader:
+ total_count += 1
+ voter_id = row.get(mapping.get('voter_id'))
+ election_date = row.get(mapping.get('election_date'))
- e_date = None
- if e_date_raw:
- for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
- try:
- e_date = datetime.strptime(str(e_date_raw).strip(), fmt).date()
- break
- except: continue
+ exists = False
+ if voter_id and election_date:
+ try:
+ # Assuming voter_id and election_date uniquely identify a voting record
+ # This might need refinement based on actual data uniqueness requirements
+ if '/' in election_date:
+ dt = datetime.strptime(election_date, '%m/%d/%Y').date()
+ elif '-' in election_date:
+ dt = datetime.strptime(election_date, '%Y-%m-%d').date()
+ else:
+ dt = None
+
+ if dt:
+ exists = VotingRecord.objects.filter(voter__tenant=tenant, voter__voter_id=voter_id, election_date=dt).exists()
+
+ except ValueError:
+ # Handle cases where date parsing fails
+ pass
- action = "update" if (v_id, e_date, e_desc) in existing_records else "create"
- preview_data.append({
- "action": action,
- "identifier": f"Voter: {v_id}, Election: {e_desc}",
- "details": f"Date: {e_date or e_date_raw}"
- })
-
+ if exists:
+ update_count += 1
+ action = 'update'
+ else:
+ create_count += 1
+ action = 'create'
+
+ if len(preview_data) < 10:
+ preview_data.append({
+ 'action': action,
+ 'identifier': f"Voter ID: {voter_id} (Election: {election_date})",
+ 'details': f"Party: {row.get(mapping.get('primary_party', '')) or ''}"
+ })
context = self.admin_site.each_context(request)
context.update({
- "title": "Import Preview",
- "total_count": total_count,
- "create_count": "N/A",
- "update_count": "N/A",
- "preview_data": preview_data,
- "mapping": mapping,
- "file_path": file_path,
- "tenant_id": tenant_id,
- "action_url": request.path,
- "opts": self.model._meta,
+ 'title': "Import Preview",
+ 'total_count': total_count,
+ 'create_count': create_count,
+ 'update_count': update_count,
+ 'preview_data': preview_data,
+ 'mapping': mapping,
+ 'file_path': file_path,
+ 'tenant_id': tenant_id,
+ 'action_url': request.path,
+ 'opts': self.model._meta,
})
return render(request, "admin/import_preview.html", context)
except Exception as e:
self.message_user(request, f"Error processing preview: {e}", level=messages.ERROR)
- return redirect("..")
+                    return redirect("..")
elif "_import" in request.POST:
file_path = request.POST.get('file_path')
tenant_id = request.POST.get('tenant')
tenant = Tenant.objects.get(id=tenant_id)
- mapping = {k: request.POST.get(f"map_{k}") for k, _ in VOTING_RECORD_MAPPABLE_FIELDS if request.POST.get(f"map_{k}")}
+
+ mapping = {}
+ for field_name, _ in VOTING_RECORD_MAPPABLE_FIELDS:
+ mapping[field_name] = request.POST.get(f'map_{field_name}')
try:
count = 0
- created_count = 0
- updated_count = 0
- skipped_no_change = 0
errors = 0
- batch_size = 2000
+ failed_rows = []
+                    with open(file_path, 'r', encoding='utf-8-sig') as f:
+ reader = csv.DictReader(f)
+ for row in reader:
+ try:
+ voter_id = row.get(mapping.get('voter_id'))
+ election_date_str = row.get(mapping.get('election_date'))
+ election_description = row.get(mapping.get('election_description'))
+ primary_party = row.get(mapping.get('primary_party'))
- with open(file_path, "r", encoding="utf-8-sig") as f:
- raw_reader = csv.reader(f)
- headers = next(raw_reader)
- h_idx = {h: i for i, h in enumerate(headers)}
-
- v_id_col = mapping.get("voter_id")
- ed_col = mapping.get("election_date")
- desc_col = mapping.get("election_description")
- party_col = mapping.get("primary_party")
-
- if not v_id_col or not ed_col or not desc_col:
- raise ValueError("Missing mapping for Voter ID, Election Date, or Description")
-
- v_idx = h_idx[v_id_col]
- ed_idx = h_idx[ed_col]
- desc_idx = h_idx[desc_col]
- p_idx = h_idx.get(party_col)
+ if not voter_id:
+ row["Import Error"] = "Missing voter ID"
+ failed_rows.append(row)
+ errors += 1
+ continue
- total_processed = 0
- for chunk in self.chunk_reader(raw_reader, batch_size):
- with transaction.atomic():
- voter_ids = [row[v_idx].strip() for row in chunk if len(row) > v_idx and row[v_idx].strip()]
- voters = {v.voter_id: v for v in Voter.objects.filter(tenant=tenant, voter_id__in=voter_ids).only("id", "voter_id")}
+ if not election_date_str or not election_description:
+ row["Import Error"] = "Missing election date or description"
+ failed_rows.append(row)
+ errors += 1
+ continue
- existing_records = {
- (vr.voter.voter_id, vr.election_date, vr.election_description): vr
- for vr in VotingRecord.objects.filter(
- voter__tenant=tenant,
- voter__voter_id__in=voter_ids
- ).only("id", "election_date", "election_description", "voter__voter_id").select_related("voter")
- }
-
- to_create = []
- to_update = []
- processed_in_batch = set()
+ try:
+ voter = Voter.objects.get(tenant=tenant, voter_id=voter_id)
+ except Voter.DoesNotExist:
+ row["Import Error"] = f"Voter {voter_id} not found"
+ failed_rows.append(row)
+ errors += 1
+ continue
- for row in chunk:
- total_processed += 1
- try:
- if len(row) <= max(v_idx, ed_idx, desc_idx): continue
- v_id = row[v_idx].strip()
- raw_ed = row[ed_idx].strip()
- desc = row[desc_idx].strip()
- party = row[p_idx].strip() if p_idx is not None and len(row) > p_idx else ""
-
- if not v_id or not raw_ed or not desc: continue
-
- if (v_id, raw_ed, desc) in processed_in_batch: continue
- processed_in_batch.add((v_id, raw_ed, desc))
-
- voter = voters.get(v_id)
- if not voter:
- errors += 1
- continue
-
- e_date = None
- for fmt in ["%Y-%m-%d", "%m/%d/%Y", "%d/%m/%Y", "%Y/%m/%d"]:
- try:
- e_date = datetime.strptime(raw_ed, fmt).date()
- break
- except: continue
-
- if not e_date:
- errors += 1
- continue
-
- vr = existing_records.get((v_id, e_date, desc))
- if not vr:
- to_create.append(VotingRecord(voter=voter, election_date=e_date, election_description=desc, primary_party=party))
- created_count += 1
- elif vr.primary_party != party:
- vr.primary_party = party
- to_update.append(vr)
- updated_count += 1
- else:
- skipped_no_change += 1
-
- count += 1
- except Exception as e:
+ try:
+ if '/' in election_date_str:
+ parsed_election_date = datetime.strptime(election_date_str, '%m/%d/%Y').date()
+ elif '-' in election_date_str:
+ parsed_election_date = datetime.strptime(election_date_str, '%Y-%m-%d').date()
+ else:
+ row["Import Error"] = "Invalid date format"
+ failed_rows.append(row)
errors += 1
+ continue
+ except ValueError:
+ row["Import Error"] = "Invalid date format"
+ failed_rows.append(row)
+ errors += 1
+ continue
- if to_create: VotingRecord.objects.bulk_create(to_create, batch_size=batch_size)
- if to_update: VotingRecord.objects.bulk_update(to_update, ["primary_party"], batch_size=batch_size)
-
- print(f"DEBUG: Voting record import progress: {total_processed} processed. {count} created/updated.")
-
+                            VotingRecord.objects.update_or_create(
+                                voter=voter,
+                                election_date=parsed_election_date,
+                                election_description=election_description,
+                                defaults={
+                                    'primary_party': primary_party or ''
+                                }
+                            )
+ count += 1
+ except Exception as e:
+ logger.error(f"Error importing: {e}")
+ row["Import Error"] = str(e)
+ failed_rows.append(row)
+ errors += 1
+
if os.path.exists(file_path):
os.remove(file_path)
-
- self.message_user(request, f"Import complete: {count} voting records created/updated. ({created_count} new, {updated_count} updated, {skipped_no_change} skipped, {errors} errors)")
- return redirect("..")
+ self.message_user(request, f"Successfully imported {count} voting records.")
+ request.session[f"{self.model._meta.model_name}_import_errors"] = failed_rows[:1000]
+ request.session.modified = True
+                    if errors > 0:
+                        error_url = reverse("admin:votingrecord-download-errors")
+                        self.message_user(request, mark_safe(f'Failed to import {errors} rows. <a href="{error_url}">Download failed records</a>'), level=messages.WARNING)
+                    return redirect("..")
except Exception as e:
self.message_user(request, f"Error processing file: {e}", level=messages.ERROR)
- return redirect("..")
+                    return redirect("..")
else:
form = VotingRecordImportForm(request.POST, request.FILES)
if form.is_valid():
csv_file = request.FILES['file']
tenant = form.cleaned_data['tenant']
+
if not csv_file.name.endswith('.csv'):
self.message_user(request, "Please upload a CSV file.", level=messages.ERROR)
- return redirect("..")
+                    return redirect("..")
+
with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
- for chunk in csv_file.chunks(): tmp.write(chunk)
+ for chunk in csv_file.chunks():
+ tmp.write(chunk)
file_path = tmp.name
- with open(file_path, 'r', encoding='utf-8-sig') as f:
+
+                with open(file_path, 'r', encoding='utf-8-sig') as f:
reader = csv.reader(f)
headers = next(reader)
+
context = self.admin_site.each_context(request)
context.update({
'title': "Map Voting Record Fields",
@@ -2032,8 +1950,9 @@ class VotingRecordAdmin(BaseImportAdminMixin, admin.ModelAdmin):
return render(request, "admin/import_mapping.html", context)
else:
form = VotingRecordImportForm()
+
context = self.admin_site.each_context(request)
context['form'] = form
context['title'] = "Import Voting Records"
context['opts'] = self.model._meta
- return render(request, "admin/import_csv.html", context)
+ return render(request, "admin/import_csv.html", context)
\ No newline at end of file
diff --git a/core/forms.py b/core/forms.py
index aa2c26d..e3764db 100644
--- a/core/forms.py
+++ b/core/forms.py
@@ -1,7 +1,6 @@
from django import forms
from django.contrib.auth.models import User
from .models import Voter, Interaction, Donation, VoterLikelihood, InteractionType, DonationMethod, ElectionType, Event, EventParticipation, EventType, Tenant, ParticipationStatus, Volunteer, VolunteerEvent, VolunteerRole, ScheduledCall
-from .permissions import get_user_role
class Select2MultipleWidget(forms.SelectMultiple):
"""
@@ -277,14 +276,56 @@ class EventImportForm(forms.Form):
self.fields['file'].widget.attrs.update({'class': 'form-control'})
class EventParticipationImportForm(forms.Form):
- tenant = forms.ModelChoiceField(queryset=Tenant.objects.all(), label="Campaign")
- file = forms.FileField(label="Select CSV file")
+ file = forms.FileField(label="Select CSV/Excel file")
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, event=None, **kwargs):
super().__init__(*args, **kwargs)
- self.fields['tenant'].widget.attrs.update({'class': 'form-control form-select'})
+ # No tenant field needed as event_id is passed directly
self.fields['file'].widget.attrs.update({'class': 'form-control'})
+class ParticipantMappingForm(forms.Form):
+ def __init__(self, *args, headers, tenant, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.fields['email_column'] = forms.ChoiceField(
+ choices=[(header, header) for header in headers],
+ label="Column for Email Address",
+ required=True,
+ widget=forms.Select(attrs={'class': 'form-select'})
+ )
+
+ name_choices = [('', '-- Select Name Column (Optional) --')] + [(header, header) for header in headers]
+ self.fields['name_column'] = forms.ChoiceField(
+ choices=name_choices,
+ label="Column for Participant Name",
+ required=False,
+ widget=forms.Select(attrs={'class': 'form-select'})
+ )
+
+ phone_choices = [('', '-- Select Phone Column (Optional) --')] + [(header, header) for header in headers]
+ self.fields['phone_column'] = forms.ChoiceField(
+ choices=phone_choices,
+ label="Column for Phone Number",
+ required=False,
+ widget=forms.Select(attrs={'class': 'form-select'})
+ )
+
+ participation_status_choices = [('', '-- Select Status Column (Optional) --')] + [(header, header) for header in headers]
+ self.fields['participation_status_column'] = forms.ChoiceField(
+ choices=participation_status_choices,
+ label="Column for Participation Status",
+ required=False,
+ widget=forms.Select(attrs={'class': 'form-select'})
+ )
+
+ # Optional: Add a default participation status if no column is mapped
+ self.fields['default_participation_status'] = forms.ModelChoiceField(
+ queryset=ParticipationStatus.objects.filter(tenant=tenant, is_active=True),
+ label="Default Participation Status (if no column mapped or column is empty)",
+ required=False,
+ empty_label="-- Select a Default Status --",
+ widget=forms.Select(attrs={'class': 'form-select'})
+ )
+
class DonationImportForm(forms.Form):
tenant = forms.ModelChoiceField(queryset=Tenant.objects.all(), label="Campaign")
file = forms.FileField(label="Select CSV file")
@@ -456,4 +497,5 @@ class VolunteerProfileForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field in self.fields.values():
- field.widget.attrs.update({'class': 'form-control'})
\ No newline at end of file
+            field.widget.attrs.update({'class': 'form-control'})
\ No newline at end of file
diff --git a/core/templates/admin/import_preview.html b/core/templates/admin/import_preview.html
index 1bf9ca8..ba17685 100644
--- a/core/templates/admin/import_preview.html
+++ b/core/templates/admin/import_preview.html
@@ -31,7 +31,7 @@
@@ -45,7 +45,17 @@
{% translate "UPDATE" %}
{% endif %}
- {% translate "Action" %}
- {% translate "Identifyer" %}
+ {% translate "CSV Name / Matched Voter" %}
{% translate "Details" %}