diff --git a/.perm_test_apache b/.perm_test_apache
new file mode 100644
index 0000000..e69de29
diff --git a/.perm_test_exec b/.perm_test_exec
new file mode 100644
index 0000000..e69de29
diff --git a/ai/__init__.py b/ai/__init__.py
new file mode 100644
index 0000000..37a7b09
--- /dev/null
+++ b/ai/__init__.py
@@ -0,0 +1,3 @@
+"""Helpers for interacting with the Flatlogic AI proxy from Django code."""
+
+from .local_ai_api import LocalAIApi, create_response, request, decode_json_from_response # noqa: F401
diff --git a/ai/local_ai_api.py b/ai/local_ai_api.py
new file mode 100644
index 0000000..bcff732
--- /dev/null
+++ b/ai/local_ai_api.py
@@ -0,0 +1,420 @@
+"""
+LocalAIApi — lightweight Python client for the Flatlogic AI proxy.
+
+Usage (inside the Django workspace):
+
+ from ai.local_ai_api import LocalAIApi
+
+ response = LocalAIApi.create_response({
+ "input": [
+ {"role": "system", "content": "You are a helpful assistant."},
+ {"role": "user", "content": "Summarise this text in two sentences."},
+ ],
+ "text": {"format": {"type": "json_object"}},
+ })
+
+ if response.get("success"):
+ data = LocalAIApi.decode_json_from_response(response)
+ # ...
+
+# Typical successful payload (truncated):
+# {
+# "id": "resp_xxx",
+# "status": "completed",
+# "output": [
+# {"type": "reasoning", "summary": []},
+# {"type": "message", "content": [{"type": "output_text", "text": "Your final answer here."}]}
+# ],
+# "usage": { "input_tokens": 123, "output_tokens": 456 }
+# }
+
+The helper automatically injects the project UUID header and falls back to
+reading executor/.env if environment variables are missing.
+"""
+
+from __future__ import annotations
+
+import json
+import os
+import time
+import ssl
+from typing import Any, Dict, Iterable, Optional
+from urllib import error as urlerror
+from urllib import request as urlrequest
+
+__all__ = [
+ "LocalAIApi",
+ "create_response",
+ "request",
+ "fetch_status",
+ "await_response",
+ "extract_text",
+ "decode_json_from_response",
+]
+
+
+_CONFIG_CACHE: Optional[Dict[str, Any]] = None
+
+
+class LocalAIApi:
+ """Static helpers mirroring the PHP implementation."""
+
+ @staticmethod
+ def create_response(params: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ return create_response(params, options or {})
+
+ @staticmethod
+ def request(path: Optional[str] = None, payload: Optional[Dict[str, Any]] = None,
+ options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ return request(path, payload or {}, options or {})
+
+ @staticmethod
+ def extract_text(response: Dict[str, Any]) -> str:
+ return extract_text(response)
+
+ @staticmethod
+ def decode_json_from_response(response: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+ return decode_json_from_response(response)
+
+
+def create_response(params: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ """Signature compatible with the OpenAI Responses API."""
+ options = options or {}
+ payload = dict(params)
+
+ if not isinstance(payload.get("input"), list) or not payload["input"]:
+ return {
+ "success": False,
+ "error": "input_missing",
+ "message": 'Parameter "input" is required and must be a non-empty list.',
+ }
+
+ cfg = _config()
+ if not payload.get("model"):
+ payload["model"] = cfg["default_model"]
+
+ initial = request(options.get("path"), payload, options)
+ if not initial.get("success"):
+ return initial
+
+ data = initial.get("data")
+ if isinstance(data, dict) and "ai_request_id" in data:
+ ai_request_id = data["ai_request_id"]
+ poll_timeout = int(options.get("poll_timeout", 300))
+ poll_interval = int(options.get("poll_interval", 5))
+ return await_response(ai_request_id, {
+ "interval": poll_interval,
+ "timeout": poll_timeout,
+ "headers": options.get("headers"),
+ "timeout_per_call": options.get("timeout"),
+ })
+
+ return initial
+
+
+def request(path: Optional[str], payload: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ """Perform a raw request to the AI proxy."""
+ cfg = _config()
+ options = options or {}
+
+ resolved_path = path or options.get("path") or cfg["responses_path"]
+ if not resolved_path:
+ return {
+ "success": False,
+ "error": "project_id_missing",
+            "message": "Neither AI_RESPONSES_PATH nor PROJECT_ID is set; cannot resolve the AI proxy endpoint.",
+ }
+
+ project_uuid = cfg["project_uuid"]
+ if not project_uuid:
+ return {
+ "success": False,
+ "error": "project_uuid_missing",
+ "message": "PROJECT_UUID is not defined; aborting AI request.",
+ }
+
+ if "project_uuid" not in payload and project_uuid:
+ payload["project_uuid"] = project_uuid
+
+ url = _build_url(resolved_path, cfg["base_url"])
+ opt_timeout = options.get("timeout")
+ timeout = int(cfg["timeout"] if opt_timeout is None else opt_timeout)
+ verify_tls = options.get("verify_tls", cfg["verify_tls"])
+
+ headers: Dict[str, str] = {
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ cfg["project_header"]: project_uuid,
+ }
+ extra_headers = options.get("headers")
+ if isinstance(extra_headers, Iterable):
+ for header in extra_headers:
+ if isinstance(header, str) and ":" in header:
+ name, value = header.split(":", 1)
+ headers[name.strip()] = value.strip()
+
+ body = json.dumps(payload, ensure_ascii=False).encode("utf-8")
+ return _http_request(url, "POST", body, headers, timeout, verify_tls)
+
+
+def fetch_status(ai_request_id: Any, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ """Fetch status for a queued AI request."""
+ cfg = _config()
+ options = options or {}
+
+ project_uuid = cfg["project_uuid"]
+ if not project_uuid:
+ return {
+ "success": False,
+ "error": "project_uuid_missing",
+ "message": "PROJECT_UUID is not defined; aborting status check.",
+ }
+
+ status_path = _resolve_status_path(ai_request_id, cfg)
+ url = _build_url(status_path, cfg["base_url"])
+
+ opt_timeout = options.get("timeout")
+ timeout = int(cfg["timeout"] if opt_timeout is None else opt_timeout)
+ verify_tls = options.get("verify_tls", cfg["verify_tls"])
+
+ headers: Dict[str, str] = {
+ "Accept": "application/json",
+ cfg["project_header"]: project_uuid,
+ }
+ extra_headers = options.get("headers")
+ if isinstance(extra_headers, Iterable):
+ for header in extra_headers:
+ if isinstance(header, str) and ":" in header:
+ name, value = header.split(":", 1)
+ headers[name.strip()] = value.strip()
+
+ return _http_request(url, "GET", None, headers, timeout, verify_tls)
+
+
+def await_response(ai_request_id: Any, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ """Poll status endpoint until the request is complete or timed out."""
+ options = options or {}
+ timeout = int(options.get("timeout", 300))
+ interval = int(options.get("interval", 5))
+ if interval <= 0:
+ interval = 5
+ per_call_timeout = options.get("timeout_per_call")
+
+ deadline = time.time() + max(timeout, interval)
+
+ while True:
+ status_resp = fetch_status(ai_request_id, {
+ "headers": options.get("headers"),
+ "timeout": per_call_timeout,
+ "verify_tls": options.get("verify_tls"),
+ })
+ if status_resp.get("success"):
+ data = status_resp.get("data") or {}
+ if isinstance(data, dict):
+ status_value = data.get("status")
+ if status_value == "success":
+ return {
+ "success": True,
+ "status": 200,
+ "data": data.get("response", data),
+ }
+ if status_value == "failed":
+ return {
+ "success": False,
+ "status": 500,
+ "error": str(data.get("error") or "AI request failed"),
+ "data": data,
+ }
+ else:
+ return status_resp
+
+ if time.time() >= deadline:
+ return {
+ "success": False,
+ "error": "timeout",
+ "message": "Timed out waiting for AI response.",
+ }
+ time.sleep(interval)
+
+
+def extract_text(response: Dict[str, Any]) -> str:
+ """Public helper to extract plain text from a Responses payload."""
+ return _extract_text(response)
+
+
+def decode_json_from_response(response: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+ """Attempt to decode JSON emitted by the model (handles markdown fences)."""
+ text = _extract_text(response)
+ if text == "":
+ return None
+
+ try:
+ decoded = json.loads(text)
+ if isinstance(decoded, dict):
+ return decoded
+ except json.JSONDecodeError:
+ pass
+
+ stripped = text.strip()
+ if stripped.startswith("```json"):
+ stripped = stripped[7:]
+ if stripped.endswith("```"):
+ stripped = stripped[:-3]
+ stripped = stripped.strip()
+ if stripped and stripped != text:
+ try:
+ decoded = json.loads(stripped)
+ if isinstance(decoded, dict):
+ return decoded
+ except json.JSONDecodeError:
+ return None
+ return None
+
+
+def _extract_text(response: Dict[str, Any]) -> str:
+ payload = response.get("data") if response.get("success") else response.get("response")
+ if isinstance(payload, dict):
+ output = payload.get("output")
+ if isinstance(output, list):
+ combined = ""
+ for item in output:
+ content = item.get("content") if isinstance(item, dict) else None
+ if isinstance(content, list):
+ for block in content:
+ if isinstance(block, dict) and block.get("type") == "output_text" and block.get("text"):
+ combined += str(block["text"])
+ if combined:
+ return combined
+ choices = payload.get("choices")
+ if isinstance(choices, list) and choices:
+ message = choices[0].get("message")
+ if isinstance(message, dict) and message.get("content"):
+ return str(message["content"])
+ if isinstance(payload, str):
+ return payload
+ return ""
+
+
+def _config() -> Dict[str, Any]:
+ global _CONFIG_CACHE # noqa: PLW0603
+ if _CONFIG_CACHE is not None:
+ return _CONFIG_CACHE
+
+ _ensure_env_loaded()
+
+ base_url = os.getenv("AI_PROXY_BASE_URL", "https://flatlogic.com")
+ project_id = os.getenv("PROJECT_ID") or None
+ responses_path = os.getenv("AI_RESPONSES_PATH")
+ if not responses_path and project_id:
+ responses_path = f"/projects/{project_id}/ai-request"
+
+ _CONFIG_CACHE = {
+ "base_url": base_url,
+ "responses_path": responses_path,
+ "project_id": project_id,
+ "project_uuid": os.getenv("PROJECT_UUID"),
+ "project_header": os.getenv("AI_PROJECT_HEADER", "project-uuid"),
+ "default_model": os.getenv("AI_DEFAULT_MODEL", "gpt-5-mini"),
+ "timeout": int(os.getenv("AI_TIMEOUT", "30")),
+ "verify_tls": os.getenv("AI_VERIFY_TLS", "true").lower() not in {"0", "false", "no"},
+ }
+ return _CONFIG_CACHE
+
+
+def _build_url(path: str, base_url: str) -> str:
+ trimmed = path.strip()
+ if trimmed.startswith("http://") or trimmed.startswith("https://"):
+ return trimmed
+ if trimmed.startswith("/"):
+ return f"{base_url}{trimmed}"
+ return f"{base_url}/{trimmed}"
+
+
+def _resolve_status_path(ai_request_id: Any, cfg: Dict[str, Any]) -> str:
+ base_path = (cfg.get("responses_path") or "").rstrip("/")
+ if not base_path:
+ return f"/ai-request/{ai_request_id}/status"
+ if not base_path.endswith("/ai-request"):
+ base_path = f"{base_path}/ai-request"
+ return f"{base_path}/{ai_request_id}/status"
+
+
+def _http_request(url: str, method: str, body: Optional[bytes], headers: Dict[str, str],
+ timeout: int, verify_tls: bool) -> Dict[str, Any]:
+ """
+ Shared HTTP helper for GET/POST requests.
+ """
+ req = urlrequest.Request(url, data=body, method=method.upper())
+ for name, value in headers.items():
+ req.add_header(name, value)
+
+ context = None
+ if not verify_tls:
+ context = ssl.create_default_context()
+ context.check_hostname = False
+ context.verify_mode = ssl.CERT_NONE
+
+ try:
+ with urlrequest.urlopen(req, timeout=timeout, context=context) as resp:
+ status = resp.getcode()
+ response_body = resp.read().decode("utf-8", errors="replace")
+ except urlerror.HTTPError as exc:
+ status = exc.getcode()
+ response_body = exc.read().decode("utf-8", errors="replace")
+ except Exception as exc: # pylint: disable=broad-except
+ return {
+ "success": False,
+ "error": "request_failed",
+ "message": str(exc),
+ }
+
+ decoded = None
+ if response_body:
+ try:
+ decoded = json.loads(response_body)
+ except json.JSONDecodeError:
+ decoded = None
+
+ if 200 <= status < 300:
+ return {
+ "success": True,
+ "status": status,
+ "data": decoded if decoded is not None else response_body,
+ }
+
+ error_message = "AI proxy request failed"
+ if isinstance(decoded, dict):
+ error_message = decoded.get("error") or decoded.get("message") or error_message
+ elif response_body:
+ error_message = response_body
+
+ return {
+ "success": False,
+ "status": status,
+ "error": error_message,
+ "response": decoded if decoded is not None else response_body,
+ }
+
+
+def _ensure_env_loaded() -> None:
+ """Populate os.environ from executor/.env if variables are missing."""
+ if os.getenv("PROJECT_UUID") and os.getenv("PROJECT_ID"):
+ return
+
+ env_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".env"))
+ if not os.path.exists(env_path):
+ return
+
+ try:
+ with open(env_path, "r", encoding="utf-8") as handle:
+ for line in handle:
+ stripped = line.strip()
+ if not stripped or stripped.startswith("#") or "=" not in stripped:
+ continue
+ key, value = stripped.split("=", 1)
+ key = key.strip()
+ value = value.strip().strip('\'"')
+ if key and not os.getenv(key):
+ os.environ[key] = value
+ except OSError:
+ pass
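
A minimal sketch of how this client can be called from Django code. The view, route, and prompt below are illustrative assumptions and are not part of this change; only `LocalAIApi` and the helpers defined above are taken from it.

```python
# Hypothetical usage sketch (not part of this diff): a view that asks the proxy
# for a JSON summary of posted text. The prompt and route are assumptions.
from django.http import JsonResponse
from django.views.decorators.http import require_POST

from ai import LocalAIApi


@require_POST
def summarize(request):
    text = request.POST.get("text", "")
    response = LocalAIApi.create_response(
        {
            "input": [
                {"role": "system", "content": 'Reply with a JSON object: {"summary": "..."}.'},
                {"role": "user", "content": text},
            ],
            "text": {"format": {"type": "json_object"}},
        },
        {"poll_timeout": 120, "poll_interval": 3},
    )

    if not response.get("success"):
        # Error keys come from local_ai_api.py: "error" plus an optional "message".
        return JsonResponse(
            {"error": response.get("error"), "detail": response.get("message")},
            status=502,
        )

    decoded = LocalAIApi.decode_json_from_response(response) or {}
    return JsonResponse({"summary": decoded.get("summary") or LocalAIApi.extract_text(response)})
```
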
diff --git a/config/__pycache__/__init__.cpython-311.pyc b/config/__pycache__/__init__.cpython-311.pyc
index 3d6501c..3d2c8fd 100644
Binary files a/config/__pycache__/__init__.cpython-311.pyc and b/config/__pycache__/__init__.cpython-311.pyc differ
diff --git a/config/__pycache__/settings.cpython-311.pyc b/config/__pycache__/settings.cpython-311.pyc
index dadfaa7..e557a5d 100644
Binary files a/config/__pycache__/settings.cpython-311.pyc and b/config/__pycache__/settings.cpython-311.pyc differ
diff --git a/config/__pycache__/urls.cpython-311.pyc b/config/__pycache__/urls.cpython-311.pyc
index 139db10..ff04da5 100644
Binary files a/config/__pycache__/urls.cpython-311.pyc and b/config/__pycache__/urls.cpython-311.pyc differ
diff --git a/config/__pycache__/wsgi.cpython-311.pyc b/config/__pycache__/wsgi.cpython-311.pyc
index 79ce690..0c8f1a4 100644
Binary files a/config/__pycache__/wsgi.cpython-311.pyc and b/config/__pycache__/wsgi.cpython-311.pyc differ
diff --git a/core/__pycache__/__init__.cpython-311.pyc b/core/__pycache__/__init__.cpython-311.pyc
index 3b7774e..414f175 100644
Binary files a/core/__pycache__/__init__.cpython-311.pyc and b/core/__pycache__/__init__.cpython-311.pyc differ
diff --git a/core/__pycache__/admin.cpython-311.pyc b/core/__pycache__/admin.cpython-311.pyc
index 5e41572..582eadb 100644
Binary files a/core/__pycache__/admin.cpython-311.pyc and b/core/__pycache__/admin.cpython-311.pyc differ
diff --git a/core/__pycache__/apps.cpython-311.pyc b/core/__pycache__/apps.cpython-311.pyc
index 6435d92..8fb1659 100644
Binary files a/core/__pycache__/apps.cpython-311.pyc and b/core/__pycache__/apps.cpython-311.pyc differ
diff --git a/core/__pycache__/forms.cpython-311.pyc b/core/__pycache__/forms.cpython-311.pyc
index f6e5c4e..eaea285 100644
Binary files a/core/__pycache__/forms.cpython-311.pyc and b/core/__pycache__/forms.cpython-311.pyc differ
diff --git a/core/__pycache__/models.cpython-311.pyc b/core/__pycache__/models.cpython-311.pyc
index 5b41fe1..625e489 100644
Binary files a/core/__pycache__/models.cpython-311.pyc and b/core/__pycache__/models.cpython-311.pyc differ
diff --git a/core/__pycache__/urls.cpython-311.pyc b/core/__pycache__/urls.cpython-311.pyc
index 4e4f113..42abe0d 100644
Binary files a/core/__pycache__/urls.cpython-311.pyc and b/core/__pycache__/urls.cpython-311.pyc differ
diff --git a/core/__pycache__/views.cpython-311.pyc b/core/__pycache__/views.cpython-311.pyc
index 9d0ddd8..36aec7d 100644
Binary files a/core/__pycache__/views.cpython-311.pyc and b/core/__pycache__/views.cpython-311.pyc differ
diff --git a/core/admin.py b/core/admin.py
index 639ff3a..6efe590 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -1,8 +1,8 @@
from django.contrib import admin
-from .models import Ticket
+from .models import Course
-@admin.register(Ticket)
-class TicketAdmin(admin.ModelAdmin):
- list_display = ('subject', 'status', 'priority', 'requester_email', 'created_at')
- list_filter = ('status', 'priority')
- search_fields = ('subject', 'requester_email', 'description')
+@admin.register(Course)
+class CourseAdmin(admin.ModelAdmin):
+ list_display = ('name', 'code', 'department', 'semester', 'credits')
+ search_fields = ('name', 'code', 'department')
+ list_filter = ('department', 'semester')
\ No newline at end of file
diff --git a/core/forms.py b/core/forms.py
index 7a6b83b..a30d942 100644
--- a/core/forms.py
+++ b/core/forms.py
@@ -1,7 +1 @@
-from django import forms
-from .models import Ticket
-
-class TicketForm(forms.ModelForm):
- class Meta:
- model = Ticket
- fields = ['subject', 'requester_email', 'priority', 'description']
+# Forms for the core app can be defined here.
\ No newline at end of file
diff --git a/core/migrations/0002_course_delete_ticket.py b/core/migrations/0002_course_delete_ticket.py
new file mode 100644
index 0000000..e740366
--- /dev/null
+++ b/core/migrations/0002_course_delete_ticket.py
@@ -0,0 +1,27 @@
+# Generated by Django 5.2.7 on 2025-11-22 10:07
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('core', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Course',
+ fields=[
+ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('code', models.CharField(max_length=20, unique=True)),
+ ('name', models.CharField(max_length=255)),
+ ('credits', models.IntegerField()),
+ ('semester', models.IntegerField()),
+ ('department', models.CharField(max_length=100)),
+ ],
+ ),
+ migrations.DeleteModel(
+ name='Ticket',
+ ),
+ ]
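
For reviewers who want to exercise the migration outside `manage.py`, here is a hedged sketch; it assumes the settings module is `config.settings` and that the environment is otherwise configured.

```python
# Hypothetical sketch (not part of this diff): apply this migration programmatically.
import os

import django
from django.core.management import call_command

# Assumes the project's settings live at config.settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
django.setup()

call_command("migrate", "core", "0002_course_delete_ticket")

from core.models import Course  # import after setup so the app registry is ready

print(Course.objects.count())  # the new table exists and starts empty
```
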
diff --git a/core/migrations/__pycache__/0001_initial.cpython-311.pyc b/core/migrations/__pycache__/0001_initial.cpython-311.pyc
index 64d8a55..6beecdd 100644
Binary files a/core/migrations/__pycache__/0001_initial.cpython-311.pyc and b/core/migrations/__pycache__/0001_initial.cpython-311.pyc differ
diff --git a/core/migrations/__pycache__/0002_course_delete_ticket.cpython-311.pyc b/core/migrations/__pycache__/0002_course_delete_ticket.cpython-311.pyc
new file mode 100644
index 0000000..df8b031
Binary files /dev/null and b/core/migrations/__pycache__/0002_course_delete_ticket.cpython-311.pyc differ
diff --git a/core/migrations/__pycache__/__init__.cpython-311.pyc b/core/migrations/__pycache__/__init__.cpython-311.pyc
index 58b1c14..d210a32 100644
Binary files a/core/migrations/__pycache__/__init__.cpython-311.pyc and b/core/migrations/__pycache__/__init__.cpython-311.pyc differ
diff --git a/core/models.py b/core/models.py
index 78b60d1..81dc2ea 100644
--- a/core/models.py
+++ b/core/models.py
@@ -1,25 +1,11 @@
from django.db import models
-class Ticket(models.Model):
- STATUS_CHOICES = [
- ('open', 'Open'),
- ('in_progress', 'In Progress'),
- ('closed', 'Closed'),
- ]
-
- PRIORITY_CHOICES = [
- ('low', 'Low'),
- ('medium', 'Medium'),
- ('high', 'High'),
- ]
-
- subject = models.CharField(max_length=255)
- status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='open')
- priority = models.CharField(max_length=20, choices=PRIORITY_CHOICES, default='medium')
- requester_email = models.EmailField()
- description = models.TextField()
- created_at = models.DateTimeField(auto_now_add=True)
- updated_at = models.DateTimeField(auto_now=True)
+class Course(models.Model):
+ code = models.CharField(max_length=20, unique=True)
+ name = models.CharField(max_length=255)
+ credits = models.IntegerField()
+ semester = models.IntegerField()
+ department = models.CharField(max_length=100)
def __str__(self):
- return self.subject
\ No newline at end of file
+ return self.name
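
A short, hypothetical ORM sketch showing how the new `Course` model is typically used; the sample course data is invented for illustration.

```python
# Hypothetical usage sketch (not part of this diff); the course data is invented.
from core.models import Course

# `code` is unique, so get_or_create keeps repeated imports idempotent.
course, created = Course.objects.get_or_create(
    code="CS101",
    defaults={
        "name": "Introduction to Computer Science",
        "credits": 4,
        "semester": 1,
        "department": "Computer Science",
    },
)

# The catalog view can order courses for display like this.
catalog = Course.objects.all().order_by("department", "semester", "code")
```
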
diff --git a/core/templates/base.html b/core/templates/base.html
new file mode 100644
index 0000000..18e5f98
--- /dev/null
+++ b/core/templates/base.html
@@ -0,0 +1,19 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>{% block title %}Grade Management System{% endblock %}</title>
+
+    {% load static %}
+    <link rel="stylesheet" href="{% static 'css/custom.css' %}">
+
+    {% block head %}{% endblock %}
+</head>
+
+<body>
+
+    {% block content %}{% endblock %}
+
+</body>
+</html>
\ No newline at end of file
diff --git a/core/templates/core/article_detail.html b/core/templates/core/article_detail.html
new file mode 100644
index 0000000..8820990
--- /dev/null
+++ b/core/templates/core/article_detail.html
@@ -0,0 +1,14 @@
+{% extends 'base.html' %}
+
+{% block title %}{{ article.title }}{% endblock %}
+
+{% block content %}
+<div class="container">
+    <h1>{{ article.title }}</h1>
+    <p>Published on {{ article.created_at|date:"F d, Y" }}</p>
+    <div>
+        {{ article.content|safe }}
+    </div>
+</div>
+
+{% endblock %}
diff --git a/core/templates/core/index.html b/core/templates/core/index.html
index f4e4991..776f2a6 100644
--- a/core/templates/core/index.html
+++ b/core/templates/core/index.html
@@ -1,157 +1,43 @@
-
-
+{% extends 'base.html' %}
+{% load static %}
-
-
-
- {{ project_name }}
- {% if project_description %}
-
-
-
- {% endif %}
- {% if project_image_url %}
-
-
- {% endif %}
-
-
-
-
-
-
-
-
-
-
-    Analyzing your requirements and generating your website…
-
-
-    Loading…
+{% block content %}
+<div class="hero-section">
+    <div class="container">
+        <h1 class="display-4">Course Catalog</h1>
+    </div>
+</div>
+
+<div class="container">
+    <div class="row">
+    {% if courses %}
+        {% for course in courses %}
+        <div class="col-md-4 mb-4">
+            <div class="card course-card">
+                <div class="card-body">
+                    <h5 class="card-title">{{ course.name }}</h5>
+                    <h6 class="card-subtitle">{{ course.code }}</h6>
+                    <p>Department: {{ course.department }}</p>
+                    <p>Semester: {{ course.semester }} | Credits: {{ course.credits }}</p>
+                </div>
+            </div>
+        </div>
-
-    Appwizzy AI is collecting your requirements and applying the first changes.
-
-    This page will refresh automatically as the plan is implemented.
-
- Runtime: Django {{ django_version }} · Python {{ python_version }} —
- UTC {{ current_time|date:"Y-m-d H:i:s" }}
-
-
-
-
-
+        {% endfor %}
+    {% else %}
+        <div class="col-12">
+            <p>
+                No courses have been added yet. The administrator can add courses via the admin panel.
+            </p>
+        </div>
+    {% endif %}
+    </div>
+</div>
-
\ No newline at end of file
+
+{% endblock %}
diff --git a/core/views.py b/core/views.py
index c1a6d45..28123b8 100644
--- a/core/views.py
+++ b/core/views.py
@@ -1,37 +1,10 @@
-import os
-import platform
-
-from django import get_version as django_version
from django.shortcuts import render
-from django.urls import reverse_lazy
-from django.utils import timezone
-from django.views.generic.edit import CreateView
-
-from .forms import TicketForm
-from .models import Ticket
-
+from .models import Course
def home(request):
- """Render the landing screen with loader and environment details."""
- host_name = request.get_host().lower()
- agent_brand = "AppWizzy" if host_name == "appwizzy.com" else "Flatlogic"
- now = timezone.now()
-
+ courses = Course.objects.all()
context = {
- "project_name": "New Style",
- "agent_brand": agent_brand,
- "django_version": django_version(),
- "python_version": platform.python_version(),
- "current_time": now,
- "host_name": host_name,
- "project_description": os.getenv("PROJECT_DESCRIPTION", ""),
- "project_image_url": os.getenv("PROJECT_IMAGE_URL", ""),
+ 'courses': courses,
+ 'project_name': 'Grade Management System',
}
- return render(request, "core/index.html", context)
-
-
-class TicketCreateView(CreateView):
- model = Ticket
- form_class = TicketForm
- template_name = "core/ticket_create.html"
- success_url = reverse_lazy("home")
+ return render(request, "core/index.html", context)
\ No newline at end of file
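
A hedged smoke test for the rewritten `home` view. It assumes the view is routed at the site root, which this diff does not show.

```python
# Hypothetical test sketch (not part of this diff). Assumes the home view is
# mounted at "/"; the URLconf is not included in this change.
from django.test import TestCase

from core.models import Course


class HomeViewTests(TestCase):
    def test_home_lists_courses(self):
        Course.objects.create(
            code="MA201", name="Linear Algebra", credits=3, semester=2, department="Mathematics"
        )
        response = self.client.get("/")
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "core/index.html")
        self.assertContains(response, "Linear Algebra")
```
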
diff --git a/static/css/custom.css b/static/css/custom.css
new file mode 100644
index 0000000..f47a908
--- /dev/null
+++ b/static/css/custom.css
@@ -0,0 +1,61 @@
+/* Grade Management System Custom Styles */
+
+:root {
+ --primary-color: #0A2540;
+ --secondary-color: #F6F9FC;
+ --accent-color: #007BFF;
+ --text-dark: #333333;
+ --text-light: #FFFFFF;
+}
+
+body {
+ font-family: 'Lato', sans-serif;
+ background-color: var(--secondary-color);
+ color: var(--text-dark);
+}
+
+h1, h2, h3, h4, h5, h6 {
+ font-family: 'Poppins', sans-serif;
+ font-weight: 600;
+}
+
+.hero-section {
+ background: linear-gradient(135deg, var(--primary-color) 0%, #1c3d6e 100%);
+ color: var(--text-light);
+ padding: 6rem 0;
+ margin-bottom: 3rem;
+}
+
+.hero-section .display-4 {
+ font-weight: 700;
+}
+
+.course-card {
+ border: none;
+ border-radius: 15px;
+ box-shadow: 0 10px 20px rgba(0, 0, 0, 0.05);
+ transition: transform 0.3s ease, box-shadow 0.3s ease;
+}
+
+.course-card:hover {
+ transform: translateY(-5px);
+ box-shadow: 0 15px 30px rgba(0, 0, 0, 0.1);
+}
+
+.course-card .card-title {
+ color: var(--primary-color);
+ font-weight: 700;
+}
+
+.course-card .card-subtitle {
+ color: #6c757d;
+}
+
+footer {
+ background-color: var(--primary-color);
+ color: var(--text-light);
+}
+
+footer p {
+ margin: 0;
+}
diff --git a/staticfiles/css/custom.css b/staticfiles/css/custom.css
index 108056f..f47a908 100644
--- a/staticfiles/css/custom.css
+++ b/staticfiles/css/custom.css
@@ -1,21 +1,61 @@
+/* Grade Management System Custom Styles */
:root {
- --bg-color-start: #6a11cb;
- --bg-color-end: #2575fc;
- --text-color: #ffffff;
- --card-bg-color: rgba(255, 255, 255, 0.01);
- --card-border-color: rgba(255, 255, 255, 0.1);
+ --primary-color: #0A2540;
+ --secondary-color: #F6F9FC;
+ --accent-color: #007BFF;
+ --text-dark: #333333;
+ --text-light: #FFFFFF;
}
+
body {
- margin: 0;
- font-family: 'Inter', sans-serif;
- background: linear-gradient(45deg, var(--bg-color-start), var(--bg-color-end));
- color: var(--text-color);
- display: flex;
- justify-content: center;
- align-items: center;
- min-height: 100vh;
- text-align: center;
- overflow: hidden;
- position: relative;
+ font-family: 'Lato', sans-serif;
+ background-color: var(--secondary-color);
+ color: var(--text-dark);
+}
+
+h1, h2, h3, h4, h5, h6 {
+ font-family: 'Poppins', sans-serif;
+ font-weight: 600;
+}
+
+.hero-section {
+ background: linear-gradient(135deg, var(--primary-color) 0%, #1c3d6e 100%);
+ color: var(--text-light);
+ padding: 6rem 0;
+ margin-bottom: 3rem;
+}
+
+.hero-section .display-4 {
+ font-weight: 700;
+}
+
+.course-card {
+ border: none;
+ border-radius: 15px;
+ box-shadow: 0 10px 20px rgba(0, 0, 0, 0.05);
+ transition: transform 0.3s ease, box-shadow 0.3s ease;
+}
+
+.course-card:hover {
+ transform: translateY(-5px);
+ box-shadow: 0 15px 30px rgba(0, 0, 0, 0.1);
+}
+
+.course-card .card-title {
+ color: var(--primary-color);
+ font-weight: 700;
+}
+
+.course-card .card-subtitle {
+ color: #6c757d;
+}
+
+footer {
+ background-color: var(--primary-color);
+ color: var(--text-light);
+}
+
+footer p {
+ margin: 0;
}