Compare commits

...

5 Commits

Author          SHA1         Message                                  Date
Flatlogic Bot   20a786eae4   Auto commit: 2025-11-25T16:20:11.899Z    2025-11-25 16:20:11 +00:00
Flatlogic Bot   0187c43d11   Auto commit: 2025-11-25T16:17:51.853Z    2025-11-25 16:17:51 +00:00
Flatlogic Bot   b0df731824   Auto commit: 2025-11-25T16:13:14.454Z    2025-11-25 16:13:14 +00:00
Flatlogic Bot   6a85a0e336   Auto commit: 2025-11-25T16:03:13.664Z    2025-11-25 16:03:13 +00:00
Flatlogic Bot   bfba0ea6ae   Auto commit: 2025-11-25T15:48:00.966Z    2025-11-25 15:48:00 +00:00
36 changed files with 1226 additions and 198 deletions

0
.perm_test_apache Normal file

0
.perm_test_exec Normal file

3
ai/__init__.py Normal file

@@ -0,0 +1,3 @@
"""Helpers for interacting with the Flatlogic AI proxy from Django code."""
from .local_ai_api import LocalAIApi, create_response, request, decode_json_from_response # noqa: F401

Binary file not shown.

Binary file not shown.

420
ai/local_ai_api.py Normal file

@@ -0,0 +1,420 @@
"""
LocalAIApi: a lightweight Python client for the Flatlogic AI proxy.
Usage (inside the Django workspace):
from ai.local_ai_api import LocalAIApi
response = LocalAIApi.create_response({
"input": [
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": "Summarise this text in two sentences."},
],
"text": {"format": {"type": "json_object"}},
})
if response.get("success"):
data = LocalAIApi.decode_json_from_response(response)
# ...
# Typical successful payload (truncated):
# {
# "id": "resp_xxx",
# "status": "completed",
# "output": [
# {"type": "reasoning", "summary": []},
# {"type": "message", "content": [{"type": "output_text", "text": "Your final answer here."}]}
# ],
# "usage": { "input_tokens": 123, "output_tokens": 456 }
# }
The helper automatically injects the project UUID header and falls back to
reading executor/.env if environment variables are missing.
"""
from __future__ import annotations
import json
import os
import time
import ssl
from typing import Any, Dict, Iterable, Optional
from urllib import error as urlerror
from urllib import request as urlrequest
__all__ = [
"LocalAIApi",
"create_response",
"request",
"fetch_status",
"await_response",
"extract_text",
"decode_json_from_response",
]
_CONFIG_CACHE: Optional[Dict[str, Any]] = None
class LocalAIApi:
"""Static helpers mirroring the PHP implementation."""
@staticmethod
def create_response(params: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
return create_response(params, options or {})
@staticmethod
def request(path: Optional[str] = None, payload: Optional[Dict[str, Any]] = None,
options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
return request(path, payload or {}, options or {})
@staticmethod
def extract_text(response: Dict[str, Any]) -> str:
return extract_text(response)
@staticmethod
def decode_json_from_response(response: Dict[str, Any]) -> Optional[Dict[str, Any]]:
return decode_json_from_response(response)
def create_response(params: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""Signature compatible with the OpenAI Responses API."""
options = options or {}
payload = dict(params)
if not isinstance(payload.get("input"), list) or not payload["input"]:
return {
"success": False,
"error": "input_missing",
"message": 'Parameter "input" is required and must be a non-empty list.',
}
cfg = _config()
if not payload.get("model"):
payload["model"] = cfg["default_model"]
initial = request(options.get("path"), payload, options)
if not initial.get("success"):
return initial
data = initial.get("data")
if isinstance(data, dict) and "ai_request_id" in data:
ai_request_id = data["ai_request_id"]
poll_timeout = int(options.get("poll_timeout", 300))
poll_interval = int(options.get("poll_interval", 5))
return await_response(ai_request_id, {
"interval": poll_interval,
"timeout": poll_timeout,
"headers": options.get("headers"),
"timeout_per_call": options.get("timeout"),
})
return initial
def request(path: Optional[str], payload: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""Perform a raw request to the AI proxy."""
cfg = _config()
options = options or {}
resolved_path = path or options.get("path") or cfg["responses_path"]
if not resolved_path:
return {
"success": False,
"error": "project_id_missing",
"message": "PROJECT_ID is not defined; cannot resolve AI proxy endpoint.",
}
project_uuid = cfg["project_uuid"]
if not project_uuid:
return {
"success": False,
"error": "project_uuid_missing",
"message": "PROJECT_UUID is not defined; aborting AI request.",
}
if "project_uuid" not in payload and project_uuid:
payload["project_uuid"] = project_uuid
url = _build_url(resolved_path, cfg["base_url"])
opt_timeout = options.get("timeout")
timeout = int(cfg["timeout"] if opt_timeout is None else opt_timeout)
verify_tls = options.get("verify_tls", cfg["verify_tls"])
headers: Dict[str, str] = {
"Content-Type": "application/json",
"Accept": "application/json",
cfg["project_header"]: project_uuid,
}
extra_headers = options.get("headers")
if isinstance(extra_headers, Iterable):
for header in extra_headers:
if isinstance(header, str) and ":" in header:
name, value = header.split(":", 1)
headers[name.strip()] = value.strip()
body = json.dumps(payload, ensure_ascii=False).encode("utf-8")
return _http_request(url, "POST", body, headers, timeout, verify_tls)
def fetch_status(ai_request_id: Any, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""Fetch status for a queued AI request."""
cfg = _config()
options = options or {}
project_uuid = cfg["project_uuid"]
if not project_uuid:
return {
"success": False,
"error": "project_uuid_missing",
"message": "PROJECT_UUID is not defined; aborting status check.",
}
status_path = _resolve_status_path(ai_request_id, cfg)
url = _build_url(status_path, cfg["base_url"])
opt_timeout = options.get("timeout")
timeout = int(cfg["timeout"] if opt_timeout is None else opt_timeout)
verify_tls = options.get("verify_tls", cfg["verify_tls"])
headers: Dict[str, str] = {
"Accept": "application/json",
cfg["project_header"]: project_uuid,
}
extra_headers = options.get("headers")
if isinstance(extra_headers, Iterable):
for header in extra_headers:
if isinstance(header, str) and ":" in header:
name, value = header.split(":", 1)
headers[name.strip()] = value.strip()
return _http_request(url, "GET", None, headers, timeout, verify_tls)
def await_response(ai_request_id: Any, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""Poll status endpoint until the request is complete or timed out."""
options = options or {}
timeout = int(options.get("timeout", 300))
interval = int(options.get("interval", 5))
if interval <= 0:
interval = 5
per_call_timeout = options.get("timeout_per_call")
deadline = time.time() + max(timeout, interval)
while True:
status_resp = fetch_status(ai_request_id, {
"headers": options.get("headers"),
"timeout": per_call_timeout,
"verify_tls": options.get("verify_tls"),
})
if status_resp.get("success"):
data = status_resp.get("data") or {}
if isinstance(data, dict):
status_value = data.get("status")
if status_value == "success":
return {
"success": True,
"status": 200,
"data": data.get("response", data),
}
if status_value == "failed":
return {
"success": False,
"status": 500,
"error": str(data.get("error") or "AI request failed"),
"data": data,
}
else:
return status_resp
if time.time() >= deadline:
return {
"success": False,
"error": "timeout",
"message": "Timed out waiting for AI response.",
}
time.sleep(interval)
def extract_text(response: Dict[str, Any]) -> str:
"""Public helper to extract plain text from a Responses payload."""
return _extract_text(response)
def decode_json_from_response(response: Dict[str, Any]) -> Optional[Dict[str, Any]]:
"""Attempt to decode JSON emitted by the model (handles markdown fences)."""
text = _extract_text(response)
if text == "":
return None
try:
decoded = json.loads(text)
if isinstance(decoded, dict):
return decoded
except json.JSONDecodeError:
pass
stripped = text.strip()
if stripped.startswith("```json"):
stripped = stripped[7:]
if stripped.endswith("```"):
stripped = stripped[:-3]
stripped = stripped.strip()
if stripped and stripped != text:
try:
decoded = json.loads(stripped)
if isinstance(decoded, dict):
return decoded
except json.JSONDecodeError:
return None
return None
def _extract_text(response: Dict[str, Any]) -> str:
payload = response.get("data") if response.get("success") else response.get("response")
if isinstance(payload, dict):
output = payload.get("output")
if isinstance(output, list):
combined = ""
for item in output:
content = item.get("content") if isinstance(item, dict) else None
if isinstance(content, list):
for block in content:
if isinstance(block, dict) and block.get("type") == "output_text" and block.get("text"):
combined += str(block["text"])
if combined:
return combined
choices = payload.get("choices")
if isinstance(choices, list) and choices:
message = choices[0].get("message")
if isinstance(message, dict) and message.get("content"):
return str(message["content"])
if isinstance(payload, str):
return payload
return ""
def _config() -> Dict[str, Any]:
global _CONFIG_CACHE # noqa: PLW0603
if _CONFIG_CACHE is not None:
return _CONFIG_CACHE
_ensure_env_loaded()
base_url = os.getenv("AI_PROXY_BASE_URL", "https://flatlogic.com")
project_id = os.getenv("PROJECT_ID") or None
responses_path = os.getenv("AI_RESPONSES_PATH")
if not responses_path and project_id:
responses_path = f"/projects/{project_id}/ai-request"
_CONFIG_CACHE = {
"base_url": base_url,
"responses_path": responses_path,
"project_id": project_id,
"project_uuid": os.getenv("PROJECT_UUID"),
"project_header": os.getenv("AI_PROJECT_HEADER", "project-uuid"),
"default_model": os.getenv("AI_DEFAULT_MODEL", "gpt-5-mini"),
"timeout": int(os.getenv("AI_TIMEOUT", "30")),
"verify_tls": os.getenv("AI_VERIFY_TLS", "true").lower() not in {"0", "false", "no"},
}
return _CONFIG_CACHE
def _build_url(path: str, base_url: str) -> str:
trimmed = path.strip()
if trimmed.startswith("http://") or trimmed.startswith("https://"):
return trimmed
if trimmed.startswith("/"):
return f"{base_url}{trimmed}"
return f"{base_url}/{trimmed}"
def _resolve_status_path(ai_request_id: Any, cfg: Dict[str, Any]) -> str:
base_path = (cfg.get("responses_path") or "").rstrip("/")
if not base_path:
return f"/ai-request/{ai_request_id}/status"
if not base_path.endswith("/ai-request"):
base_path = f"{base_path}/ai-request"
return f"{base_path}/{ai_request_id}/status"
def _http_request(url: str, method: str, body: Optional[bytes], headers: Dict[str, str],
timeout: int, verify_tls: bool) -> Dict[str, Any]:
"""
Shared HTTP helper for GET/POST requests.
"""
req = urlrequest.Request(url, data=body, method=method.upper())
for name, value in headers.items():
req.add_header(name, value)
context = None
if not verify_tls:
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
try:
with urlrequest.urlopen(req, timeout=timeout, context=context) as resp:
status = resp.getcode()
response_body = resp.read().decode("utf-8", errors="replace")
except urlerror.HTTPError as exc:
status = exc.getcode()
response_body = exc.read().decode("utf-8", errors="replace")
except Exception as exc: # pylint: disable=broad-except
return {
"success": False,
"error": "request_failed",
"message": str(exc),
}
decoded = None
if response_body:
try:
decoded = json.loads(response_body)
except json.JSONDecodeError:
decoded = None
if 200 <= status < 300:
return {
"success": True,
"status": status,
"data": decoded if decoded is not None else response_body,
}
error_message = "AI proxy request failed"
if isinstance(decoded, dict):
error_message = decoded.get("error") or decoded.get("message") or error_message
elif response_body:
error_message = response_body
return {
"success": False,
"status": status,
"error": error_message,
"response": decoded if decoded is not None else response_body,
}
def _ensure_env_loaded() -> None:
"""Populate os.environ from executor/.env if variables are missing."""
if os.getenv("PROJECT_UUID") and os.getenv("PROJECT_ID"):
return
env_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".env"))
if not os.path.exists(env_path):
return
try:
with open(env_path, "r", encoding="utf-8") as handle:
for line in handle:
stripped = line.strip()
if not stripped or stripped.startswith("#") or "=" not in stripped:
continue
key, value = stripped.split("=", 1)
key = key.strip()
value = value.strip().strip('\'"')
if key and not os.getenv(key):
os.environ[key] = value
except OSError:
pass
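
For reference, a minimal sketch of the .env keys that _ensure_env_loaded() and _config() look for; the values below are placeholders, not taken from the commit, and every key after the first two matches the default already baked into _config():

# executor/.env (illustrative values only)
# PROJECT_UUID is sent in the project-uuid header; PROJECT_ID derives the
# default /projects/<PROJECT_ID>/ai-request path when AI_RESPONSES_PATH is unset.
PROJECT_ID=12345
PROJECT_UUID=00000000-0000-0000-0000-000000000000
# The keys below can be omitted; these are the built-in defaults.
AI_PROXY_BASE_URL=https://flatlogic.com
AI_PROJECT_HEADER=project-uuid
AI_DEFAULT_MODEL=gpt-5-mini
AI_TIMEOUT=30
AI_VERIFY_TLS=true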

Binary file not shown. (new file, 4.0 MiB)

Binary file not shown. (new file, 4.2 MiB)

Binary file not shown. (new file, 3.8 MiB)

Binary file not shown. (new file, 3.7 MiB)

Binary file not shown. (new file, 2.9 MiB)

View File

@@ -20,6 +20,9 @@ load_dotenv(BASE_DIR.parent / ".env")
SECRET_KEY = os.getenv("DJANGO_SECRET_KEY", "change-me")
DEBUG = os.getenv("DJANGO_DEBUG", "true").lower() == "true"
YOUTUBE_API_KEY = os.getenv("YOUTUBE_API_KEY", "REPLACE_WITH_YOUR_YOUTUBE_API_KEY")
# Trust proxy headers from Cloudflare Tunnel / Apache
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
USE_X_FORWARDED_HOST = True
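
A quick sketch, not part of the commit, of what these two settings change once the proxy forwards the usual headers; the host and function name below are illustrative and Django settings are assumed to be loaded:

from django.test import RequestFactory, override_settings

@override_settings(
    SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"),
    USE_X_FORWARDED_HOST=True,
    ALLOWED_HOSTS=["example.com"],
)
def check_proxy_headers():
    # Simulate a request as it arrives from Cloudflare Tunnel / Apache.
    request = RequestFactory().get(
        "/",
        HTTP_X_FORWARDED_PROTO="https",       # protocol of the original client connection
        HTTP_X_FORWARDED_HOST="example.com",  # Host the client actually used
    )
    assert request.is_secure()                   # honoured via SECURE_PROXY_SSL_HEADER
    assert request.get_host() == "example.com"   # honoured via USE_X_FORWARDED_HOST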

View File

@@ -0,0 +1,13 @@
import os
import time
def project_context(request):
"""
Adds project-specific environment variables to the template context globally.
"""
return {
"project_description": os.getenv("PROJECT_DESCRIPTION", ""),
"project_image_url": os.getenv("PROJECT_IMAGE_URL", ""),
# Used for cache-busting static assets
"deployment_timestamp": int(time.time()),
}
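
For this processor to run on every request it must be registered under TEMPLATES in settings.py; a minimal sketch, assuming the file lives at core/context_processors.py (its path is not shown in this diff):

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
                # exposes project_description, project_image_url and
                # deployment_timestamp to every template
                "core.context_processors.project_context",
            ],
        },
    },
]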

25
core/templates/base.html Normal file

@@ -0,0 +1,25 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>{% block title %}Knowledge Base{% endblock %}</title>
{% if project_description %}
<meta name="description" content="{{ project_description }}">
<meta property="og:description" content="{{ project_description }}">
<meta property="twitter:description" content="{{ project_description }}">
{% endif %}
{% if project_image_url %}
<meta property="og:image" content="{{ project_image_url }}">
<meta property="twitter:image" content="{{ project_image_url }}">
{% endif %}
{% load static %}
<link rel="stylesheet" href="{% static 'css/custom.css' %}?v={{ deployment_timestamp }}">
{% block head %}{% endblock %}
</head>
<body>
{% block content %}{% endblock %}
</body>
</html>

View File

@@ -0,0 +1,14 @@
{% extends 'base.html' %}
{% block title %}{{ article.title }}{% endblock %}
{% block content %}
<div class="container mt-5">
<h1>{{ article.title }}</h1>
<p class="text-muted">Published on {{ article.created_at|date:"F d, Y" }}</p>
<hr>
<div>
{{ article.content|safe }}
</div>
</div>
{% endblock %}
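
A minimal sketch of a view/URL pair that could render this template; the Article model, the URL pattern and the template path are assumptions, none of them appear in this diff:

from django.shortcuts import get_object_or_404, render
from django.urls import path

from .models import Article  # assumed model with title, content and created_at fields

def article_detail(request, pk):
    article = get_object_or_404(Article, pk=pk)
    return render(request, "core/article_detail.html", {"article": article})

urlpatterns = [
    path("articles/<int:pk>/", article_detail, name="article_detail"),
]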

View File

@@ -1,157 +1,39 @@
<!doctype html>
<html lang="en">
{% extends "base.html" %}
{% load static %}
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{{ project_name }}</title>
{% if project_description %}
<meta name="description" content="{{ project_description }}">
<meta property="og:description" content="{{ project_description }}">
<meta property="twitter:description" content="{{ project_description }}">
{% endif %}
{% if project_image_url %}
<meta property="og:image" content="{{ project_image_url }}">
<meta property="twitter:image" content="{{ project_image_url }}">
{% endif %}
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&display=swap" rel="stylesheet">
<style>
:root {
--bg-color-start: #6a11cb;
--bg-color-end: #2575fc;
--text-color: #ffffff;
--card-bg-color: rgba(255, 255, 255, 0.08);
--card-border-color: rgba(255, 255, 255, 0.18);
}
{% block title %}Mood Music AI{% endblock %}
* {
box-sizing: border-box;
}
{% block content %}
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@400;600&family=Roboto:wght@400;500&display=swap" rel="stylesheet">
body {
margin: 0;
font-family: 'Inter', sans-serif;
background: linear-gradient(130deg, var(--bg-color-start), var(--bg-color-end));
color: var(--text-color);
display: flex;
justify-content: center;
align-items: center;
min-height: 100vh;
text-align: center;
overflow: hidden;
position: relative;
}
<div class="container-fluid hero-section">
<div class="text-center">
<h1 class="hero-title">Find Your Vibe</h1>
<p class="hero-subtitle">Let our AI create the perfect playlist to match your mood.</p>
</div>
body::before {
content: '';
position: absolute;
inset: 0;
background-image: url("data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' width='140' height='140' viewBox='0 0 140 140'><path d='M-20 20L160 20M20 -20L20 160' stroke-width='1' stroke='rgba(255,255,255,0.05)'/></svg>");
animation: bg-pan 24s linear infinite;
z-index: -1;
}
@keyframes bg-pan {
0% {
transform: translate3d(0, 0, 0);
}
100% {
transform: translate3d(-140px, -140px, 0);
}
}
main {
padding: clamp(2rem, 4vw, 3rem);
width: min(640px, 92vw);
}
.card {
background: var(--card-bg-color);
border: 1px solid var(--card-border-color);
border-radius: 20px;
padding: clamp(2rem, 4vw, 3rem);
backdrop-filter: blur(24px);
-webkit-backdrop-filter: blur(24px);
box-shadow: 0 20px 60px rgba(15, 23, 42, 0.35);
}
h1 {
margin: 0 0 1.2rem;
font-weight: 700;
font-size: clamp(2.2rem, 3vw + 1.3rem, 3rem);
letter-spacing: -0.04em;
}
p {
margin: 0.6rem 0;
font-size: 1.1rem;
line-height: 1.7;
opacity: 0.92;
}
.loader {
margin: 1.5rem auto;
width: 56px;
height: 56px;
border: 4px solid rgba(255, 255, 255, 0.25);
border-top-color: #fff;
border-radius: 50%;
animation: spin 1s linear infinite;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
.sr-only {
position: absolute;
width: 1px;
height: 1px;
padding: 0;
margin: -1px;
overflow: hidden;
clip: rect(0, 0, 0, 0);
border: 0;
}
code {
background: rgba(15, 23, 42, 0.35);
padding: 0.2rem 0.6rem;
border-radius: 0.5rem;
font-size: 0.95rem;
}
footer {
margin-top: 2.4rem;
font-size: 0.86rem;
opacity: 0.7;
}
</style>
</head>
<body>
<main>
<div class="card">
<h1>Analyzing your requirements and generating your website…</h1>
<div class="loader" role="status" aria-live="polite" aria-label="Applying initial changes">
<span class="sr-only">Loading…</span>
</div>
<p>Appwizzy AI is collecting your requirements and applying the first changes.</p>
<p>This page will refresh automatically as the plan is implemented.</p>
<p>
Runtime: Django <code>{{ django_version }}</code> · Python <code>{{ python_version }}</code>
UTC <code>{{ current_time|date:"Y-m-d H:i:s" }}</code>
</p>
<div class="chat-container">
<div class="chat-box" id="chat-box">
{% if conversation %}
{% for message in conversation %}
<div class="chat-message {% if message.role == 'user' %}user-message{% else %}ai-message{% endif %}">
<p>{{ message.content }}</p>
</div>
{% endfor %}
{% else %}
<div class="chat-message ai-message">
<p>{{ ai_message }}</p>
</div>
{% endif %}
</div>
<footer>
Page updated: {{ current_time|date:"Y-m-d H:i:s" }} (UTC)
</footer>
</main>
</body>
<form method="post" id="chat-form" class="chat-input-form">
{% csrf_token %}
<input type="text" name="message" id="chat-input" class="chat-input" placeholder="Tell me about your day..." required>
<button type="submit" class="send-button">Send</button>
</form>
</div>
</html>
</div>
<script src="{% static 'js/chat.js' %}"></script>
{% endblock %}

View File

@@ -1,7 +1,7 @@
from django.urls import path
from .views import home
from .views import index
urlpatterns = [
path("", home, name="home"),
path("", index, name="index"),
]

View File

@@ -1,37 +1,89 @@
import os
import platform
from django import get_version as django_version
from django.shortcuts import render
from django.urls import reverse_lazy
from django.utils import timezone
from django.views.generic.edit import CreateView
from django.http import JsonResponse
from ai.local_ai_api import LocalAIApi
import json
from django.conf import settings
from googleapiclient.discovery import build
from .forms import TicketForm
from .models import Ticket
def youtube_search(query):
if not settings.YOUTUBE_API_KEY or settings.YOUTUBE_API_KEY == "REPLACE_WITH_YOUR_YOUTUBE_API_KEY":
return None
youtube = build('youtube', 'v3', developerKey=settings.YOUTUBE_API_KEY)
request = youtube.search().list(
q=query,
part='snippet',
maxResults=1,
type='video'
)
response = request.execute()
if response['items']:
return response['items'][0]['id']['videoId']
return None
def home(request):
"""Render the landing screen with loader and environment details."""
host_name = request.get_host().lower()
agent_brand = "AppWizzy" if host_name == "appwizzy.com" else "Flatlogic"
now = timezone.now()
context = {
"project_name": "New Style",
"agent_brand": agent_brand,
"django_version": django_version(),
"python_version": platform.python_version(),
"current_time": now,
"host_name": host_name,
"project_description": os.getenv("PROJECT_DESCRIPTION", ""),
"project_image_url": os.getenv("PROJECT_IMAGE_URL", ""),
def get_playlist_for_mood(mood):
"""
Returns a mock playlist of songs for a given mood.
"""
playlists = {
"happy": [
{"title": "Happy", "artist": "Pharrell Williams"},
{"title": "Don't Stop Me Now", "artist": "Queen"},
{"title": "Uptown Funk", "artist": "Mark Ronson ft. Bruno Mars"},
],
"sad": [
{"title": "Someone Like You", "artist": "Adele"},
{"title": "Hurt", "artist": "Johnny Cash"},
{"title": "Fix You", "artist": "Coldplay"},
],
"energetic": [
{"title": "Eye of the Tiger", "artist": "Survivor"},
{"title": "Thunderstruck", "artist": "AC/DC"},
{"title": "Can't Stop", "artist": "Red Hot Chili Peppers"},
],
"calm": [
{"title": "Weightless", "artist": "Marconi Union"},
{"title": "Clair de Lune", "artist": "Claude Debussy"},
{"title": "Orinoco Flow", "artist": "Enya"},
],
}
return render(request, "core/index.html", context)
return playlists.get(mood.lower(), [])
def index(request):
playlist = None
if request.method == 'POST':
user_message = request.POST.get('message')
conversation = request.session.get('conversation', [])
conversation.append({'role': 'user', 'content': user_message})
class TicketCreateView(CreateView):
model = Ticket
form_class = TicketForm
template_name = "core/ticket_create.html"
success_url = reverse_lazy("home")
response = LocalAIApi.create_response({
"input": [
{'role': 'system', 'content': 'You are a friendly AI that helps users find music based on their mood. Ask up to 6 questions to understand their mood. Once you have determined the mood, respond with ONLY a JSON object with a single key "playlist" and the value as a list of songs, where each song is a JSON object with "title" and "artist" as keys (e.g. {"playlist": [{"title": "Happy", "artist": "Pharrell Williams"}]}).'},
*conversation
],
})
ai_message = 'Sorry, I had an error.'
if response.get("success"):
json_response = LocalAIApi.decode_json_from_response(response)
if json_response and 'playlist' in json_response:
playlist = json_response['playlist']
for song in playlist:
video_id = youtube_search(f"{song['title']} {song['artist']}")
song['video_id'] = video_id
ai_message = "I've created a playlist for you. I hope you like it!"
else:
ai_message = LocalAIApi.extract_text(response)
conversation.append({'role': 'assistant', 'content': ai_message})
request.session['conversation'] = conversation
if request.headers.get('x-requested-with') == 'XMLHttpRequest':
return JsonResponse({'ai_message': ai_message, 'playlist': playlist})
return render(request, 'core/index.html', {'user_message': user_message, 'ai_message': ai_message, 'playlist': playlist, 'conversation': conversation})
else:
# Start of a new conversation
request.session['conversation'] = []
return render(request, 'core/index.html', {'ai_message': 'Hi! How are you feeling today?'})
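
A sketch of how the AJAX contract of index (an ai_message string plus an optional playlist) could be exercised with the AI proxy and the YouTube lookup mocked out; the test below is illustrative, not part of the commit, and assumes core.urls is mounted at the site root:

from unittest.mock import patch

from django.test import TestCase

FAKE_AI_RESPONSE = {
    "success": True,
    "data": {
        "output": [
            {"type": "message", "content": [{
                "type": "output_text",
                "text": '{"playlist": [{"title": "Happy", "artist": "Pharrell Williams"}]}',
            }]},
        ],
    },
}

class ChatEndpointSketch(TestCase):
    def test_ajax_post_returns_playlist(self):
        # Patch the proxy call and the YouTube lookup so nothing leaves the test process.
        with patch("core.views.LocalAIApi.create_response", return_value=FAKE_AI_RESPONSE), \
             patch("core.views.youtube_search", return_value=None):
            reply = self.client.post(
                "/",
                {"message": "I feel great today"},
                HTTP_X_REQUESTED_WITH="XMLHttpRequest",
            )
        data = reply.json()
        self.assertIn("playlist", data)
        self.assertEqual(data["playlist"][0]["artist"], "Pharrell Williams")
        self.assertIsNone(data["playlist"][0]["video_id"])  # youtube_search was mocked to None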

View File

@@ -1,3 +1,4 @@
Django==5.2.7
mysqlclient==2.2.7
python-dotenv==1.1.1
google-api-python-client>=2.136.0

210
static/css/custom.css Normal file

@@ -0,0 +1,210 @@
/* custom.css */
html, body {
height: 100%;
margin: 0;
padding: 0;
overflow: hidden; /* Prevents the body from scrolling */
}
body {
font-family: 'Roboto', sans-serif;
background-color: #121212;
color: #FFFFFF;
}
.hero-section {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 100vh; /* Changed from min-height to height */
padding: 20px;
box-sizing: border-box;
text-align: center;
background: linear-gradient(rgba(18, 18, 18, 0.8), rgba(18, 18, 18, 0.8)), url('https://images.unsplash.com/photo-1511379938547-c1f69419868d?q=80&w=2070&auto=format&fit=crop');
background-size: cover;
background-position: center;
}
.hero-title {
font-family: 'Poppins', sans-serif;
font-size: 2.5rem; /* Adjusted for a more balanced look */
font-weight: 600;
margin-bottom: 10px;
}
.hero-subtitle {
font-size: 1rem; /* Adjusted for a more balanced look */
margin-bottom: 20px;
}
.chat-container {
width: 100%;
max-width: 600px;
background-color: #181818;
border-radius: 15px;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.5);
display: flex;
flex-direction: column;
/* Let the container take up available space, but not exceed the screen height */
flex-grow: 1;
max-height: calc(100vh - 180px); /* Adjust 180px based on title/subtitle height */
}
.chat-box {
flex-grow: 1; /* This makes the chat box fill the available space */
overflow-y: auto;
padding: 20px;
padding-right: 10px;
}
.chat-message {
padding: 12px;
border-radius: 10px;
margin-bottom: 12px;
max-width: 85%;
word-wrap: break-word;
display: flex; /* Use flexbox for alignment */
}
.ai-message {
background-color: #282828;
color: #FFFFFF;
align-self: flex-start;
}
.user-message {
background: linear-gradient(to right, #1DB954, #1ED760);
color: #FFFFFF;
margin-left: auto;
align-self: flex-end; /* Align user messages to the right */
}
.chat-input-form {
display: flex;
padding: 20px;
border-top: 1px solid #282828;
}
.chat-input {
flex-grow: 1;
padding: 12px;
border-radius: 25px;
border: none;
background-color: #282828;
color: #FFFFFF;
}
.chat-input:focus {
outline: none;
box-shadow: 0 0 0 2px #1DB954;
}
.send-button {
padding: 12px 20px;
border-radius: 25px;
border: none;
background: linear-gradient(to right, #1DB954, #1ED760);
color: #FFFFFF;
margin-left: 10px;
cursor: pointer;
font-weight: 500;
}
.spotify-embed {
margin-top: 15px;
}
/* Responsive adjustments */
@media (max-width: 768px) {
.hero-title {
font-size: 2rem;
}
.hero-subtitle {
font-size: 0.9rem;
}
.chat-container {
max-height: calc(100vh - 150px); /* Adjust for smaller screens */
}
.chat-box {
padding: 15px;
}
.chat-input-form {
padding: 15px;
}
}
.playlist-container {
width: 100%;
max-width: 600px;
margin-top: 20px;
background-color: #181818;
border-radius: 15px;
padding: 20px;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.5);
}
.playlist-title {
font-family: 'Poppins', sans-serif;
font-size: 1.5rem;
font-weight: 600;
margin-bottom: 15px;
text-align: center;
}
.playlist {
list-style: none;
padding: 0;
margin: 0;
}
.playlist-item {
padding: 10px 0;
border-bottom: 1px solid #282828;
display: flex;
justify-content: space-between;
align-items: center;
}
.playlist-item:last-child {
border-bottom: none;
}
.song-title {
font-weight: 500;
}
.song-artist {
color: #B3B3B3;
}
.typing-indicator span {
display: inline-block;
width: 8px;
height: 8px;
border-radius: 50%;
background-color: #B3B3B3;
margin: 0 2px;
animation: typing 1s infinite;
}
.typing-indicator span:nth-child(2) {
animation-delay: 0.2s;
}
.typing-indicator span:nth-child(3) {
animation-delay: 0.4s;
}
@keyframes typing {
0%, 100% {
transform: translateY(0);
}
50% {
transform: translateY(-5px);
}
}

108
static/js/chat.js Normal file

@@ -0,0 +1,108 @@
document.addEventListener('DOMContentLoaded', function() {
const chatForm = document.getElementById('chat-form');
const chatBox = document.getElementById('chat-box');
const chatInput = document.getElementById('chat-input');
const csrfToken = document.querySelector('[name=csrfmiddlewaretoken]').value;
chatForm.addEventListener('submit', function(event) {
event.preventDefault();
const userMessage = chatInput.value.trim();
if (userMessage === '') {
return;
}
// Add user message to chat box
appendMessage(userMessage, 'user-message');
chatInput.value = '';
showTypingIndicator();
fetch(window.location.href, {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'X-CSRFToken': csrfToken,
'X-Requested-With': 'XMLHttpRequest',
},
body: new URLSearchParams({
'message': userMessage
})
})
.then(response => response.json())
.then(data => {
removeTypingIndicator();
// Add AI message to chat box
appendMessage(data.ai_message, 'ai-message');
// If a playlist is returned, display it
if (data.playlist && data.playlist.length > 0) {
displayPlaylist(data.playlist);
}
})
.catch(error => {
console.error('Error:', error);
removeTypingIndicator();
appendMessage('Sorry, something went wrong.', 'ai-message');
});
});
function appendMessage(message, className) {
const messageElement = document.createElement('div');
messageElement.classList.add('chat-message', className);
const p = document.createElement('p');
p.textContent = message;
messageElement.appendChild(p);
chatBox.appendChild(messageElement);
chatBox.scrollTop = chatBox.scrollHeight; // Scroll to bottom
}
function showTypingIndicator() {
const typingIndicator = document.createElement('div');
typingIndicator.classList.add('chat-message', 'ai-message', 'typing-indicator');
typingIndicator.innerHTML = '<p><span>.</span><span>.</span><span>.</span></p>';
chatBox.appendChild(typingIndicator);
chatBox.scrollTop = chatBox.scrollHeight;
}
function removeTypingIndicator() {
const typingIndicator = document.querySelector('.typing-indicator');
if (typingIndicator) {
typingIndicator.remove();
}
}
function displayPlaylist(playlist) {
let playlistContainer = document.querySelector('.playlist-container');
if (!playlistContainer) {
playlistContainer = document.createElement('div');
playlistContainer.classList.add('playlist-container');
const heroSection = document.querySelector('.hero-section');
heroSection.appendChild(playlistContainer);
}
playlistContainer.innerHTML = ''; // Clear previous playlist
const title = document.createElement('h2');
title.classList.add('playlist-title');
title.textContent = 'Your Playlist';
playlistContainer.appendChild(title);
const ul = document.createElement('ul');
ul.classList.add('playlist');
playlist.forEach(song => {
const li = document.createElement('li');
li.classList.add('playlist-item');
if (song.video_id) {
li.innerHTML = `<iframe width="100%" height="315" src="https://www.youtube.com/embed/${song.video_id}" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>`;
} else {
li.innerHTML = `<span class="song-title">${song.title}</span> - <span class="song-artist">${song.artist}</span> (Video not found)`;
}
ul.appendChild(li);
});
playlistContainer.appendChild(ul);
}
});

View File

@@ -1,21 +1,210 @@
/* custom.css */
:root {
--bg-color-start: #6a11cb;
--bg-color-end: #2575fc;
--text-color: #ffffff;
--card-bg-color: rgba(255, 255, 255, 0.01);
--card-border-color: rgba(255, 255, 255, 0.1);
}
body {
html, body {
height: 100%;
margin: 0;
font-family: 'Inter', sans-serif;
background: linear-gradient(45deg, var(--bg-color-start), var(--bg-color-end));
color: var(--text-color);
padding: 0;
overflow: hidden; /* Prevents the body from scrolling */
}
body {
font-family: 'Roboto', sans-serif;
background-color: #121212;
color: #FFFFFF;
}
.hero-section {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
min-height: 100vh;
height: 100vh; /* Changed from min-height to height */
padding: 20px;
box-sizing: border-box;
text-align: center;
overflow: hidden;
position: relative;
background: linear-gradient(rgba(18, 18, 18, 0.8), rgba(18, 18, 18, 0.8)), url('https://images.unsplash.com/photo-1511379938547-c1f69419868d?q=80&w=2070&auto=format&fit=crop');
background-size: cover;
background-position: center;
}
.hero-title {
font-family: 'Poppins', sans-serif;
font-size: 2.5rem; /* Adjusted for a more balanced look */
font-weight: 600;
margin-bottom: 10px;
}
.hero-subtitle {
font-size: 1rem; /* Adjusted for a more balanced look */
margin-bottom: 20px;
}
.chat-container {
width: 100%;
max-width: 600px;
background-color: #181818;
border-radius: 15px;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.5);
display: flex;
flex-direction: column;
/* Let the container take up available space, but not exceed the screen height */
flex-grow: 1;
max-height: calc(100vh - 180px); /* Adjust 180px based on title/subtitle height */
}
.chat-box {
flex-grow: 1; /* This makes the chat box fill the available space */
overflow-y: auto;
padding: 20px;
padding-right: 10px;
}
.chat-message {
padding: 12px;
border-radius: 10px;
margin-bottom: 12px;
max-width: 85%;
word-wrap: break-word;
display: flex; /* Use flexbox for alignment */
}
.ai-message {
background-color: #282828;
color: #FFFFFF;
align-self: flex-start;
}
.user-message {
background: linear-gradient(to right, #1DB954, #1ED760);
color: #FFFFFF;
margin-left: auto;
align-self: flex-end; /* Align user messages to the right */
}
.chat-input-form {
display: flex;
padding: 20px;
border-top: 1px solid #282828;
}
.chat-input {
flex-grow: 1;
padding: 12px;
border-radius: 25px;
border: none;
background-color: #282828;
color: #FFFFFF;
}
.chat-input:focus {
outline: none;
box-shadow: 0 0 0 2px #1DB954;
}
.send-button {
padding: 12px 20px;
border-radius: 25px;
border: none;
background: linear-gradient(to right, #1DB954, #1ED760);
color: #FFFFFF;
margin-left: 10px;
cursor: pointer;
font-weight: 500;
}
.spotify-embed {
margin-top: 15px;
}
/* Responsive adjustments */
@media (max-width: 768px) {
.hero-title {
font-size: 2rem;
}
.hero-subtitle {
font-size: 0.9rem;
}
.chat-container {
max-height: calc(100vh - 150px); /* Adjust for smaller screens */
}
.chat-box {
padding: 15px;
}
.chat-input-form {
padding: 15px;
}
}
.playlist-container {
width: 100%;
max-width: 600px;
margin-top: 20px;
background-color: #181818;
border-radius: 15px;
padding: 20px;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.5);
}
.playlist-title {
font-family: 'Poppins', sans-serif;
font-size: 1.5rem;
font-weight: 600;
margin-bottom: 15px;
text-align: center;
}
.playlist {
list-style: none;
padding: 0;
margin: 0;
}
.playlist-item {
padding: 10px 0;
border-bottom: 1px solid #282828;
display: flex;
justify-content: space-between;
align-items: center;
}
.playlist-item:last-child {
border-bottom: none;
}
.song-title {
font-weight: 500;
}
.song-artist {
color: #B3B3B3;
}
.typing-indicator span {
display: inline-block;
width: 8px;
height: 8px;
border-radius: 50%;
background-color: #B3B3B3;
margin: 0 2px;
animation: typing 1s infinite;
}
.typing-indicator span:nth-child(2) {
animation-delay: 0.2s;
}
.typing-indicator span:nth-child(3) {
animation-delay: 0.4s;
}
@keyframes typing {
0%, 100% {
transform: translateY(0);
}
50% {
transform: translateY(-5px);
}
}

108
staticfiles/js/chat.js Normal file

@@ -0,0 +1,108 @@
document.addEventListener('DOMContentLoaded', function() {
const chatForm = document.getElementById('chat-form');
const chatBox = document.getElementById('chat-box');
const chatInput = document.getElementById('chat-input');
const csrfToken = document.querySelector('[name=csrfmiddlewaretoken]').value;
chatForm.addEventListener('submit', function(event) {
event.preventDefault();
const userMessage = chatInput.value.trim();
if (userMessage === '') {
return;
}
// Add user message to chat box
appendMessage(userMessage, 'user-message');
chatInput.value = '';
showTypingIndicator();
fetch(window.location.href, {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'X-CSRFToken': csrfToken,
'X-Requested-With': 'XMLHttpRequest',
},
body: new URLSearchParams({
'message': userMessage
})
})
.then(response => response.json())
.then(data => {
removeTypingIndicator();
// Add AI message to chat box
appendMessage(data.ai_message, 'ai-message');
// If a playlist is returned, display it
if (data.playlist && data.playlist.length > 0) {
displayPlaylist(data.playlist);
}
})
.catch(error => {
console.error('Error:', error);
removeTypingIndicator();
appendMessage('Sorry, something went wrong.', 'ai-message');
});
});
function appendMessage(message, className) {
const messageElement = document.createElement('div');
messageElement.classList.add('chat-message', className);
const p = document.createElement('p');
p.textContent = message;
messageElement.appendChild(p);
chatBox.appendChild(messageElement);
chatBox.scrollTop = chatBox.scrollHeight; // Scroll to bottom
}
function showTypingIndicator() {
const typingIndicator = document.createElement('div');
typingIndicator.classList.add('chat-message', 'ai-message', 'typing-indicator');
typingIndicator.innerHTML = '<p><span>.</span><span>.</span><span>.</span></p>';
chatBox.appendChild(typingIndicator);
chatBox.scrollTop = chatBox.scrollHeight;
}
function removeTypingIndicator() {
const typingIndicator = document.querySelector('.typing-indicator');
if (typingIndicator) {
typingIndicator.remove();
}
}
function displayPlaylist(playlist) {
let playlistContainer = document.querySelector('.playlist-container');
if (!playlistContainer) {
playlistContainer = document.createElement('div');
playlistContainer.classList.add('playlist-container');
const heroSection = document.querySelector('.hero-section');
heroSection.appendChild(playlistContainer);
}
playlistContainer.innerHTML = ''; // Clear previous playlist
const title = document.createElement('h2');
title.classList.add('playlist-title');
title.textContent = 'Your Playlist';
playlistContainer.appendChild(title);
const ul = document.createElement('ul');
ul.classList.add('playlist');
playlist.forEach(song => {
const li = document.createElement('li');
li.classList.add('playlist-item');
if (song.video_id) {
li.innerHTML = `<iframe width="100%" height="315" src="https://www.youtube.com/embed/${song.video_id}" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>`;
} else {
li.innerHTML = `<span class="song-title">${song.title}</span> - <span class="song-artist">${song.artist}</span> (Video not found)`;
}
ul.appendChild(li);
});
playlistContainer.appendChild(ul);
}
});