From 1ff7bbc53c093f88d7b052f450293e820e4b1de7 Mon Sep 17 00:00:00 2001 From: Flatlogic Bot Date: Thu, 27 Nov 2025 07:52:19 +0000 Subject: [PATCH] Asistente V1 --- .perm_test_apache | 0 .perm_test_exec | 0 ai/__init__.py | 3 + ai/local_ai_api.py | 420 ++++++++++++++++++ config/__pycache__/__init__.cpython-311.pyc | Bin 159 -> 159 bytes config/__pycache__/settings.cpython-311.pyc | Bin 4210 -> 4220 bytes config/__pycache__/urls.cpython-311.pyc | Bin 1143 -> 1143 bytes config/__pycache__/wsgi.cpython-311.pyc | Bin 679 -> 679 bytes config/settings.py | 4 +- core/__pycache__/__init__.cpython-311.pyc | Bin 157 -> 157 bytes core/__pycache__/admin.cpython-311.pyc | Bin 777 -> 1617 bytes core/__pycache__/apps.cpython-311.pyc | Bin 524 -> 524 bytes core/__pycache__/forms.cpython-311.pyc | Bin 847 -> 154 bytes core/__pycache__/models.cpython-311.pyc | Bin 1409 -> 4291 bytes core/__pycache__/urls.cpython-311.pyc | Bin 347 -> 378 bytes core/__pycache__/views.cpython-311.pyc | Bin 2006 -> 2233 bytes core/admin.py | 24 +- core/context_processors.py | 13 + core/forms.py | 8 +- ..._serviceintervention_sparepart_and_more.py | 51 +++ ...003_alter_machinemodel_options_and_more.py | 98 ++++ .../__pycache__/0001_initial.cpython-311.pyc | Bin 1660 -> 1660 bytes ...vention_sparepart_and_more.cpython-311.pyc | Bin 0 -> 3227 bytes ...hinemodel_options_and_more.cpython-311.pyc | Bin 0 -> 4219 bytes .../__pycache__/__init__.cpython-311.pyc | Bin 168 -> 168 bytes core/models.py | 65 ++- core/templates/base.html | 30 ++ core/templates/core/article_detail.html | 14 + core/templates/core/index.html | 219 +++------ core/urls.py | 6 +- core/views.py | 24 +- static/css/custom.css | 70 +++ staticfiles/css/custom.css | 79 +++- 33 files changed, 912 insertions(+), 216 deletions(-) create mode 100644 .perm_test_apache create mode 100644 .perm_test_exec create mode 100644 ai/__init__.py create mode 100644 ai/local_ai_api.py create mode 100644 core/context_processors.py create mode 100644 core/migrations/0002_machinemodel_serviceintervention_sparepart_and_more.py create mode 100644 core/migrations/0003_alter_machinemodel_options_and_more.py create mode 100644 core/migrations/__pycache__/0002_machinemodel_serviceintervention_sparepart_and_more.cpython-311.pyc create mode 100644 core/migrations/__pycache__/0003_alter_machinemodel_options_and_more.cpython-311.pyc create mode 100644 core/templates/base.html create mode 100644 core/templates/core/article_detail.html create mode 100644 static/css/custom.css diff --git a/.perm_test_apache b/.perm_test_apache new file mode 100644 index 0000000..e69de29 diff --git a/.perm_test_exec b/.perm_test_exec new file mode 100644 index 0000000..e69de29 diff --git a/ai/__init__.py b/ai/__init__.py new file mode 100644 index 0000000..37a7b09 --- /dev/null +++ b/ai/__init__.py @@ -0,0 +1,3 @@ +"""Helpers for interacting with the Flatlogic AI proxy from Django code.""" + +from .local_ai_api import LocalAIApi, create_response, request, decode_json_from_response # noqa: F401 diff --git a/ai/local_ai_api.py b/ai/local_ai_api.py new file mode 100644 index 0000000..bcff732 --- /dev/null +++ b/ai/local_ai_api.py @@ -0,0 +1,420 @@ +""" +LocalAIApi — lightweight Python client for the Flatlogic AI proxy. 
+ +Usage (inside the Django workspace): + + from ai.local_ai_api import LocalAIApi + + response = LocalAIApi.create_response({ + "input": [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Summarise this text in two sentences."}, + ], + "text": {"format": {"type": "json_object"}}, + }) + + if response.get("success"): + data = LocalAIApi.decode_json_from_response(response) + # ... + +# Typical successful payload (truncated): +# { +# "id": "resp_xxx", +# "status": "completed", +# "output": [ +# {"type": "reasoning", "summary": []}, +# {"type": "message", "content": [{"type": "output_text", "text": "Your final answer here."}]} +# ], +# "usage": { "input_tokens": 123, "output_tokens": 456 } +# } + +The helper automatically injects the project UUID header and falls back to +reading executor/.env if environment variables are missing. +""" + +from __future__ import annotations + +import json +import os +import time +import ssl +from typing import Any, Dict, Iterable, Optional +from urllib import error as urlerror +from urllib import request as urlrequest + +__all__ = [ + "LocalAIApi", + "create_response", + "request", + "fetch_status", + "await_response", + "extract_text", + "decode_json_from_response", +] + + +_CONFIG_CACHE: Optional[Dict[str, Any]] = None + + +class LocalAIApi: + """Static helpers mirroring the PHP implementation.""" + + @staticmethod + def create_response(params: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + return create_response(params, options or {}) + + @staticmethod + def request(path: Optional[str] = None, payload: Optional[Dict[str, Any]] = None, + options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + return request(path, payload or {}, options or {}) + + @staticmethod + def extract_text(response: Dict[str, Any]) -> str: + return extract_text(response) + + @staticmethod + def decode_json_from_response(response: Dict[str, Any]) -> Optional[Dict[str, Any]]: + return decode_json_from_response(response) + + +def create_response(params: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Signature compatible with the OpenAI Responses API.""" + options = options or {} + payload = dict(params) + + if not isinstance(payload.get("input"), list) or not payload["input"]: + return { + "success": False, + "error": "input_missing", + "message": 'Parameter "input" is required and must be a non-empty list.', + } + + cfg = _config() + if not payload.get("model"): + payload["model"] = cfg["default_model"] + + initial = request(options.get("path"), payload, options) + if not initial.get("success"): + return initial + + data = initial.get("data") + if isinstance(data, dict) and "ai_request_id" in data: + ai_request_id = data["ai_request_id"] + poll_timeout = int(options.get("poll_timeout", 300)) + poll_interval = int(options.get("poll_interval", 5)) + return await_response(ai_request_id, { + "interval": poll_interval, + "timeout": poll_timeout, + "headers": options.get("headers"), + "timeout_per_call": options.get("timeout"), + }) + + return initial + + +def request(path: Optional[str], payload: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Perform a raw request to the AI proxy.""" + cfg = _config() + options = options or {} + + resolved_path = path or options.get("path") or cfg["responses_path"] + if not resolved_path: + return { + "success": False, + "error": "project_id_missing", + "message": "PROJECT_ID is not defined; cannot resolve 
AI proxy endpoint.", + } + + project_uuid = cfg["project_uuid"] + if not project_uuid: + return { + "success": False, + "error": "project_uuid_missing", + "message": "PROJECT_UUID is not defined; aborting AI request.", + } + + if "project_uuid" not in payload and project_uuid: + payload["project_uuid"] = project_uuid + + url = _build_url(resolved_path, cfg["base_url"]) + opt_timeout = options.get("timeout") + timeout = int(cfg["timeout"] if opt_timeout is None else opt_timeout) + verify_tls = options.get("verify_tls", cfg["verify_tls"]) + + headers: Dict[str, str] = { + "Content-Type": "application/json", + "Accept": "application/json", + cfg["project_header"]: project_uuid, + } + extra_headers = options.get("headers") + if isinstance(extra_headers, Iterable): + for header in extra_headers: + if isinstance(header, str) and ":" in header: + name, value = header.split(":", 1) + headers[name.strip()] = value.strip() + + body = json.dumps(payload, ensure_ascii=False).encode("utf-8") + return _http_request(url, "POST", body, headers, timeout, verify_tls) + + +def fetch_status(ai_request_id: Any, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Fetch status for a queued AI request.""" + cfg = _config() + options = options or {} + + project_uuid = cfg["project_uuid"] + if not project_uuid: + return { + "success": False, + "error": "project_uuid_missing", + "message": "PROJECT_UUID is not defined; aborting status check.", + } + + status_path = _resolve_status_path(ai_request_id, cfg) + url = _build_url(status_path, cfg["base_url"]) + + opt_timeout = options.get("timeout") + timeout = int(cfg["timeout"] if opt_timeout is None else opt_timeout) + verify_tls = options.get("verify_tls", cfg["verify_tls"]) + + headers: Dict[str, str] = { + "Accept": "application/json", + cfg["project_header"]: project_uuid, + } + extra_headers = options.get("headers") + if isinstance(extra_headers, Iterable): + for header in extra_headers: + if isinstance(header, str) and ":" in header: + name, value = header.split(":", 1) + headers[name.strip()] = value.strip() + + return _http_request(url, "GET", None, headers, timeout, verify_tls) + + +def await_response(ai_request_id: Any, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Poll status endpoint until the request is complete or timed out.""" + options = options or {} + timeout = int(options.get("timeout", 300)) + interval = int(options.get("interval", 5)) + if interval <= 0: + interval = 5 + per_call_timeout = options.get("timeout_per_call") + + deadline = time.time() + max(timeout, interval) + + while True: + status_resp = fetch_status(ai_request_id, { + "headers": options.get("headers"), + "timeout": per_call_timeout, + "verify_tls": options.get("verify_tls"), + }) + if status_resp.get("success"): + data = status_resp.get("data") or {} + if isinstance(data, dict): + status_value = data.get("status") + if status_value == "success": + return { + "success": True, + "status": 200, + "data": data.get("response", data), + } + if status_value == "failed": + return { + "success": False, + "status": 500, + "error": str(data.get("error") or "AI request failed"), + "data": data, + } + else: + return status_resp + + if time.time() >= deadline: + return { + "success": False, + "error": "timeout", + "message": "Timed out waiting for AI response.", + } + time.sleep(interval) + + +def extract_text(response: Dict[str, Any]) -> str: + """Public helper to extract plain text from a Responses payload.""" + return _extract_text(response) + + +def 
decode_json_from_response(response: Dict[str, Any]) -> Optional[Dict[str, Any]]: + """Attempt to decode JSON emitted by the model (handles markdown fences).""" + text = _extract_text(response) + if text == "": + return None + + try: + decoded = json.loads(text) + if isinstance(decoded, dict): + return decoded + except json.JSONDecodeError: + pass + + stripped = text.strip() + if stripped.startswith("```json"): + stripped = stripped[7:] + if stripped.endswith("```"): + stripped = stripped[:-3] + stripped = stripped.strip() + if stripped and stripped != text: + try: + decoded = json.loads(stripped) + if isinstance(decoded, dict): + return decoded + except json.JSONDecodeError: + return None + return None + + +def _extract_text(response: Dict[str, Any]) -> str: + payload = response.get("data") if response.get("success") else response.get("response") + if isinstance(payload, dict): + output = payload.get("output") + if isinstance(output, list): + combined = "" + for item in output: + content = item.get("content") if isinstance(item, dict) else None + if isinstance(content, list): + for block in content: + if isinstance(block, dict) and block.get("type") == "output_text" and block.get("text"): + combined += str(block["text"]) + if combined: + return combined + choices = payload.get("choices") + if isinstance(choices, list) and choices: + message = choices[0].get("message") + if isinstance(message, dict) and message.get("content"): + return str(message["content"]) + if isinstance(payload, str): + return payload + return "" + + +def _config() -> Dict[str, Any]: + global _CONFIG_CACHE # noqa: PLW0603 + if _CONFIG_CACHE is not None: + return _CONFIG_CACHE + + _ensure_env_loaded() + + base_url = os.getenv("AI_PROXY_BASE_URL", "https://flatlogic.com") + project_id = os.getenv("PROJECT_ID") or None + responses_path = os.getenv("AI_RESPONSES_PATH") + if not responses_path and project_id: + responses_path = f"/projects/{project_id}/ai-request" + + _CONFIG_CACHE = { + "base_url": base_url, + "responses_path": responses_path, + "project_id": project_id, + "project_uuid": os.getenv("PROJECT_UUID"), + "project_header": os.getenv("AI_PROJECT_HEADER", "project-uuid"), + "default_model": os.getenv("AI_DEFAULT_MODEL", "gpt-5-mini"), + "timeout": int(os.getenv("AI_TIMEOUT", "30")), + "verify_tls": os.getenv("AI_VERIFY_TLS", "true").lower() not in {"0", "false", "no"}, + } + return _CONFIG_CACHE + + +def _build_url(path: str, base_url: str) -> str: + trimmed = path.strip() + if trimmed.startswith("http://") or trimmed.startswith("https://"): + return trimmed + if trimmed.startswith("/"): + return f"{base_url}{trimmed}" + return f"{base_url}/{trimmed}" + + +def _resolve_status_path(ai_request_id: Any, cfg: Dict[str, Any]) -> str: + base_path = (cfg.get("responses_path") or "").rstrip("/") + if not base_path: + return f"/ai-request/{ai_request_id}/status" + if not base_path.endswith("/ai-request"): + base_path = f"{base_path}/ai-request" + return f"{base_path}/{ai_request_id}/status" + + +def _http_request(url: str, method: str, body: Optional[bytes], headers: Dict[str, str], + timeout: int, verify_tls: bool) -> Dict[str, Any]: + """ + Shared HTTP helper for GET/POST requests. 
+ """ + req = urlrequest.Request(url, data=body, method=method.upper()) + for name, value in headers.items(): + req.add_header(name, value) + + context = None + if not verify_tls: + context = ssl.create_default_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + + try: + with urlrequest.urlopen(req, timeout=timeout, context=context) as resp: + status = resp.getcode() + response_body = resp.read().decode("utf-8", errors="replace") + except urlerror.HTTPError as exc: + status = exc.getcode() + response_body = exc.read().decode("utf-8", errors="replace") + except Exception as exc: # pylint: disable=broad-except + return { + "success": False, + "error": "request_failed", + "message": str(exc), + } + + decoded = None + if response_body: + try: + decoded = json.loads(response_body) + except json.JSONDecodeError: + decoded = None + + if 200 <= status < 300: + return { + "success": True, + "status": status, + "data": decoded if decoded is not None else response_body, + } + + error_message = "AI proxy request failed" + if isinstance(decoded, dict): + error_message = decoded.get("error") or decoded.get("message") or error_message + elif response_body: + error_message = response_body + + return { + "success": False, + "status": status, + "error": error_message, + "response": decoded if decoded is not None else response_body, + } + + +def _ensure_env_loaded() -> None: + """Populate os.environ from executor/.env if variables are missing.""" + if os.getenv("PROJECT_UUID") and os.getenv("PROJECT_ID"): + return + + env_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".env")) + if not os.path.exists(env_path): + return + + try: + with open(env_path, "r", encoding="utf-8") as handle: + for line in handle: + stripped = line.strip() + if not stripped or stripped.startswith("#") or "=" not in stripped: + continue + key, value = stripped.split("=", 1) + key = key.strip() + value = value.strip().strip('\'"') + if key and not os.getenv(key): + os.environ[key] = value + except OSError: + pass diff --git a/config/__pycache__/__init__.cpython-311.pyc b/config/__pycache__/__init__.cpython-311.pyc index 3d6501c67fa5c80fdda8a8ee57862699aabd319d..1e86240f55b4d2f0fb8df77f2f7fe7ce981f41b9 100644 GIT binary patch delta 20 acmbQwIG>ScIWI340}zBWYGh92nG66cV+1h( delta 20 acmbQwIG>ScIWI340}vQ{e#)50GZ_Fd1qB`e diff --git a/config/__pycache__/settings.cpython-311.pyc b/config/__pycache__/settings.cpython-311.pyc index dadfaa7db630a06a9bc0da9edbc375273aefa0ed..210aa8971a392a05f79dc2921b70bd10b768ff95 100644 GIT binary patch delta 60 zcmeyQ@JE4nIWI340}!M!X=KjW$lJ;-T&$Z~T*d2JT9jXqs_&baQk0ppc{aB!6JyZk OoxIshjM9@;1$+S^T@lIv delta 50 zcmeyP@JWGpIWI340}wn|@;#$(BX27=XP$0p@h#@i5a-QHxn-FcgEt@M&1Pa0oUAY4 F3jnn75E1|Y diff --git a/config/__pycache__/urls.cpython-311.pyc b/config/__pycache__/urls.cpython-311.pyc index 139db1071801d00bd7e1a167305bffd074d952f6..31e21b5b1fb03ff7d3b085fce3ff7ea20e9912b4 100644 GIT binary patch delta 21 bcmey)@tuQbIWI340}zBWYGiKY$z}lnK9U6M delta 21 bcmey)@tuQbIWI340}vQ{e#+R$lg$DEL?H#( diff --git a/config/__pycache__/wsgi.cpython-311.pyc b/config/__pycache__/wsgi.cpython-311.pyc index 79ce690f602e05f6bfa8a8e253edcb377296b788..bdafd83aaec64ef46a16f91f75067137b838ce43 100644 GIT binary patch delta 21 bcmZ3^x}23~IWI340}zBWYGiKYnauI4y@7aG!r2>I=L>Jw%O2|JPbX%s= zxtgJb91=zt^@vYRN(ob#>M6czs=j7wzHaK2D1_6Ut`FhrF?gN8lQxBXCQREWOb4rM zclH?eJb@=|$M8Do4v3k%$EfvT%m8Ed$&C4XjCvo&95Cje%vfNBbz%%$;HeENb({Ha zaDh=9sw}SB&bAxy)sS&d=GSYs;NNT!%h7c%e!33-62#EqLF|Ts(+eCrUD>Xl!QhZ^ 
zVp2v-g;7(5M`H?Xs>l^hTu2=bQ?$qN@RDXqH zH>%kVuR5XN)g=C<+MdXRPXZcrqDg>8?>ZlyZ+vZyRT`^!iIJ{pe+i~w`JMy{NT8ZJ z(?AAYmV3gc8mIzmT9yUh|)rUcf4mPiJ3r!4Jsz!wPs{Q5#FMN zP(~O;7y^(3iAI~Q2PqT7IGvn(6no=cz^J2HfZM^o$6%KltF4hrV+F6&6P@};JO?X} zMQmo64pC?1WS<&J=>JaU9v!x9C_rn}~AMz*h3nCVooQ5y=x3H_SKW8NvI`@&&v1p?|ERRC@a z_X&g>dDB>FjV*(EQ?F08iS$kdM87p-K$h8#9c+b5P8h_(-4OUckr^m;?nOd}fxy3# zz@LuH3BCoj&V?)_UuB|LTC7PDlLOt&?oIA(`q{h_3qXONW;6uYRw<<|^5XRF-69`O zAHUD(U}JoL?x1)^XPb1kttfP%O&-FL-ke;##D!~wOO+^dJ^LI{h7y+i8J%jtEUetwU?Dg&9PZ)VbM9q^vv*eJac(Y!!t=87b-oUR%=~gUWcP?5 z0xt5P0f2}>OxN(th6%_(6k4&tVGZJP5o*g8v5t_Pc+(j+sJKHs)_#L2vyNHwf(jX! zMfqSXU8kqKu6yjbO8C9g7HtY;`N9!BUnRSobvP4OD$D6*kNSdgOg)FWD%IrF5tLv@ z*an{o^SR#JHhEIl=@3+!P`}ApSFlcd-?mf|%pGByztwwZztqatSTq5k{!A7T8ZRhSbZlq!XZ7E zZt`+S^&j{*NUp`p-oi8b^Ijnv6mBbhYY3$glqQA=TQEWYW4|`aRo^4=Gs1Th8T0?z HqGf&nN^^Vf diff --git a/core/__pycache__/apps.cpython-311.pyc b/core/__pycache__/apps.cpython-311.pyc index 6435d92a257f85ac41d6fd22a9e528c3da4a1ec5..7ad0138f9794fc56e0138052e16c463132050eb4 100644 GIT binary patch delta 20 ZcmeBS>0#kn&dbZi00iNT8kq}um;fl81CRg! delta 20 acmeBS>0#kn&dbZi00hRKpE4HmFaZE9J_LOL diff --git a/core/__pycache__/forms.cpython-311.pyc b/core/__pycache__/forms.cpython-311.pyc index f6e5c4e3024d0f534a8b93046d15a44a0c8eeba6..3479328d4dace9ea8424e0305c7f22d19f44a5ab 100644 GIT binary patch delta 111 zcmX@lHjB}6IWI340}!lW)X3xq(vLwL7+{1lKC=NC(-~42f*CX!{Z=v*F)=Vqp3G>f l^NYhKH$SB`C)KWq6{rNHz8GZ62WCb_#t#fIqKFwN1_1Qf5>o&G literal 847 zcmZ`%v2N5r5S{hfzRN+lAV`E#KtsV5#6N(jP>?`L1C<-g@op}@*yqcxEl8IlMM?^! z^b|peKcTwvv@KP-i!P$LiWx5^vXsp3KF{pTzS)`e_tn)NpbVe?IrxR&-()o6{Zkfi zP&o$yf@P4ikTDRB;F**2kTY=LEeQ7%geTd^Z4O^>fSAzHjZ`y3>MOhD+=$rvVcE*wmN%Sy-v#{{nFS*~s5Z}Byei_I3F z4*}q}f+L2|9q#a%Djbu`qzLH0k)TLCB%;PjO-HA#Im6pfmo{&0G z>7-1H@$SGk-6$Hzxr`#yi=w;`RYv;#C_1X*%z6}c*m&raneq2inF;*^7F5ylWpGgB zGN|^daajfOvrMY8P{F4{9qLJ($RH_{4C*#+O^%IAH&rWmk~sBKgp>bcwzhHR%^tp+ z*7MA#p&nz1+CbPHxat8JeS)=STRkGJj#?+J(*V7U-bT1dFn1j9J)6VrcvbIT-{G|> zh@HCTi%~ot7K;8Hh&*6`s+b^=~9wz#4W}u(5Xq!Dh)ClKmmY$})epP%N2Ui@;>8Zd(J=jOiKi z<4TAQd*G0R52SF_Zc@(H7AXoJsMya!=GHiS1 zxu@Uj?$@v1``*)ELm|Ha?YiRw<)~K>{(~Q_=5BU2SDc~D>4a!` z${L|19eS==ow7o6Oa9N({Mh_6bnXjO$cseCI~)RawpAq}ajZKZI6z@|?S=|>$he5R zM+VddnZDK>;vs#+OT6p82VjOrTc*f+`LXqBd7jbd&tUzBqUA5kx1>x_*MZyh%TQiI%PF&E@{|IE80R#K)9*CPkkMxUH}M$Ea(X5>mO za-|-*Qa!ifa=6DggkJIj$6mG>UeSGcL+B;8#;9zEr*Fa8e}U8X^sYDuEFjKxS1){Z zpn;os*893*xf>0nA@l25-U$|4Wg8M;6jRSSyDg22fo91Jso<*BI3^$KN(Y78=h zz);)&q09u;3rtxmC{Mpv6Cw-4&~hXxtA;{k5~pfBYiNaQ3^5eIxR1~Uxl%Hg2WJ7s z0H6RRxIp}K`KtCDH|w~bR@}T9h{9V3;wWT!XXsLe>VR;*R!>he#HMcN#ZYEPfb4;{ zC;AUy^e28S`cX$7{kx#k8U0{xF%4GX(G|xn+Of@pX*tqXy&2S~l8%Oepza^|!)?>M zw;9CYor0LX2clM(HE)he1;`V8L(7+=w+uUy1H`^2L(x=gFr(>;q1>QnRf8^48v(2^ z*sE|2Q5WvcxNQxTw3`4vIA#~tDDXaL!H}R4^nU``h61tSm5cS*usQPC zv-ix4c{6sY7Q0lBU8<%VuK|eY!IhDE^x)dbCxd46WG#BK9z9t---sQ>g^rpZPCS#% zSf&=s)MFV~WoUeLq&_rm9-lHVerXQP)rRKkLvyBU55mSFgpF>3uyF`sqsai+cne{p zo7k{{2kY=li~($$N8v)-u(1ee|1Bg*2v*nC7PJV|178?AyfvZ3)m;p9%<#b%`hY#d zP24aG!3FT^5n>o`c?fur!zknp$_yWM690OjtJDZ_ISfr(F8BbneuS>bTfU$3@#c6V zAt-CYT@VuKlIaJX-0Qo#ATi1z2*8F!yXFs&0f3-EG6*pdvj(!1-B1eL1KAA<9;c}Z z7bL{KOe#HbHhtRiWvEIN;7}M1oJ~uanG2_9QrYZsAf?7LOj`t>)PUzpUYO2IrE;kW z%b(QBOC?C?MCOcB>11kZ3K%{BqyjC$xD`@h$<~&71uwEyreJZTsS=b~)Sy<7!32X6 zUUM)kgmPRu_w->wRSH_d%}&7}>qmmNWuGHKLCcfUK?&_S&$qvTk%ZF*(!N4bgJbLL z5XvTi@RDhg7K$=rVUkg~38hw0hSEh+Aqr(ecnP&7HUmd5Rct{=TZ0Z4ZC&r1p>yvJ z7h#ujnBfs1&{ljWj@A6y6m5B^&C*oLt2%onMSBC%o`E$HZF`XR7$WV*KZdmE`5bAd zpaZ`>u(E=l-JsHPY_GJ|Fy-_#;_UQ&sK;F3k*;(F3#9g;Ae+S@OUm(OSDcpO%idvf zWVn1}cn+MTuWVQ3mcIbkg)Sl+mKW-@1w$!Q7Kep;aDLxTc5Wj6hdALd5JddTYq{@! 
zZT4S==dtm_@Be+`!oN;jFf;kuiA(hpmu!aVz1&O*v96S0g|oR+x!J6gJage}GL^OK zJpUvVy~?6`p5Eq_n$?#)l}(}|xMQ0o@IDN@!^;{$L_T7v1(A%rU$H$)BF~9hY8Io%uxgL_MlZ~yUQ;Qy} zM~^|ii45cNkhngEwIk_TBwdfBVU_3zrl%3}{m=eBVPbhZ|qtw(38=P}`Yf(fUa zV8ZzX6Hb!>3FnH48D}SIXB{+CI^nf^w%P&Hu z-Ah)E-~03hj=yX(_Xb7Cti5E#b1(P;$6vOY`v*m|$E%CKi|9p A@Bjb+ literal 1409 zcmZuxy>Ht_6hDg8w@169EsumE? zlf`$Pzf%bPA&e=dC(6Y#C~pu!M6yv&(j^I{&?+LSSBS_|>Ox*Z$1*~vuoBW`qHLo^ z`dpX|iD!Bap|)#I;8pCw%enF6V#5KVV8(?lYlDaR0h3M(N! zdnF=GWs*6|UO7;1CQ2c>nW%gm6}Ls~ehlk0DFa+D#%O8kxPq!NTArdLe^$71(#lLk z@k+!TDZN5^m8j6XGS6t1`ILGMxsl?EGoXF0Yyk(WX!Y@cIc-K=_eF8Nf9v7I04!tL zj!Oy8ti&?TmThOB%ilTs0`E~``8}T5h^5%OW3@Y6Sp$Cl51bzcR_GZA*rxrq*Ezoi zE+!_>kXa1`&zc>_GO5e6gzg!>?J*VH8l;sB-*a%^*~bPUVCSZ`c{wkL6uNjS7jwYg zpqHo%G9kFPv*Wc|@(*sp;slDfloLldkt&Y>9Eq~YVdwz=^P!sC0*1p4@x&y*<-+HvJJ)U})xF652bPRUiqBdb4 z3OnO^hUdFHH(-{-Eblol-0K;Z9f$A=qtATmdX!-*ygaYb124gfgt{iP2A<{gx%!Ra zQO)Yn1S^<~f`eeg;{|^}COVsklD{Ih0D|2RO#I?vCJyKR4lKC*j8jTIGe%PuY>rFH zb>Wz>P`NfPp~}s}y{P=r$;V;2IV?A$ax+*RsSAfYk-BhFd;8?Kolw<=surnQaDN2R zKShZ-yTf`TsyBkIQSJ88CsFP8u(lZ07K3j` z)jLP^sCwt^QdnIcR+ppda`0g4Jn_Qn(y+P|RhNPXV?|2OkI^hk?k%po*-&_%bd7%7 zX_DP!GU*NRhm>^WQ_+^oF1^ovnGF3a_89=;-MJgU#n3W_bVr diff --git a/core/__pycache__/urls.cpython-311.pyc b/core/__pycache__/urls.cpython-311.pyc index 4e4f113d309e34129cca5a1bb3778fb33b4fe2e4..ec30d5eee2ac329dd54efe2c3e25d23ff60e6961 100644 GIT binary patch delta 131 zcmcc3^oxmiIWI340}yOt)X40f$SW!60pv_)NMT4}%wfo7jACS*s2#+{nwgi9S~0Py zjWK32C!>sq%moG%^ngL)0xJ5z#=y$i;L;(~5jsQs0*m5B7R4(piXWJn_?a5GL9mDi GXb}LvyCQx7 delta 109 zcmeyxbeoBHIWI340}w2<{FRY4kynz{2FRH{(L9KiB_lsKbz*-Tmlsgv12ZEd$JU{~gXp$WQ diff --git a/core/__pycache__/views.cpython-311.pyc b/core/__pycache__/views.cpython-311.pyc index 9d0ddd892cc8db8e347c5f97af5002a4ce469452..d550545ba1ff3300dcefa88e5bc09270122c4d5d 100644 GIT binary patch literal 2233 zcmbVNO>7fK6rT0JV<#~Qlwgd#8&naSU>Ai@r9p%=q*2-uq6Avih+2+!us7_FJ3CH@ zWh5VZsl)-P2QC~qrJ%I;9((N3wpOw=k|m^`xJ5#}&`aN}V<(0pwH=S&ym|BH&3p6S zH~Z7z-~fX5i!h;#xe@w3 zyHu~{OZ$L#%O2IQ4WtLOKsulW(?O7Vzenjo*#~q;7S>VR|B4^95*Jt?k7;HL6H_tt z7XtGTtjn0df{!X1E*m<2C4g$&$$~esxq_nO8-|Qk=3g&n30}<-%0|~Q*-~XyK4w-`no*N!7g~U*Ucw- zkDgcdy`?wOwE_0*I^pwYN4z;)@Arm|9j^Sxo#Fqz^Pj!t`F^L#{+|bSA*Xx2y#AZ0 zf*}v<&`DCpoe=}GUGuOED~cA=V(UHYM2GGd4O4FgFu*U zNZ!oqd7}$rHdNf91w-%6gSip`RMQLxH1ih>^T5hz*QSi&o|GcTEj!i~DjFP;DG`;8 zC`W*u$r&XGK4fzEXMoTH$>>3J8I8XW2f`CY$O$AObJT=c9?lsArxbW;9wZA?Q{y4# z{g}*cJ4iUP z2?r$e+*4F|mnm1=am=CDQ^+ev@Tu2oR4W~*Q5OFDm zAK+Yx8YH!CkoypR4yWuewiLW=CW|}FZ3jfo!+wre^DiJ(^fGj^84?>Iu^t+?LgUTQ zL?bj&4<)QnqPpA?_C4s-bnE1JJ3LVj zC#-Oy9UiyB7wh3ED?HU2JKZ`FZO5diJGJ=L+BLl%Gpv}=J`;QTP3_z#zkXUjGiRNd zYez3y(V1p+t`VK9N9V2Rd^_@?6}ixiOf@1?^~kgpnQq6e5|Hy1RdBaM-htfK-?MjG*Ai<8!sslh5<@ zz-23Nx#qh38p20?-vJj0?C-!hFi#Ney#xrp39``t5(1_jE9Ei!ilNGsoQg9trddU` zONMY`ae++0U~)quNdRR-{kkLxLVlb%3Z#nKfn(gy>7JKl66En=aux={6`7}lC&E4L z)Ly6~Nty+NP^baZt|b;c5SOT;nw@&}*u`(!h1V%TUYvGG*q*#V_Ib9eZ+urkyQJc) z^lWPXd!>q{)&9LiAJzJM3!Sa?_ZE_B{rx4nR(pTn_Z$~K-bX!> L6!`RAVmtjeWq>A^ literal 2006 zcmZ`(&2Q936d!wcz21)`&W4tJ#pJA?V%j1zOmzNm$ozG=l3ywe)Hb% z%?~3Zc?9FF3q=E*-j z479_0?hBiOCEO9YnTVF?30CU9NR#Y?c5QI}H@-|+=}nR529D9No${FX$C{LI!+SPU zn*tq6Z168R;jN4%Z9`N$QDj{EKl`$8?z7~_^8GZONIW6PmX+H~57!dvm~Wk5i3+CD z8de>{vutlw4NQWur#5X`Qyt%cda7VivA1p$-*d4?RSQ$Yc7kfFXw>UXyVcsLn!d~A zi}m`KTw=LPjzJxN)iztXW!zM6(hUc*i5n}|uPiRy(m!3ixv;WyYhaWLKH(DYNaRP^~=u`nh_pqn_KE-I{$i*}l4) zFFl<5w)(KzUg%5c(6R8~%x?K)w>;Y`&+e9Idgb@K67l%h2GSK{&;Ro>LVcS%fBB$#nRS^&g9(F)o$TzuW+`L zKD!4&B9oJ3Bu%z=8Z?g(M*ND?L<|=k&S;V)Xju_q*%G&*sdomAt>vr~Ksn8n_|o|M 
zZfPBi(nO=1MF&vBofDNaO4Twisve%7U0!$``60Z5tPnIz69)lD(vOH71syrU$$JUmQZNUkJ(yv!)Lsn7 zC)-!T1E<>8!qVZbx$oAyrQ^NQ@%B|dV@KRaWr9p`SMLI0QrruKXD>#g5^JhK%_kIq z6GQ-Hg?%+d6~uuS6Agt{j`4hGUll_OS`HK2tYXWiB+LJn;b#xxJV;Saf+#^zM&kpX z;(-hf{aXGw&g*&d0T_7If)GeQB?v-@a_wk_XsjL0m*{k7c!lU_XLyC^SZ8>J=tyUH tg~Lw3=Swuz8D1eOcZOGAQiKotXh4pqg%f@BZ?e_;m1}?d4^wpVzX0UP4AcMs diff --git a/core/admin.py b/core/admin.py index 639ff3a..8bea5ae 100644 --- a/core/admin.py +++ b/core/admin.py @@ -1,8 +1,20 @@ from django.contrib import admin -from .models import Ticket +from .models import MachineModel, SparePart, ServiceIntervention -@admin.register(Ticket) -class TicketAdmin(admin.ModelAdmin): - list_display = ('subject', 'status', 'priority', 'requester_email', 'created_at') - list_filter = ('status', 'priority') - search_fields = ('subject', 'requester_email', 'description') +@admin.register(MachineModel) +class MachineModelAdmin(admin.ModelAdmin): + list_display = ('name', 'brand') + search_fields = ('name', 'brand') + +@admin.register(SparePart) +class SparePartAdmin(admin.ModelAdmin): + list_display = ('name', 'reference_code', 'stock_quantity') + search_fields = ('name', 'reference_code') + list_filter = ('stock_quantity',) + +@admin.register(ServiceIntervention) +class ServiceInterventionAdmin(admin.ModelAdmin): + list_display = ('machine', 'technician', 'status', 'created_at') + list_filter = ('status', 'technician', 'created_at') + search_fields = ('machine__name', 'description') + autocomplete_fields = ('machine', 'technician') \ No newline at end of file diff --git a/core/context_processors.py b/core/context_processors.py new file mode 100644 index 0000000..0bf87c3 --- /dev/null +++ b/core/context_processors.py @@ -0,0 +1,13 @@ +import os +import time + +def project_context(request): + """ + Adds project-specific environment variables to the template context globally. + """ + return { + "project_description": os.getenv("PROJECT_DESCRIPTION", ""), + "project_image_url": os.getenv("PROJECT_IMAGE_URL", ""), + # Used for cache-busting static assets + "deployment_timestamp": int(time.time()), + } diff --git a/core/forms.py b/core/forms.py index 7a6b83b..6c2d93f 100644 --- a/core/forms.py +++ b/core/forms.py @@ -1,7 +1 @@ -from django import forms -from .models import Ticket - -class TicketForm(forms.ModelForm): - class Meta: - model = Ticket - fields = ['subject', 'requester_email', 'priority', 'description'] +# No forms yet. 
\ No newline at end of file diff --git a/core/migrations/0002_machinemodel_serviceintervention_sparepart_and_more.py b/core/migrations/0002_machinemodel_serviceintervention_sparepart_and_more.py new file mode 100644 index 0000000..9ff6f6f --- /dev/null +++ b/core/migrations/0002_machinemodel_serviceintervention_sparepart_and_more.py @@ -0,0 +1,51 @@ +# Generated by Django 5.2.7 on 2025-11-27 07:45 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0001_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='MachineModel', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=200)), + ('brand', models.CharField(max_length=100)), + ('manual_pdf', models.FileField(blank=True, null=True, upload_to='manuals/')), + ], + ), + migrations.CreateModel( + name='ServiceIntervention', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('status', models.CharField(choices=[('PENDING', 'Pending'), ('IN_PROGRESS', 'In Progress'), ('COMPLETED', 'Completed'), ('CANCELLED', 'Cancelled')], default='PENDING', max_length=20)), + ('description', models.TextField()), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('resolved_at', models.DateTimeField(blank=True, null=True)), + ('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='interventions', to='core.machinemodel')), + ('technician', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='interventions', to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.CreateModel( + name='SparePart', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('reference_code', models.CharField(max_length=100, unique=True)), + ('name', models.CharField(max_length=200)), + ('description', models.TextField(blank=True)), + ('stock_quantity', models.PositiveIntegerField(default=0)), + ('low_stock_threshold', models.PositiveIntegerField(default=10)), + ], + ), + migrations.DeleteModel( + name='Ticket', + ), + ] diff --git a/core/migrations/0003_alter_machinemodel_options_and_more.py b/core/migrations/0003_alter_machinemodel_options_and_more.py new file mode 100644 index 0000000..d5e4482 --- /dev/null +++ b/core/migrations/0003_alter_machinemodel_options_and_more.py @@ -0,0 +1,98 @@ +# Generated by Django 5.2.7 on 2025-11-27 07:49 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0002_machinemodel_serviceintervention_sparepart_and_more'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.AlterModelOptions( + name='machinemodel', + options={'verbose_name': 'Modelo de Máquina', 'verbose_name_plural': 'Modelos de Máquinas'}, + ), + migrations.AlterModelOptions( + name='serviceintervention', + options={'verbose_name': 'Intervención de Servicio', 'verbose_name_plural': 'Intervenciones de Servicio'}, + ), + migrations.AlterModelOptions( + name='sparepart', + options={'verbose_name': 'Repuesto', 'verbose_name_plural': 'Repuestos'}, + ), + migrations.AlterField( + 
model_name='machinemodel', + name='brand', + field=models.CharField(max_length=100, verbose_name='Marca'), + ), + migrations.AlterField( + model_name='machinemodel', + name='manual_pdf', + field=models.FileField(blank=True, null=True, upload_to='manuals/', verbose_name='Manual (PDF)'), + ), + migrations.AlterField( + model_name='machinemodel', + name='name', + field=models.CharField(max_length=200, verbose_name='Nombre'), + ), + migrations.AlterField( + model_name='serviceintervention', + name='created_at', + field=models.DateTimeField(auto_now_add=True, verbose_name='Fecha de Creación'), + ), + migrations.AlterField( + model_name='serviceintervention', + name='description', + field=models.TextField(verbose_name='Descripción'), + ), + migrations.AlterField( + model_name='serviceintervention', + name='machine', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='interventions', to='core.machinemodel', verbose_name='Máquina'), + ), + migrations.AlterField( + model_name='serviceintervention', + name='resolved_at', + field=models.DateTimeField(blank=True, null=True, verbose_name='Fecha de Resolución'), + ), + migrations.AlterField( + model_name='serviceintervention', + name='status', + field=models.CharField(choices=[('PENDING', 'Pendiente'), ('IN_PROGRESS', 'En Progreso'), ('COMPLETED', 'Completada'), ('CANCELLED', 'Cancelada')], default='PENDING', max_length=20, verbose_name='Estado'), + ), + migrations.AlterField( + model_name='serviceintervention', + name='technician', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='interventions', to=settings.AUTH_USER_MODEL, verbose_name='Técnico'), + ), + migrations.AlterField( + model_name='sparepart', + name='description', + field=models.TextField(blank=True, verbose_name='Descripción'), + ), + migrations.AlterField( + model_name='sparepart', + name='low_stock_threshold', + field=models.PositiveIntegerField(default=10, verbose_name='Umbral de Stock Bajo'), + ), + migrations.AlterField( + model_name='sparepart', + name='name', + field=models.CharField(max_length=200, verbose_name='Nombre'), + ), + migrations.AlterField( + model_name='sparepart', + name='reference_code', + field=models.CharField(max_length=100, unique=True, verbose_name='Código de Referencia'), + ), + migrations.AlterField( + model_name='sparepart', + name='stock_quantity', + field=models.PositiveIntegerField(default=0, verbose_name='Cantidad en Stock'), + ), + ] diff --git a/core/migrations/__pycache__/0001_initial.cpython-311.pyc b/core/migrations/__pycache__/0001_initial.cpython-311.pyc index 64d8a5533a01aeb9823459fa0e2a5787563687ab..7dceafe9f7b1123561b06a7650b6c31a0f31fd2b 100644 GIT binary patch delta 21 bcmeyv^M{9LIWI340}zBWYGiKYDPRKtKmG*- delta 21 bcmeyv^M{9LIWI340}$N&_cLQ7PXQYMPud4l diff --git a/core/migrations/__pycache__/0002_machinemodel_serviceintervention_sparepart_and_more.cpython-311.pyc b/core/migrations/__pycache__/0002_machinemodel_serviceintervention_sparepart_and_more.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b0f9c78603f1069194518a9a7bbf4be358f602e2 GIT binary patch literal 3227 zcmb7GO>7g}6&{c6@o$0+0mmU=Q>VcWB~A%_0!mTy5-0ePA7clJ`cX|MzBl$b;~8^j z90+P97F~4tb}!4PF49+FRatbwq6?NR$I>jUSyk#PJ0qel-m2$b|Ktz#@y4FxGv}Ur z?stC2e;XR|b8!9Bk(B@FJu|R$YBnI9=%Tdk#1q?(S9Qiyo%y z?Mo1koFMU4StO_wx!~hQLesMc7T*l4Zt}>>Gy{XZzNY`Aeb5P<(g}V-XNc(p2X^R* z!_9LiBeltwUCq!Q4;bL?Ua-$uJ;EOT3E0n{#8u|dg}*w`2)fw3boyKrVY);8Q}=** 
z(|_Q=$5FOE+!rBNGkPWtW2bdM>+(r&z|uI=I@h;^L)CDfuR=w3uWCQ5{7g0`*=ZLCo?dSz&*-F1=}eu~DGQ&Bz@>rR8_i#wv|Dx+Bj_e{{Iu;`>327@ zx~rrSYzJp%W_~Hks%*%TVh2~GQbktr3hjKGm(lv7&1V-A@Q&@%F@eqUE3meW$%dw5QI%@gc2Rop9-zUL@zB(hQ7qY8Mz+e@@m;+fLyDH}LYfJe|q3|k@RpU9Ll?m4A1oX|X zt>l(7>zM^$wOU=l;4qWVt?u_=K%(0ogg2#zVi4LZ zb^u|$MC3X{=I=#NlnjVTH>j%iLX;2$6%c63Hb!)jClT08MyUl)t(z5lNIvGJ{who} zINe)h!x~WdG*yJ;0ctx)utL+-%?591w%@>|iYk|6Nlm$rP9QhwS<{rH>4CmNT_QLq z5yKuPcoP#0{)#0i3c$sj5*n)fyn#~=X2c%W4Xv~#K5s~n0Aq*HaS3aa)Yt_D{Ohh8 z6>zblDd>RCdRthROIsKM!BV=K8nwM5!zzllUlhTkhJt|)iqx_~t4S0IeI3{%`U|OE zmo{J>;W{K15bhBAjy}WX4wu7G-OClV4l@5t8hX`iyiz@F3&-A zFVc#Fq*o-;V;5xw_tCs^~v(GJ(AOO=(ufGwq@KqFR;M!)@?q&R>!?J@!z?RfRp=SD_zlQ zINjK2s751=U*Zy=Cg~R%+0yG$38!g2q>tXTR zM%S*k!?AydFPf35Z?>(-)LYm439~4gDlsE^E23Kw-3%G+aKwzJb}w1c)PBK?-fczi zTG6{^=w3TKYK~3qPFiDA`z3SiUTf^0HFnPo{kk2#V#dSkKT=#kH0o!&swo( zR_vJ>`hCae9vSYqy*~d34#*)To%2VCCO-pF)VzFS_Y3RtjSk2C3iGF^#=oSZjC08e zKjROb9~eDk6eQuSvmL%_#%I10toTeTe#?sAGDEWr_AG@x3!r0*t=OU!TQoyUO!+od zzWw(0J7mTmwc?Mg_#-p4+zyYK<2QE~t?`?0g@5_1I}fcpkF7huGb=CPpg%cs6v&mw zJRkrH2!K)qd7hm%LrD?6yZDPyfmT#8c`sJlypXqB0ByV5xr(6X21KwO3c31-`_S9xmF@)C30ry@&8Hx zAcXsd8Nc6(-?!rT&CqY!VKDZ98hh~WxtYke5?L#eHA7#u!;@xm{(Hen&cFZCOs=(( zYgTg24COjQ9_oOfIsoL5lFp?g2cjJ(PuGAPQqq}sOq>^9e?>oJwciLSuO00+_%zy> zX7g5`?sqxKfEvttHUOqen!4GYM!vxgHn`Zt>GswNwabjJ0r!LVes|tIAO(1&|Ce$bm8z5s|?lu0Ie=qivR!s literal 0 HcmV?d00001 diff --git a/core/migrations/__pycache__/0003_alter_machinemodel_options_and_more.cpython-311.pyc b/core/migrations/__pycache__/0003_alter_machinemodel_options_and_more.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7711f545f1e0d8f5df47eeed2a610247cf1e7fd GIT binary patch literal 4219 zcmbtXO>o=B6$U`@5BjkQGX6{K$hIVltxdXdf08zhqe_&lNTN6*m8lq8D2QEAkU@aP zk8Eq|)`uQ?%&nJR+JmZ$dC*KQK6=JI=thry$|-Lb1Wh>(6-_{3vAb{I zd*A!nhvk2Uhj|7riJpzMQo8tM-Ip^pJu(7i7iXfs%Dz=_?l8R zBumjX(~Hz}gjG}S@X^{*94iZW^Z~Wtoi3&u=$DOO9aF|n&uAX~`fyax2>9|hjSr6% zTONJ%hBd!MzdOpBYNC7!eGGN-_g;J{sEJS@2V~vA-iOz(U;jX?N%E$mVOl0d6C2x# zj1|p-4%SEs70tS2V0bK1(vVmqoI|}#HrWJAq{`}}2#+p3|JOZR(IlJXfHmj>NiZ8TN(*vMJm9)EHtF5YD+^(y- zghWfH(y6?IZ-~%HUawWa@w{wc$-+pKEb8`IEN@E0=4qIx4j|r@Y)cn4{k|w6L?wBH z2%EB@P*>Wd%+L5^pM~`oMLSXThI`|0?j%UIWDbR#eD{JtgB5W|6Yv7IATm zT*nk|(7>jyZu`hdW^|FS5Q6Ok2^r^rWV}ln;$JUfT41S}9sskVg0nt@-qc}3m|hg&4art5<5U+MZ-MtUMOGlB#37~U zU&%1k(~%xC#>nz<(o6)5Y0jrp3zI6u335-qD?(CXb5mE*0r|k}q*fv1l1ek0&`19u zZGlzBjX;h!V&DyIV8AcRkcBp>bkonjMoN|L#FY+JkCzhQAxWiJAghsJ8S|n=o>C;|JqzNvH9xp-#K(>mZKO~C4tdc;BqCpM`Udp^L z)$39P=m^&#OaShoK~8PniK*4nEpfG&TM?I*XL5_)PrjCl!%#!i*BdFb<1m}{?H)DUfPhv zGLzuk$K78$!o-)@R_w~(c{g@tFXm2u+Kf%Pv8i@Al!`oFY!5Nbg`2K$ zvp>hVmT;M0l-iR6|16Ze9)uV~v}ECBJ*@0Feo zI17>^RGLD?6)OGN&Ib5F-&Lx$w^cZU0QoC76@phG^hfeau7wD!wGe@|7V0k_oZ7K= zY`^sE^Z%~7xm7o}?&hi>>Ar970$1cf)&eIJWd`?WYWyQ={3Ak1KH~alxztK@qp+8H znsle5Ts$}Sq#LNU$&X|K` za0H+QFeB^QhF_86T@G5jX^r&@PoBu7!VA#*HukY!^FJ|4FeLwi&A&j?4zVl?#fZJ= pFr)t63OYSAlMeHiyB#UBY@`i;z2DY;$-5`VUQptzA6P>1eFw6EGF<=w literal 0 HcmV?d00001 diff --git a/core/migrations/__pycache__/__init__.cpython-311.pyc b/core/migrations/__pycache__/__init__.cpython-311.pyc index 58b1c14eb06fea9cfb9a0d59788218572a75b51c..dda82a9276cb4aae9d6be2c1f514296e311706d7 100644 GIT binary patch delta 20 acmZ3%xPp;qIWI340}zBWYGh92nF9bU!URkJ delta 20 acmZ3%xPp;qIWI340}vQ{e#)50GY0@MWCb|@ diff --git a/core/models.py b/core/models.py index 78b60d1..9b991e6 100644 --- a/core/models.py +++ b/core/models.py @@ -1,25 +1,50 @@ from django.db import models +from django.contrib.auth.models import User -class Ticket(models.Model): - STATUS_CHOICES = [ - ('open', 
'Open'), - ('in_progress', 'In Progress'), - ('closed', 'Closed'), - ] +class MachineModel(models.Model): + name = models.CharField("Nombre", max_length=200) + brand = models.CharField("Marca", max_length=100) + manual_pdf = models.FileField("Manual (PDF)", upload_to='manuals/', blank=True, null=True) - PRIORITY_CHOICES = [ - ('low', 'Low'), - ('medium', 'Medium'), - ('high', 'High'), - ] - - subject = models.CharField(max_length=255) - status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='open') - priority = models.CharField(max_length=20, choices=PRIORITY_CHOICES, default='medium') - requester_email = models.EmailField() - description = models.TextField() - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) + class Meta: + verbose_name = "Modelo de Máquina" + verbose_name_plural = "Modelos de Máquinas" def __str__(self): - return self.subject \ No newline at end of file + return f"{self.brand} {self.name}" + +class SparePart(models.Model): + reference_code = models.CharField("Código de Referencia", max_length=100, unique=True) + name = models.CharField("Nombre", max_length=200) + description = models.TextField("Descripción", blank=True) + stock_quantity = models.PositiveIntegerField("Cantidad en Stock", default=0) + low_stock_threshold = models.PositiveIntegerField("Umbral de Stock Bajo", default=10) + + class Meta: + verbose_name = "Repuesto" + verbose_name_plural = "Repuestos" + + def __str__(self): + return self.name + +class ServiceIntervention(models.Model): + STATUS_CHOICES = [ + ('PENDING', 'Pendiente'), + ('IN_PROGRESS', 'En Progreso'), + ('COMPLETED', 'Completada'), + ('CANCELLED', 'Cancelada'), + ] + + machine = models.ForeignKey(MachineModel, on_delete=models.CASCADE, related_name='interventions', verbose_name="Máquina") + technician = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True, related_name='interventions', verbose_name="Técnico") + status = models.CharField("Estado", max_length=20, choices=STATUS_CHOICES, default='PENDING') + description = models.TextField("Descripción") + created_at = models.DateTimeField("Fecha de Creación", auto_now_add=True) + resolved_at = models.DateTimeField("Fecha de Resolución", null=True, blank=True) + + class Meta: + verbose_name = "Intervención de Servicio" + verbose_name_plural = "Intervenciones de Servicio" + + def __str__(self): + return f"Intervención para {self.machine} el {self.created_at.strftime('%d-%m-%Y')}" \ No newline at end of file diff --git a/core/templates/base.html b/core/templates/base.html new file mode 100644 index 0000000..dc6f022 --- /dev/null +++ b/core/templates/base.html @@ -0,0 +1,30 @@ + + + + + + {% block title %}Knowledge Base{% endblock %} + {% if project_description %} + + + + {% endif %} + {% if project_image_url %} + + + {% endif %} + + + + + {% load static %} + + {% block head %}{% endblock %} + + + + {% block content %}{% endblock %} + + + + diff --git a/core/templates/core/article_detail.html b/core/templates/core/article_detail.html new file mode 100644 index 0000000..8820990 --- /dev/null +++ b/core/templates/core/article_detail.html @@ -0,0 +1,14 @@ +{% extends 'base.html' %} + +{% block title %}{{ article.title }}{% endblock %} + +{% block content %} +
+<div class="container my-5">
+    <h1>{{ article.title }}</h1>
+    <p class="text-muted">Published on {{ article.created_at|date:"F d, Y" }}</p>
+    <hr>
+    <div class="article-content">
+        {{ article.content|safe }}
+    </div>
+</div>
+{% endblock %}
diff --git a/core/templates/core/index.html b/core/templates/core/index.html
index f4e4991..3644ff5 100644
--- a/core/templates/core/index.html
+++ b/core/templates/core/index.html
@@ -1,157 +1,74 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>{{ project_name }}</title>
-    {% if project_description %}
-    <meta name="description" content="{{ project_description }}">
-    {% endif %}
-    {% if project_image_url %}
-    <meta property="og:image" content="{{ project_image_url }}">
-    {% endif %}
-</head>
-<body>
-    <h1>Analyzing your requirements and generating your website…</h1>
-    <div class="loader" role="status">Loading…</div>
-    <p>Appwizzy AI is collecting your requirements and applying the first changes.</p>
-    <p>This page will refresh automatically as the plan is implemented.</p>
-    <p>
-        Runtime: Django {{ django_version }} · Python {{ python_version }} —
-        UTC {{ current_time|date:"Y-m-d H:i:s" }}
-    </p>
-    <p>Page updated: {{ current_time|date:"Y-m-d H:i:s" }} (UTC)</p>
-</body>
-</html>
\ No newline at end of file
+{% extends 'base.html' %}
+{% load static %}
+
+{% block content %}
+<div class="container my-5">
+    <div class="row g-4">
+        <div class="col-md-3">
+            <div class="card stat-card text-center">
+                <div class="card-body">
+                    <div class="stat-number">{{ intervention_count }}</div>
+                    <div class="card-title">Intervenciones Totales</div>
+                </div>
+            </div>
+        </div>
+        <div class="col-md-3">
+            <div class="card stat-card text-center">
+                <div class="card-body">
+                    <div class="stat-number">{{ pending_interventions }}</div>
+                    <div class="card-title">Intervenciones Pendientes</div>
+                </div>
+            </div>
+        </div>
+        <div class="col-md-3">
+            <div class="card stat-card text-center">
+                <div class="card-body">
+                    <div class="stat-number">{{ spare_part_count }}</div>
+                    <div class="card-title">Repuestos</div>
+                </div>
+            </div>
+        </div>
+        <div class="col-md-3">
+            <div class="card stat-card text-center">
+                <div class="card-body">
+                    <div class="stat-number">{{ machine_model_count }}</div>
+                    <div class="card-title">Modelos de Máquinas</div>
+                </div>
+            </div>
+        </div>
+    </div>
+</div>
+ + + +{% endblock %} \ No newline at end of file diff --git a/core/urls.py b/core/urls.py index 6299e3d..332eff0 100644 --- a/core/urls.py +++ b/core/urls.py @@ -1,7 +1,9 @@ from django.urls import path -from .views import home +from django.urls import path + +from .views import index urlpatterns = [ - path("", home, name="home"), + path("", index, name="index"), ] diff --git a/core/views.py b/core/views.py index c1a6d45..f306c47 100644 --- a/core/views.py +++ b/core/views.py @@ -3,35 +3,33 @@ import platform from django import get_version as django_version from django.shortcuts import render -from django.urls import reverse_lazy from django.utils import timezone -from django.views.generic.edit import CreateView -from .forms import TicketForm -from .models import Ticket +from .models import MachineModel, SparePart, ServiceIntervention -def home(request): - """Render the landing screen with loader and environment details.""" +def index(request): + """Render the landing screen with dashboard stats.""" host_name = request.get_host().lower() agent_brand = "AppWizzy" if host_name == "appwizzy.com" else "Flatlogic" now = timezone.now() context = { - "project_name": "New Style", + "project_name": "Nespresso Repair Assistant", "agent_brand": agent_brand, "django_version": django_version(), "python_version": platform.python_version(), "current_time": now, "host_name": host_name, - "project_description": os.getenv("PROJECT_DESCRIPTION", ""), + "project_description": os.getenv("PROJECT_DESCRIPTION", "A guided tool for Nespresso machine repairs."), "project_image_url": os.getenv("PROJECT_IMAGE_URL", ""), + "intervention_count": ServiceIntervention.objects.count(), + "spare_part_count": SparePart.objects.count(), + "machine_model_count": MachineModel.objects.count(), + "pending_interventions": ServiceIntervention.objects.filter(status='PENDING').count(), } return render(request, "core/index.html", context) -class TicketCreateView(CreateView): - model = Ticket - form_class = TicketForm - template_name = "core/ticket_create.html" - success_url = reverse_lazy("home") +def article_detail(request): + return render(request, "core/article_detail.html") \ No newline at end of file diff --git a/static/css/custom.css b/static/css/custom.css new file mode 100644 index 0000000..721fde9 --- /dev/null +++ b/static/css/custom.css @@ -0,0 +1,70 @@ +/* +Custom Styles for Nespresso Repair Assistant +*/ + +:root { + --primary-color: #1A237E; + --secondary-color: #FFAB00; + --accent-color: #82B1FF; + --background-color: #F5F5F5; + --text-color: #212121; + --font-family-headings: 'Poppins', sans-serif; + --font-family-body: 'Roboto', sans-serif; +} + +body { + background-color: var(--background-color); + font-family: var(--font-family-body); + color: var(--text-color); +} + +h1, h2, h3, h4, h5, h6 { + font-family: var(--font-family-headings); + font-weight: 600; +} + +.hero-section { + background: linear-gradient(135deg, var(--primary-color), var(--accent-color)); + color: white; + padding: 4rem 2rem; + border-radius: 0 0 2rem 2rem; +} + +.hero-section h1 { + font-weight: 700; +} + +.stat-card { + background-color: white; + border: none; + border-radius: 1rem; + box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.05); + transition: transform 0.2s ease-in-out, box-shadow 0.2s ease-in-out; +} + +.stat-card:hover { + transform: translateY(-5px); + box-shadow: 0 0.75rem 1.5rem rgba(0, 0, 0, 0.1); +} + +.stat-card .card-body { + padding: 2rem; +} + +.stat-card .stat-number { + font-size: 2.5rem; + font-weight: 700; + color: 
var(--primary-color); +} + +.stat-card .card-title { + font-size: 1.1rem; + font-weight: 500; + color: #6c757d; +} + +.navbar-brand { + font-family: var(--font-family-headings); + font-weight: 700; + color: var(--primary-color) !important; +} \ No newline at end of file diff --git a/staticfiles/css/custom.css b/staticfiles/css/custom.css index 108056f..721fde9 100644 --- a/staticfiles/css/custom.css +++ b/staticfiles/css/custom.css @@ -1,21 +1,70 @@ +/* +Custom Styles for Nespresso Repair Assistant +*/ :root { - --bg-color-start: #6a11cb; - --bg-color-end: #2575fc; - --text-color: #ffffff; - --card-bg-color: rgba(255, 255, 255, 0.01); - --card-border-color: rgba(255, 255, 255, 0.1); + --primary-color: #1A237E; + --secondary-color: #FFAB00; + --accent-color: #82B1FF; + --background-color: #F5F5F5; + --text-color: #212121; + --font-family-headings: 'Poppins', sans-serif; + --font-family-body: 'Roboto', sans-serif; } + body { - margin: 0; - font-family: 'Inter', sans-serif; - background: linear-gradient(45deg, var(--bg-color-start), var(--bg-color-end)); + background-color: var(--background-color); + font-family: var(--font-family-body); color: var(--text-color); - display: flex; - justify-content: center; - align-items: center; - min-height: 100vh; - text-align: center; - overflow: hidden; - position: relative; } + +h1, h2, h3, h4, h5, h6 { + font-family: var(--font-family-headings); + font-weight: 600; +} + +.hero-section { + background: linear-gradient(135deg, var(--primary-color), var(--accent-color)); + color: white; + padding: 4rem 2rem; + border-radius: 0 0 2rem 2rem; +} + +.hero-section h1 { + font-weight: 700; +} + +.stat-card { + background-color: white; + border: none; + border-radius: 1rem; + box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.05); + transition: transform 0.2s ease-in-out, box-shadow 0.2s ease-in-out; +} + +.stat-card:hover { + transform: translateY(-5px); + box-shadow: 0 0.75rem 1.5rem rgba(0, 0, 0, 0.1); +} + +.stat-card .card-body { + padding: 2rem; +} + +.stat-card .stat-number { + font-size: 2.5rem; + font-weight: 700; + color: var(--primary-color); +} + +.stat-card .card-title { + font-size: 1.1rem; + font-weight: 500; + color: #6c757d; +} + +.navbar-brand { + font-family: var(--font-family-headings); + font-weight: 700; + color: var(--primary-color) !important; +} \ No newline at end of file
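
A minimal usage sketch of the new helper from Django view code, not part of the patch: LocalAIApi.create_response, decode_json_from_response and extract_text are taken from ai/local_ai_api.py above, while the view name and prompt wording are illustrative assumptions only.

# Sketch only — assumes a view like this would later be added in core/views.py and routed in core/urls.py.
from django.http import JsonResponse

from ai.local_ai_api import LocalAIApi


def diagnose(request):
    """Ask the AI proxy for a structured diagnosis of a reported machine fault."""
    fault = request.GET.get("fault", "")
    response = LocalAIApi.create_response({
        "input": [
            {"role": "system", "content": "You are a Nespresso repair assistant. Reply as a JSON object."},
            {"role": "user", "content": f"Diagnose this fault: {fault}"},
        ],
        "text": {"format": {"type": "json_object"}},
    })
    if not response.get("success"):
        # create_response reports proxy errors as {"success": False, "error": ..., "message": ...}
        return JsonResponse({"error": response.get("error", "ai_error")}, status=502)

    # decode_json_from_response handles plain JSON and markdown-fenced output; fall back to raw text.
    data = LocalAIApi.decode_json_from_response(response)
    if data is None:
        data = {"raw": LocalAIApi.extract_text(response)}
    return JsonResponse(data)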