# 38292-vm/core/ai_service.py
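"""Service layer that syncs AI chat history into the local database.

For each active AIConfiguration the matching provider fetcher (OpenAI,
Perplexity, Merlin, Poe, OpenRouter) is called and the results are upserted
into AIChatHistory, with every run recorded in a SyncHistoryLog entry. Most
fetchers currently return mock data.
"""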

import hashlib
import logging
import random
from datetime import timedelta

import httpx
from django.utils import timezone
from django.utils.dateparse import parse_datetime

from .models import AIConfiguration, AIChatHistory, SyncHistoryLog
from .utils import decrypt_value

logger = logging.getLogger(__name__)


def sync_all_configurations():
    """Run a history sync for every active AI configuration."""
    configs = AIConfiguration.objects.filter(is_active=True)
    for config in configs:
        sync_ai_history(config)
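
# A minimal sketch of how this entry point might be scheduled. This is an
# assumption -- the actual wiring (Celery beat, cron, a management command)
# is not part of this module:
#
#   from celery import shared_task
#
#   @shared_task
#   def sync_ai_histories_task():
#       sync_all_configurations()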


def sync_ai_history(config: AIConfiguration):
    """Fetch chat history for a single configuration and upsert it locally."""
    api_key = decrypt_value(config.api_key)
    provider = config.provider
    company = config.company

    log = SyncHistoryLog.objects.create(
        company=company,
        configuration=config,
        status='success',
        records_synced=0
    )

    try:
        if provider == 'openai':
            records = fetch_openai_history(api_key)
        elif provider == 'perplexity':
            records = fetch_perplexity_history(api_key)
        elif provider == 'merlin':
            records = fetch_merlin_history(api_key)
        elif provider == 'poe':
            records = fetch_poe_history(api_key)
        elif provider == 'openrouter':
            records = fetch_openrouter_history(api_key)
        else:
            records = []

        synced_count = 0
        for record in records:
            # Simple deduplication based on ai_chat_id and company
            obj, created = AIChatHistory.objects.update_or_create(
                company=company,
                ai_chat_id=record['id'],
                defaults={
                    'ai_configuration': config,
                    'ai_chat_engine': provider.upper() if provider in ['poe', 'openrouter'] else provider.capitalize(),
                    'chat_title': record['title'],
                    'chat_content': record['content'],
                    'chat_last_date': record['last_date'],
                }
            )
            if created:
                synced_count += 1

        log.records_synced = synced_count
        log.save()
        config.last_sync = timezone.now()
        config.save()
    except Exception as e:
        logger.exception(f"Error syncing {provider} for {company.name}")
        log.status = 'error'
        log.error_message = str(e)
        log.save()


def _generate_stable_id(title, provider):
    """Generates a stable ID based on the title and provider for mock data."""
    hash_object = hashlib.md5(f"{provider}:{title}".encode())
    return f"{provider[:2]}_{hash_object.hexdigest()[:8]}"


def fetch_openai_history(api_key):
    if api_key == "ERROR_DECRYPTING":
        raise Exception("API Key decryption failed")

    # Mock data; stable IDs are derived from the titles via _generate_stable_id().
    data = [
        {
            'title': 'Optimizing Telecom Network Architecture',
            'content': 'Discussion about 5G deployment and latency optimization in rural areas. Focus on edge computing and MIMO technologies.',
            'last_date': timezone.now() - timedelta(hours=5)
        },
        {
            'title': 'Customer Churn Analysis Script',
            'content': 'Python script using pandas to analyze monthly billing cycles and identify high-risk accounts based on data usage patterns.',
            'last_date': timezone.now() - timedelta(days=2)
        },
        {
            'title': 'VoIP Quality Troubleshooting Guide',
            'content': 'Steps to identify packet loss and jitter in enterprise VoIP setups. Recommended buffer settings and QoS tagging.',
            'last_date': timezone.now() - timedelta(days=3)
        }
    ]
    for item in data:
        item['id'] = _generate_stable_id(item['title'], 'openai')
    return data


def fetch_perplexity_history(api_key):
    data = [
        {
            'title': 'Latest Trends in Satellite Communication 2026',
            'content': 'Summary of Starlink and Kuiper project progress as of February 2026. Includes regulatory changes in EMEA region.',
            'last_date': timezone.now() - timedelta(minutes=15)
        },
        {
            'title': 'Regulatory Impact on 6G Spectrum',
            'content': 'Analysis of FCC and ITU recent publications regarding terahertz frequency allocations for early 6G trials.',
            'last_date': timezone.now() - timedelta(days=1)
        }
    ]
    for item in data:
        item['id'] = _generate_stable_id(item['title'], 'perplexity')
    return data


def fetch_merlin_history(api_key):
    data = [
        {
            'title': 'Merlin AI - Project Planning SEO',
            'content': 'SEO strategy for a new telecommunications landing page targeting enterprise clients. Keywords: SD-WAN, Managed Security.',
            'last_date': timezone.now() - timedelta(hours=1)
        }
    ]
    for item in data:
        item['id'] = _generate_stable_id(item['title'], 'merlin')
    return data


def fetch_poe_history(api_key):
    if api_key == "YOUR_MOCK_KEY_OR_EMPTY":
        # For now this branch returns mock data:
        data = [
            {
                'title': 'POE - Python Data Visualization',
                'content': 'Generating heatmaps for network traffic distribution across various regional nodes using Seaborn and Matplotlib.',
                'last_date': timezone.now() - timedelta(hours=2)
            },
            {
                'title': 'Automated Network Incident Response',
                'content': 'Conceptual workflow for using LLMs to parse syslog errors and suggest immediate remediation steps for NOC engineers.',
                'last_date': timezone.now() - timedelta(hours=8)
            }
        ]
        for item in data:
            item['id'] = _generate_stable_id(item['title'], 'poe')
        return data  # Alternatively return [] or treat the placeholder key as an error.

    # Call the API endpoint (replace the URL with the actual Poe API you use).
    response = httpx.get(
        "https://api.poe.com/v1/chats",
        headers={"Authorization": f"Bearer {api_key}"}
    )
    response.raise_for_status()
    data = response.json()

    # Transform the real API response to match our internal format:
    return [{
        'id': item['chatId'],
        'title': item['title'],
        'content': item['snippet'],
        'last_date': parse_datetime(item['updatedAt'])
    } for item in data['chats']]
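
# The transformation above assumes a response shaped roughly like this; the
# URL and field names are placeholders, so adjust them to the real Poe API:
#
#   {"chats": [{"chatId": "...", "title": "...", "snippet": "...",
#               "updatedAt": "2026-02-08T12:00:00Z"}]}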


def fetch_openrouter_history(api_key):
    data = [
        {
            'title': 'OpenRouter - Llama 3 Research',
            'content': 'Comparing performance of open-source models for automated customer support in Telecom. Benchmarks on response latency.',
            'last_date': timezone.now() - timedelta(hours=3)
        }
    ]
    for item in data:
        item['id'] = _generate_stable_id(item['title'], 'openrouter')
    return data
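
# To add another provider (sketch): implement a fetch_<provider>_history(api_key)
# helper that returns dicts with 'id', 'title', 'content' and 'last_date' keys,
# then add a matching branch to the provider dispatch in sync_ai_history().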