diff --git a/assets/js/main.js b/assets/js/main.js
index 1bec195..e394358 100644
--- a/assets/js/main.js
+++ b/assets/js/main.js
@@ -1486,11 +1486,21 @@ document.addEventListener('DOMContentLoaded', () => {
         }
         if (msg.is_pinned) div.classList.add('pinned');
 
-        const ytRegex = /(?:https?:\/\/)?(?:www\.)?(?:youtube\.com\/watch\?v=|youtu\.be\/)([a-zA-Z0-9_-]{11})/;
+        const ytRegex = /(?:https?:\/\/)?(?:www\.)?(?:youtube\.com\/(?:watch\?v=|shorts\/)|youtu\.be\/)([a-zA-Z0-9_-]{11})/;
+        const dmRegex = /(?:https?:\/\/)?(?:www\.)?(?:dailymotion\.com\/video\/|dai\.ly\/)([a-zA-Z0-9]+)/;
+        const vimeoRegex = /(?:https?:\/\/)?(?:www\.)?vimeo\.com\/(\d+)/;
+
         const ytMatch = msg.content.match(ytRegex);
-        let ytHtml = '';
+        const dmMatch = msg.content.match(dmRegex);
+        const vimeoMatch = msg.content.match(vimeoRegex);
+
+        let videoHtml = '';
         if (ytMatch && ytMatch[1]) {
-            ytHtml = `<iframe src="https://www.youtube.com/embed/${ytMatch[1]}" frameborder="0" allowfullscreen></iframe>`;
+            videoHtml = `<iframe src="https://www.youtube.com/embed/${ytMatch[1]}" frameborder="0" allowfullscreen></iframe>`;
+        } else if (dmMatch && dmMatch[1]) {
+            videoHtml = `<iframe src="https://www.dailymotion.com/embed/video/${dmMatch[1]}" frameborder="0" allowfullscreen></iframe>`;
+        } else if (vimeoMatch && vimeoMatch[1]) {
+            videoHtml = `<iframe src="https://player.vimeo.com/video/${vimeoMatch[1]}" frameborder="0" allowfullscreen></iframe>`;
         }
 
         const authorStyle = msg.role_color ? `color: ${msg.role_color};` : '';
@@ -1506,8 +1516,8 @@ document.addEventListener('DOMContentLoaded', () => {
             ${escapeHTML(msg.content).replace(/\n/g, '<br>').replace(mentionRegex, `@${window.currentUsername}`)}
             ${attachmentHtml}
-            ${ytHtml}
-            ${ytHtml ? '' : embedHtml}
+            ${videoHtml}
+            ${videoHtml ? '' : embedHtml}
+
diff --git a/assets/pasted-20260215-164921-daccb69a.png b/assets/pasted-20260215-164921-daccb69a.png
new file mode 100644
index 0000000..d2f6997
Binary files /dev/null and b/assets/pasted-20260215-164921-daccb69a.png differ
diff --git a/includes/ai_filtering.php b/includes/ai_filtering.php
index 806c0e6..7371fbd 100644
--- a/includes/ai_filtering.php
+++ b/includes/ai_filtering.php
@@ -4,9 +4,15 @@ require_once __DIR__ . '/../ai/LocalAIApi.php';
 function moderateContent($content) {
     if (empty(trim($content))) return ['is_safe' => true];
 
+    // Bypass moderation for video platforms as they are handled by their own safety measures
+    // and often trigger false positives in AI moderation due to "lack of context".
+    if (preg_match('/(?:https?:\/\/)?(?:www\.)?(?:youtube\.com|youtu\.be|dailymotion\.com|dai\.ly|vimeo\.com)\//i', $content)) {
+        return ['is_safe' => true];
+    }
+
     $resp = LocalAIApi::createResponse([
         'input' => [
-            ['role' => 'system', 'content' => 'You are a content moderator. Analyze the message and return a JSON object with "is_safe" (boolean) and "reason" (string, optional). Safe means no hate speech, extreme violence, or explicit sexual content.'],
+            ['role' => 'system', 'content' => 'You are a content moderator. Analyze the message and return a JSON object with "is_safe" (boolean) and "reason" (string, optional). Safe means no hate speech, extreme violence, or explicit sexual content. Do not flag URLs as unsafe simply because you cannot see the content behind them.'],
             ['role' => 'user', 'content' => $content],
         ],
     ]);
diff --git a/includes/opengraph.php b/includes/opengraph.php
index a241cef..bf2e522 100644
--- a/includes/opengraph.php
+++ b/includes/opengraph.php
@@ -11,6 +11,7 @@ function fetchOpenGraphData($url) {
     curl_close($ch);
 
     if (!$html || $info['http_code'] !== 200) return null;
+    if (!class_exists('DOMDocument')) return null;
 
     $doc = new DOMDocument();
     @$doc->loadHTML($html);