+
diff --git a/assets/pasted-20260215-164921-daccb69a.png b/assets/pasted-20260215-164921-daccb69a.png
new file mode 100644
index 0000000..d2f6997
Binary files /dev/null and b/assets/pasted-20260215-164921-daccb69a.png differ
diff --git a/includes/ai_filtering.php b/includes/ai_filtering.php
index 806c0e6..7371fbd 100644
--- a/includes/ai_filtering.php
+++ b/includes/ai_filtering.php
@@ -4,9 +4,16 @@ require_once __DIR__ . '/../ai/LocalAIApi.php';
function moderateContent($content) {
if (empty(trim($content))) return ['is_safe' => true];
+ // Bypass AI moderation only when the entire message is a bare link to a major video platform:
+ // those sites enforce their own safety policies, and links the model cannot follow often trigger false positives for "lack of context".
+ if (preg_match('/^(?:https?:\/\/)?(?:www\.)?(?:youtube\.com|youtu\.be|dailymotion\.com|dai\.ly|vimeo\.com)\/\S*$/i', trim($content))) {
+ return ['is_safe' => true];
+ }
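+ // Anchoring the pattern (^...$) also blocks spoofed domains like "evilyoutube.com" and keeps mixed messages of link plus free text in the normal moderation path below.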
+
$resp = LocalAIApi::createResponse([
'input' => [
- ['role' => 'system', 'content' => 'You are a content moderator. Analyze the message and return a JSON object with "is_safe" (boolean) and "reason" (string, optional). Safe means no hate speech, extreme violence, or explicit sexual content.'],
+ ['role' => 'system', 'content' => 'You are a content moderator. Analyze the message and return a JSON object with "is_safe" (boolean) and "reason" (string, optional). Safe means no hate speech, extreme violence, or explicit sexual content. Do not flag URLs as unsafe simply because you cannot see the content behind them.'],
['role' => 'user', 'content' => $content],
],
]);
diff --git a/includes/opengraph.php b/includes/opengraph.php
index a241cef..bf2e522 100644
--- a/includes/opengraph.php
+++ b/includes/opengraph.php
@@ -11,6 +11,9 @@ function fetchOpenGraphData($url) {
curl_close($ch);
if (!$html || $info['http_code'] !== 200) return null;
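+ // DOMDocument lives in the dom extension, which minimal PHP builds may omit; degrade gracefully rather than fatal.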
+ if (!class_exists('DOMDocument')) return null;
$doc = new DOMDocument();
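+ // The @ silences libxml warnings: real-world pages are rarely well-formed HTML, and loadHTML() complains loudly otherwise.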
@$doc->loadHTML($html);