Compare commits

..

2 Commits

Author SHA1 Message Date
Flatlogic Bot
e0d6d4fcaf Autosave: 20260328-044712 2026-03-28 04:47:13 +00:00
Flatlogic Bot
de5aa451c1 Autosave: 20260328-015333 2026-03-28 01:53:33 +00:00
31 changed files with 13087 additions and 2069 deletions

34
.env.example Normal file
View File

@ -0,0 +1,34 @@
# ---------------------------------------------------------------------------
# External database / Supabase (recommended: Supabase transaction pooler URL)
# ---------------------------------------------------------------------------
DATABASE_URL=postgresql://postgres:<password>@<host>:6543/postgres?sslmode=require
DB_SSL=require
DB_SSL_REJECT_UNAUTHORIZED=false
DB_POOL_MAX=20
DB_CONNECTION_TIMEOUT_MS=10000
DB_IDLE_TIMEOUT_MS=30000
DB_QUERY_TIMEOUT_MS=15000
DB_STATEMENT_TIMEOUT_MS=15000
DB_APP_NAME=flatlogic-backend
# ---------------------------------------------------------------------------
# API server / security
# ---------------------------------------------------------------------------
PORT=8080
ADMIN_TOKEN=change-me-admin-token
API_INGEST_KEY=change-me-ingest-key
WEBHOOK_SECRET=change-me-global-webhook-secret
SHEIN_WEBHOOK_SECRET=change-me-shein-webhook-secret
# ---------------------------------------------------------------------------
# Optional Supabase SDK keys (if later needed by background jobs)
# ---------------------------------------------------------------------------
SUPABASE_URL=https://<project-ref>.supabase.co
SUPABASE_ANON_KEY=<anon-key>
SUPABASE_SERVICE_ROLE_KEY=<service-role-key>
# ---------------------------------------------------------------------------
# Frontend
# ---------------------------------------------------------------------------
VITE_API_BASE_URL=/api
API_SERVER_URL=http://127.0.0.1:8080

42
.github/workflows/ci.yml vendored Normal file
View File

@ -0,0 +1,42 @@
name: CI
on:
pull_request:
push:
branches:
- main
- master
- develop
concurrency:
group: ci-${{ github.ref }}
cancel-in-progress: true
jobs:
build-and-check:
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
cache: pnpm
- name: Enable Corepack
run: |
corepack enable
corepack prepare pnpm@10.16.1 --activate
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Typecheck workspace
run: pnpm typecheck
- name: Build workspace
run: pnpm build

83
.github/workflows/deploy-flatlogic.yml vendored Normal file
View File

@ -0,0 +1,83 @@
name: Deploy to Flatlogic VM
on:
push:
branches:
- main
- master
workflow_dispatch:
repository_dispatch:
types:
- bolt_sync
- replit_sync
- flatlogic_deploy
concurrency:
group: flatlogic-deploy-${{ github.ref_name }}
cancel-in-progress: true
jobs:
deploy:
runs-on: ubuntu-latest
timeout-minutes: 30
environment: production
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
cache: pnpm
- name: Enable Corepack
run: |
corepack enable
corepack prepare pnpm@10.16.1 --activate
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Typecheck workspace
run: pnpm typecheck
- name: Build workspace
run: pnpm build
- name: Configure SSH
env:
FLATLOGIC_SSH_KEY: ${{ secrets.FLATLOGIC_SSH_KEY }}
FLATLOGIC_HOST: ${{ secrets.FLATLOGIC_HOST }}
run: |
test -n "$FLATLOGIC_SSH_KEY"
test -n "$FLATLOGIC_HOST"
install -m 700 -d ~/.ssh
printf '%s' "$FLATLOGIC_SSH_KEY" > ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
ssh-keyscan -H "$FLATLOGIC_HOST" >> ~/.ssh/known_hosts
- name: Deploy on Flatlogic VM
env:
FLATLOGIC_HOST: ${{ secrets.FLATLOGIC_HOST }}
FLATLOGIC_USER: ${{ secrets.FLATLOGIC_USER }}
PROJECT_DIR: ${{ secrets.FLATLOGIC_PROJECT_DIR }}
DEPLOY_BRANCH: ${{ secrets.FLATLOGIC_DEPLOY_BRANCH }}
DATABASE_URL: ${{ secrets.DATABASE_URL }}
DB_SSL: ${{ secrets.DB_SSL }}
DB_POOL_MAX: ${{ secrets.DB_POOL_MAX }}
DB_QUERY_TIMEOUT_MS: ${{ secrets.DB_QUERY_TIMEOUT_MS }}
DB_STATEMENT_TIMEOUT_MS: ${{ secrets.DB_STATEMENT_TIMEOUT_MS }}
ADMIN_TOKEN: ${{ secrets.ADMIN_TOKEN }}
API_INGEST_KEY: ${{ secrets.API_INGEST_KEY }}
WEBHOOK_SECRET: ${{ secrets.WEBHOOK_SECRET }}
SHEIN_WEBHOOK_SECRET: ${{ secrets.SHEIN_WEBHOOK_SECRET }}
API_PORT: ${{ secrets.API_PORT }}
STORE_PORT: ${{ secrets.STORE_PORT }}
run: |
test -n "$FLATLOGIC_HOST"
test -n "$FLATLOGIC_USER"
test -n "$PROJECT_DIR"
ssh "$FLATLOGIC_USER@$FLATLOGIC_HOST" \
"export PROJECT_DIR='$PROJECT_DIR' DEPLOY_BRANCH='${DEPLOY_BRANCH:-${GITHUB_REF_NAME}}' DATABASE_URL='$DATABASE_URL' DB_SSL='${DB_SSL:-require}' DB_POOL_MAX='${DB_POOL_MAX:-20}' DB_QUERY_TIMEOUT_MS='${DB_QUERY_TIMEOUT_MS:-15000}' DB_STATEMENT_TIMEOUT_MS='${DB_STATEMENT_TIMEOUT_MS:-15000}' ADMIN_TOKEN='$ADMIN_TOKEN' API_INGEST_KEY='$API_INGEST_KEY' WEBHOOK_SECRET='$WEBHOOK_SECRET' SHEIN_WEBHOOK_SECRET='$SHEIN_WEBHOOK_SECRET' API_PORT='${API_PORT:-8080}' STORE_PORT='${STORE_PORT:-3001}' && bash '$PROJECT_DIR/scripts/flatlogic-deploy.sh'"

View File

@ -6,6 +6,12 @@ import { logger } from "./lib/logger";
const app: Express = express();
function captureRawBody(req: express.Request, _res: express.Response, buf: Buffer): void {
if (buf.length > 0) {
req.rawBody = buf.toString("utf8");
}
}
app.use(
pinoHttp({
logger,
@ -26,8 +32,8 @@ app.use(
}),
);
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(express.json({ verify: captureRawBody }));
app.use(express.urlencoded({ extended: true, verify: captureRawBody }));
app.use("/api", router);

View File

@ -0,0 +1,423 @@
import { db, categoriesTable, integrationEventsTable, productsTable, type ProductVariant } from "@workspace/db";
import { and, desc, eq, sql } from "drizzle-orm";
export type SourceName = "extra" | "shein";
type CategoryInput = {
id?: number;
name?: string;
name_en?: string;
slug?: string;
parent_slug?: string;
};
export type ProductIngestInput = {
source: SourceName;
external_id?: string;
sku?: string;
source_url?: string;
currency?: string;
availability?: string;
name?: string;
name_en?: string;
short_description?: string;
description?: string;
brand?: string;
subcategory?: string;
category_id?: number;
category?: CategoryInput;
price?: number;
original_price?: number;
images?: string[];
sizes?: string[];
colors?: string[];
specs?: Record<string, string>;
marketing_points?: string[];
variants?: ProductVariant[];
tags?: string[];
metadata?: Record<string, unknown>;
stock?: number;
rating?: number;
review_count?: number;
is_trending?: boolean;
is_bestseller?: boolean;
is_new?: boolean;
is_top_rated?: boolean;
};
export type WebhookProductPatch = Partial<ProductIngestInput> & {
source?: SourceName;
external_id?: string;
sku?: string;
};
/** Returns the trimmed string when `value` is a non-empty string; otherwise undefined. */
function asString(value: unknown): string | undefined {
  if (typeof value === "string") {
    const trimmed = value.trim();
    if (trimmed.length > 0) return trimmed;
  }
  return undefined;
}
/** Coerces a finite number or a non-blank numeric string to a number; undefined otherwise. */
function asNumber(value: unknown): number | undefined {
  switch (typeof value) {
    case "number":
      return Number.isFinite(value) ? value : undefined;
    case "string": {
      if (value.trim() === "") return undefined;
      const parsed = Number(value);
      return Number.isFinite(parsed) ? parsed : undefined;
    }
    default:
      return undefined;
  }
}
/** Like asNumber, but truncates toward zero and clamps negatives to 0. */
function asInteger(value: unknown): number | undefined {
  const numeric = asNumber(value);
  if (numeric === undefined) return undefined;
  return Math.max(0, Math.trunc(numeric));
}
/**
 * Parses common boolean spellings, case-insensitively:
 * "true"/"1"/"yes"/"on" -> true, "false"/"0"/"no"/"off" -> false.
 * Real booleans pass through; anything else yields `fallback`.
 */
function asBoolean(value: unknown, fallback = false): boolean {
  if (typeof value === "boolean") return value;
  if (typeof value === "string") {
    const lowered = value.toLowerCase();
    if (lowered === "true" || lowered === "1" || lowered === "yes" || lowered === "on") return true;
    if (lowered === "false" || lowered === "0" || lowered === "no" || lowered === "off") return false;
  }
  return fallback;
}
/** Normalizes an unknown value into an array of non-empty trimmed strings ([] for non-arrays). */
function asStringArray(value: unknown): string[] {
  if (!Array.isArray(value)) return [];
  const result: string[] = [];
  for (const entry of value) {
    const normalized = asString(entry);
    if (normalized !== undefined) result.push(normalized);
  }
  return result;
}
/** Keeps only entries of a plain object whose key and value are non-empty strings (both trimmed). */
function asStringRecord(value: unknown): Record<string, string> {
  if (!value || typeof value !== "object" || Array.isArray(value)) return {};
  const result: Record<string, string> = {};
  for (const [key, entryValue] of Object.entries(value as Record<string, unknown>)) {
    const normalizedKey = asString(key);
    const normalizedValue = asString(entryValue);
    if (normalizedKey && normalizedValue) {
      result[normalizedKey] = normalizedValue;
    }
  }
  return result;
}
/** Returns the value as a plain object record, or {} when it is not a non-array object. */
function asJsonRecord(value: unknown): Record<string, unknown> {
  const isPlainObject = Boolean(value) && typeof value === "object" && !Array.isArray(value);
  return isPlainObject ? (value as Record<string, unknown>) : {};
}
/**
 * Normalizes raw variant entries into ProductVariant objects.
 * Entries lacking a non-empty label or a parseable price are dropped;
 * prices are stored as two-decimal money strings.
 */
function asVariants(value: unknown): ProductVariant[] {
  if (!Array.isArray(value)) return [];
  const result: ProductVariant[] = [];
  for (const entry of value) {
    if (!entry || typeof entry !== "object") continue;
    const raw = entry as Record<string, unknown>;
    const label = asString(raw["label"]);
    const price = asNumber(raw["price"]);
    if (label === undefined || price === undefined) continue;
    const originalPrice = asNumber(raw["original_price"]);
    result.push({
      label,
      price: toMoney(price),
      original_price: originalPrice === undefined ? undefined : toMoney(originalPrice),
      sku: asString(raw["sku"]),
    });
  }
  return result;
}
/** Formats a numeric amount with exactly two decimal places (money-style string, e.g. 3 -> "3.00"). */
function toMoney(value: number): string {
  const formatted = value.toFixed(2);
  return formatted;
}
/**
 * Builds a URL-safe slug: lowercased, NFKD-normalized, only ASCII letters,
 * digits, whitespace and hyphens kept, whitespace runs collapsed to single
 * hyphens, hyphen runs merged, leading/trailing hyphens stripped.
 * Note: input with no ASCII letters/digits (e.g. Arabic-only text) yields "".
 */
function slugify(value: string): string {
  const lowered = value.toLowerCase().normalize("NFKD");
  const asciiOnly = lowered.replace(/[^a-z0-9\s-]/g, "");
  return asciiOnly
    .trim()
    .replace(/\s+/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}
/**
 * Resolves a raw source value to a known SourceName.
 * Accepts both explicit literals ("shein" and "extra"); anything else falls
 * back to `fallback`. Previously an explicit "extra" was silently coerced to
 * the fallback, so a webhook item marked source:"extra" became "shein".
 */
function resolveSource(value: unknown, fallback: SourceName): SourceName {
  if (value === "shein" || value === "extra") return value;
  return fallback;
}
/** Extracts a CategoryInput from an unknown payload object; undefined for non-objects/arrays. */
function normalizeCategory(value: unknown): CategoryInput | undefined {
  if (!value || typeof value !== "object" || Array.isArray(value)) return undefined;
  const raw = value as Record<string, unknown>;
  const category: CategoryInput = {
    id: asInteger(raw["id"]),
    name: asString(raw["name"]),
    name_en: asString(raw["name_en"]),
    slug: asString(raw["slug"]),
    parent_slug: asString(raw["parent_slug"]),
  };
  return category;
}
/**
 * Validates and normalizes one raw ingestion payload into a ProductIngestInput.
 *
 * @param raw            Arbitrary payload; must be a non-array object.
 * @param fallbackSource Source assumed when the payload does not specify one.
 * @throws Error when the payload is not an object, or when it carries neither
 *         an external_id nor a sku (nothing to identify the product by).
 */
export function normalizeProductInput(raw: unknown, fallbackSource: SourceName = "extra"): ProductIngestInput {
  const isObject = Boolean(raw) && typeof raw === "object" && !Array.isArray(raw);
  if (!isObject) {
    throw new Error("Each product payload must be an object");
  }
  const input = raw as Record<string, unknown>;
  const product: ProductIngestInput = {
    // Identity / provenance
    source: resolveSource(input["source"], fallbackSource),
    external_id: asString(input["external_id"]),
    sku: asString(input["sku"]),
    source_url: asString(input["source_url"]),
    // Defaults applied when the payload omits these fields.
    currency: asString(input["currency"]) ?? "SAR",
    availability: asString(input["availability"]) ?? "unknown",
    // Descriptive fields
    name: asString(input["name"]),
    name_en: asString(input["name_en"]),
    short_description: asString(input["short_description"]),
    description: asString(input["description"]),
    brand: asString(input["brand"]),
    subcategory: asString(input["subcategory"]),
    category_id: asInteger(input["category_id"]),
    category: normalizeCategory(input["category"]),
    // Pricing and merchandising
    price: asNumber(input["price"]),
    original_price: asNumber(input["original_price"]),
    images: asStringArray(input["images"]),
    sizes: asStringArray(input["sizes"]),
    colors: asStringArray(input["colors"]),
    specs: asStringRecord(input["specs"]),
    marketing_points: asStringArray(input["marketing_points"]),
    variants: asVariants(input["variants"]),
    tags: asStringArray(input["tags"]),
    metadata: asJsonRecord(input["metadata"]),
    stock: asInteger(input["stock"]),
    rating: asNumber(input["rating"]),
    review_count: asInteger(input["review_count"]),
    // Flags: only is_new defaults to true; the rest default to false.
    is_trending: asBoolean(input["is_trending"]),
    is_bestseller: asBoolean(input["is_bestseller"]),
    is_new: asBoolean(input["is_new"], true),
    is_top_rated: asBoolean(input["is_top_rated"]),
  };
  if (!product.external_id && !product.sku) {
    throw new Error("Product payload must include external_id or sku");
  }
  return product;
}
/**
 * Normalizes one webhook item into a product patch.
 * A webhook patch currently follows the same validation rules as a full
 * ingest payload, with the fallback source defaulting to "shein".
 */
export function normalizeWebhookPatch(raw: unknown, fallbackSource: SourceName = "shein"): WebhookProductPatch {
  if (!raw || typeof raw !== "object" || Array.isArray(raw)) {
    throw new Error("Each webhook item must be an object");
  }
  return normalizeProductInput(raw, fallbackSource);
}
/**
 * Resolves the category id for an ingested product.
 * Resolution order: explicit categoryId (when it exists) -> existing
 * (source, slug) match -> newly created category.
 *
 * Fix: slugify strips non-ASCII characters, so an Arabic-only category name
 * previously derived an empty slug and a category row with slug "". An empty
 * derived slug now falls back to "uncategorized-<source>".
 *
 * @throws Error when the insert unexpectedly returns no row.
 */
async function ensureCategory(source: SourceName, category?: CategoryInput, categoryId?: number): Promise<number> {
  if (categoryId) {
    const byId = await db
      .select({ id: categoriesTable.id })
      .from(categoriesTable)
      .where(eq(categoriesTable.id, categoryId))
      .limit(1);
    if (byId[0]) return byId[0].id;
  }
  const fallbackSlug = `uncategorized-${source}`;
  const derivedSlug = category?.slug ?? (category?.name ? slugify(category.name) : fallbackSlug);
  // Guard against an empty derived slug (e.g. slugify of a non-Latin name).
  const slug = derivedSlug || fallbackSlug;
  const name = category?.name ?? (source === "shein" ? "شي إن" : "إكسترا");
  const existing = await db
    .select({ id: categoriesTable.id })
    .from(categoriesTable)
    .where(and(eq(categoriesTable.source, source), eq(categoriesTable.slug, slug)))
    .limit(1);
  if (existing[0]) return existing[0].id;
  const [inserted] = await db
    .insert(categoriesTable)
    .values({
      name,
      name_en: category?.name_en ?? name,
      slug,
      source,
      sort_order: 0,
    })
    .returning({ id: categoriesTable.id });
  if (!inserted) {
    throw new Error("Failed to create category for ingested product");
  }
  return inserted.id;
}
/**
 * Looks up a product for the given source, trying external_id first and then
 * sku. Returns the matching row, or null when neither identifier matches.
 */
async function findExistingProduct(source: SourceName, externalId?: string, sku?: string) {
  const candidates = [
    externalId ? eq(productsTable.external_id, externalId) : null,
    sku ? eq(productsTable.sku, sku) : null,
  ];
  for (const condition of candidates) {
    if (!condition) continue;
    const rows = await db
      .select()
      .from(productsTable)
      .where(and(eq(productsTable.source, source), condition))
      .limit(1);
    if (rows[0]) return rows[0];
  }
  return null;
}
/**
 * Creates or updates a product from an external-source payload.
 *
 * Merge semantics: a defined incoming value wins; otherwise the existing
 * row's value is kept; otherwise a neutral default applies. Arrays and
 * records are taken from the input only when non-empty, so a sync that omits
 * e.g. images does not wipe previously stored images.
 *
 * @returns { mode: "created" | "updated", product } with the persisted row.
 */
export async function upsertExternalProduct(input: ProductIngestInput) {
  const existing = await findExistingProduct(input.source, input.external_id, input.sku);
  const categoryId = await ensureCategory(input.source, input.category, input.category_id ?? existing?.category_id);
  // Identifier used only to synthesize a display name when none is provided;
  // falls back to a timestamp when neither external_id nor sku exists.
  const productCode = input.external_id ?? input.sku ?? String(Date.now());
  const resolvedName = input.name ?? existing?.name ?? `${input.source.toUpperCase()} ${productCode}`;
  // Numeric columns come back from the DB as strings; convert before reuse.
  const resolvedPrice = input.price ?? (existing?.price ? Number(existing.price) : 0);
  const resolvedRating = input.rating ?? (existing?.rating ? Number(existing.rating) : 0);
  const resolvedReviewCount = input.review_count ?? existing?.review_count ?? 0;
  const resolvedStock = input.stock ?? existing?.stock ?? 0;
  const values = {
    source: input.source,
    external_id: input.external_id ?? existing?.external_id ?? null,
    source_url: input.source_url ?? existing?.source_url ?? null,
    currency: input.currency ?? existing?.currency ?? "SAR",
    availability: input.availability ?? existing?.availability ?? "unknown",
    name: resolvedName,
    name_en: input.name_en ?? existing?.name_en ?? null,
    short_description: input.short_description ?? existing?.short_description ?? null,
    description: input.description ?? existing?.description ?? null,
    brand: input.brand ?? existing?.brand ?? null,
    subcategory: input.subcategory ?? existing?.subcategory ?? null,
    sku: input.sku ?? existing?.sku ?? null,
    category_id: categoryId,
    price: toMoney(resolvedPrice),
    original_price:
      input.original_price !== undefined
        ? toMoney(input.original_price)
        : existing?.original_price ?? null,
    // Collections: input wins only when it actually has content.
    images: input.images?.length ? input.images : existing?.images ?? [],
    sizes: input.sizes?.length ? input.sizes : existing?.sizes ?? [],
    colors: input.colors?.length ? input.colors : existing?.colors ?? [],
    specs: Object.keys(input.specs ?? {}).length ? input.specs ?? {} : existing?.specs ?? {},
    marketing_points:
      input.marketing_points?.length ? input.marketing_points : existing?.marketing_points ?? [],
    variants: input.variants?.length ? input.variants : existing?.variants ?? [],
    tags: input.tags?.length ? input.tags : existing?.tags ?? [],
    metadata: Object.keys(input.metadata ?? {}).length ? input.metadata ?? {} : existing?.metadata ?? {},
    stock: resolvedStock,
    // Rating is persisted as a two-decimal string (same formatter as money).
    rating: toMoney(resolvedRating),
    review_count: resolvedReviewCount,
    is_trending: input.is_trending ?? existing?.is_trending ?? false,
    is_bestseller: input.is_bestseller ?? existing?.is_bestseller ?? false,
    is_new: input.is_new ?? existing?.is_new ?? true,
    is_top_rated: input.is_top_rated ?? existing?.is_top_rated ?? false,
    last_synced_at: new Date(),
    updated_at: new Date(),
  };
  if (existing) {
    const [updated] = await db
      .update(productsTable)
      .set(values)
      .where(eq(productsTable.id, existing.id))
      .returning();
    return { mode: "updated" as const, product: updated ?? existing };
  }
  const [created] = await db.insert(productsTable).values(values).returning();
  return { mode: "created" as const, product: created };
}
/** Applies a webhook patch by upserting it as a product, defaulting the source to "shein". */
export async function applyWebhookPatch(input: WebhookProductPatch) {
  return upsertExternalProduct({ ...input, source: input.source ?? "shein" });
}
/**
 * Persists one integration-event audit row (used by upserts, bulk syncs, and
 * webhooks to record what was received and how processing went).
 *
 * @param params.source     Origin system (e.g. "shein", "extra").
 * @param params.eventType  Event name, e.g. "products.bulk_sync".
 * @param params.status     Processing outcome (callers use "processed" /
 *                          "partial" / "failed"; stored as a free-form string).
 * @param params.payload    Raw payload or a truncated sample, for debugging.
 * @param params.dedupeKey  Caller-provided id, stored for dedupe/tracing.
 * @returns The inserted row.
 */
export async function logIntegrationEvent(params: {
  source: string;
  eventType: string;
  status: string;
  payload: Record<string, unknown>;
  externalId?: string;
  dedupeKey?: string;
  itemsTotal?: number;
  itemsSucceeded?: number;
  itemsFailed?: number;
  error?: string;
}) {
  const [created] = await db
    .insert(integrationEventsTable)
    .values({
      source: params.source,
      event_type: params.eventType,
      status: params.status,
      external_id: params.externalId ?? null,
      dedupe_key: params.dedupeKey ?? null,
      items_total: params.itemsTotal ?? 0,
      items_succeeded: params.itemsSucceeded ?? 0,
      items_failed: params.itemsFailed ?? 0,
      error: params.error ?? null,
      payload: params.payload,
      processed_at: new Date(),
    })
    .returning();
  return created;
}
/**
 * Aggregates a status snapshot for the ingestion pipeline: product counts per
 * source, the 10 most recent integration events, and which DB/security env
 * vars are configured. Only booleans/derived values are exposed — never the
 * secret values themselves.
 */
export async function getPipelineStatus() {
  // The counts query and the events query are independent; run in parallel.
  const [productCounts, recentEvents] = await Promise.all([
    db
      .select({
        total: sql<number>`CAST(COUNT(*) AS INTEGER)`,
        shein: sql<number>`CAST(SUM(CASE WHEN ${productsTable.source} = 'shein' THEN 1 ELSE 0 END) AS INTEGER)`,
        extra: sql<number>`CAST(SUM(CASE WHEN ${productsTable.source} = 'extra' THEN 1 ELSE 0 END) AS INTEGER)`,
      })
      .from(productsTable),
    db
      .select()
      .from(integrationEventsTable)
      .orderBy(desc(integrationEventsTable.created_at))
      .limit(10),
  ]);
  return {
    database: {
      configured: Boolean(process.env["DATABASE_URL"]),
      // Heuristic: Supabase connection strings contain "supabase.co".
      provider: process.env["DATABASE_URL"]?.includes("supabase.co") ? "supabase" : "postgresql",
      pool: {
        max: Number(process.env["DB_POOL_MAX"] ?? 20),
        query_timeout_ms: Number(process.env["DB_QUERY_TIMEOUT_MS"] ?? 15000),
        statement_timeout_ms: Number(process.env["DB_STATEMENT_TIMEOUT_MS"] ?? 15000),
      },
    },
    security: {
      api_ingest_key_configured: Boolean(process.env["API_INGEST_KEY"]),
      webhook_secret_configured: Boolean(process.env["SHEIN_WEBHOOK_SECRET"] || process.env["WEBHOOK_SECRET"]),
      admin_token_configured: Boolean(process.env["ADMIN_TOKEN"]),
    },
    // SUM over zero rows is NULL; the ?? 0 guards keep an empty table at 0.
    catalog: {
      total_products: productCounts[0]?.total ?? 0,
      shein_products: productCounts[0]?.shein ?? 0,
      extra_products: productCounts[0]?.extra ?? 0,
    },
    recent_events: recentEvents,
  };
}

View File

@ -0,0 +1,67 @@
import crypto from "node:crypto";
import type { Request, Response, NextFunction } from "express";
/**
 * Reads the caller's credential: prefers the "x-api-key" header, otherwise
 * the Authorization header (with or without a "Bearer " prefix).
 */
function readAuthToken(req: Request): string {
  const apiKey = req.header("x-api-key")?.trim();
  if (apiKey) return apiKey;
  const auth = req.header("authorization")?.trim() ?? "";
  return auth.startsWith("Bearer ") ? auth.slice(7).trim() : auth;
}
/**
 * Constant-time string comparison. Both sides are hashed first so that
 * timingSafeEqual always receives equal-length buffers and neither the
 * key's length nor its prefix leaks through comparison timing.
 */
function safeEquals(a: string, b: string): boolean {
  const digestA = crypto.createHash("sha256").update(a).digest();
  const digestB = crypto.createHash("sha256").update(b).digest();
  return crypto.timingSafeEqual(digestA, digestB);
}
/**
 * Express middleware guarding ingest endpoints with a shared API key.
 * Responds 503 when API_INGEST_KEY is unset and 401 on a missing or invalid
 * key. The previous `!==` comparison was replaced with a constant-time check
 * to avoid a timing side channel on the key value.
 */
export function requireApiKey(req: Request, res: Response, next: NextFunction): void {
  const configuredKey = process.env["API_INGEST_KEY"]?.trim();
  if (!configuredKey) {
    res.status(503).json({ error: "API ingest key is not configured on this server" });
    return;
  }
  const providedKey = readAuthToken(req);
  if (!providedKey || !safeEquals(providedKey, configuredKey)) {
    res.status(401).json({ error: "Unauthorized — valid API key required" });
    return;
  }
  next();
}
/**
 * Pulls the hex HMAC signature from the supported webhook headers, in
 * priority order: x-webhook-signature, x-signature, x-hub-signature-256.
 */
function extractSignature(req: Request): string {
  return (
    req.header("x-webhook-signature")?.trim() ||
    req.header("x-signature")?.trim() ||
    req.header("x-hub-signature-256")?.trim() ||
    ""
  );
}
/**
 * Builds middleware that verifies an HMAC-SHA256 webhook signature.
 * The secret comes from the first configured env var in `secretEnvNames`.
 * Responds 503 when no secret is configured, 401 on a missing or invalid
 * signature. Verification runs over req.rawBody when available (captured at
 * body-parse time); otherwise it falls back to re-serializing req.body,
 * which may not match the sender's exact bytes.
 *
 * Fix: the old comparison checked *string* lengths and then compared UTF-8
 * buffers; a 64-character signature containing multibyte characters produced
 * unequal buffer sizes, making timingSafeEqual throw (-> unhandled 500).
 * The signature is now required to be exactly 64 hex chars and both sides
 * are compared as hex-decoded 32-byte buffers.
 */
export function requireWebhookSignature(secretEnvNames: string[]): (req: Request, res: Response, next: NextFunction) => void {
  return (req, res, next) => {
    const secret = secretEnvNames
      .map((name) => process.env[name]?.trim())
      .find((value): value is string => Boolean(value));
    if (!secret) {
      res.status(503).json({ error: "Webhook signature secret is not configured on this server" });
      return;
    }
    const rawBody = req.rawBody ?? JSON.stringify(req.body ?? {});
    const provided = extractSignature(req).replace(/^sha256=/i, "").toLowerCase();
    if (!provided) {
      res.status(401).json({ error: "Missing webhook signature" });
      return;
    }
    const expected = crypto.createHmac("sha256", secret).update(rawBody).digest("hex");
    // Require a well-formed 64-char hex digest up front; this guarantees
    // timingSafeEqual always sees two equal-length 32-byte buffers.
    const isValid =
      /^[0-9a-f]{64}$/.test(provided) &&
      crypto.timingSafeEqual(Buffer.from(provided, "hex"), Buffer.from(expected, "hex"));
    if (!isValid) {
      res.status(401).json({ error: "Invalid webhook signature" });
      return;
    }
    next();
  };
}

View File

@ -15,6 +15,7 @@ import checkoutEventsRouter from "./checkout-events";
import storeSettingsRouter from "./store-settings";
import imageProxyRouter from "./image-proxy";
import integrationsRouter from "./integrations";
import ingestRouter from "./ingest";
const router = Router();
@ -34,5 +35,6 @@ router.use(analyticsRouter);
router.use(storeSettingsRouter);
router.use(imageProxyRouter);
router.use(integrationsRouter);
router.use(ingestRouter);
export default router;

View File

@ -0,0 +1,193 @@
import { Router, type IRouter } from "express";
import { db } from "@workspace/db";
import { sql } from "drizzle-orm";
import { requireAdmin } from "../middleware/auth";
import { requireApiKey, requireWebhookSignature } from "../middleware/api-key";
import {
applyWebhookPatch,
getPipelineStatus,
logIntegrationEvent,
normalizeProductInput,
normalizeWebhookPatch,
upsertExternalProduct,
type SourceName,
} from "../lib/ingest";
const router: IRouter = Router();
/**
 * Resolves a raw source value to a known SourceName.
 * Accepts both explicit literals ("shein" and "extra"); anything else falls
 * back to `fallback`. Previously an explicit "extra" was silently coerced to
 * the fallback value.
 */
function resolveSource(value: unknown, fallback: SourceName): SourceName {
  if (value === "shein" || value === "extra") return value;
  return fallback;
}
// Admin-only: pipeline health snapshot. Runs a trivial query first so DB
// connectivity failures surface as a clean 500 rather than a half-built status.
router.get("/integrations/pipeline/status", requireAdmin, async (req, res) => {
  try {
    await db.execute(sql`select 1`);
    const status = await getPipelineStatus();
    res.json({ ok: true, ...status });
  } catch (err) {
    req.log.error({ err }, "Failed to fetch pipeline status");
    res.status(500).json({ error: err instanceof Error ? err.message : "Internal server error" });
  }
});
// Admin-only: the 10 most recent integration events (newest first),
// reusing the status aggregation and returning only its events slice.
router.get("/ingest/events", requireAdmin, async (req, res) => {
  try {
    const status = await getPipelineStatus();
    res.json(status.recent_events);
  } catch (err) {
    req.log.error({ err }, "Failed to list ingest events");
    res.status(500).json({ error: err instanceof Error ? err.message : "Internal server error" });
  }
});
// API-key protected: upsert a single product. Every attempt — success or
// failure — is recorded as an integration event for auditability.
router.post("/ingest/products/upsert", requireApiKey, async (req, res) => {
  try {
    const payload = normalizeProductInput(req.body, resolveSource(req.body?.source, "extra"));
    const result = await upsertExternalProduct(payload);
    await logIntegrationEvent({
      source: payload.source,
      eventType: "products.upsert",
      status: "processed",
      payload: req.body ?? {},
      externalId: payload.external_id ?? payload.sku,
      itemsTotal: 1,
      itemsSucceeded: 1,
      itemsFailed: 0,
    });
    // 201 for newly created products, 200 for updates of existing ones.
    res.status(result.mode === "created" ? 201 : 200).json(result);
  } catch (err) {
    req.log.error({ err }, "Failed to upsert ingested product");
    // NOTE(review): if this audit insert itself throws, it would mask the
    // original error and skip the 400 response — confirm that is acceptable.
    await logIntegrationEvent({
      source: resolveSource(req.body?.source, "extra"),
      eventType: "products.upsert",
      status: "failed",
      payload: (req.body ?? {}) as Record<string, unknown>,
      externalId: req.body?.external_id ?? req.body?.sku,
      itemsTotal: 1,
      itemsSucceeded: 0,
      itemsFailed: 1,
      error: err instanceof Error ? err.message : "Unknown error",
    });
    res.status(400).json({ error: err instanceof Error ? err.message : "Invalid payload" });
  }
});
// API-key protected: bulk upsert. Items are processed sequentially and
// independently — one bad item does not abort the batch. Responds 207
// (Multi-Status) when some items failed, 200 when all succeeded.
router.post("/ingest/products/bulk", requireApiKey, async (req, res) => {
  const source = resolveSource(req.body?.source, "extra");
  const products = Array.isArray(req.body?.products) ? req.body.products : [];
  const webhookId = typeof req.body?.webhook_id === "string" ? req.body.webhook_id : undefined;
  if (products.length === 0) {
    res.status(400).json({ error: "Request body must include a non-empty products array" });
    return;
  }
  let processed = 0;
  let created = 0;
  let updated = 0;
  const errors: Array<{ index: number; message: string }> = [];
  for (const [index, rawProduct] of products.entries()) {
    try {
      const product = normalizeProductInput(rawProduct, source);
      const result = await upsertExternalProduct(product);
      processed += 1;
      if (result.mode === "created") created += 1;
      if (result.mode === "updated") updated += 1;
    } catch (err) {
      // Failures are collected per item index so the caller can retry them.
      errors.push({
        index,
        message: err instanceof Error ? err.message : "Invalid payload",
      });
    }
  }
  const status = errors.length > 0 ? (processed > 0 ? "partial" : "failed") : "processed";
  // Audit trail is bounded: only a 3-item payload sample and at most 10
  // serialized item errors are stored per event row.
  // NOTE(review): webhook_id is recorded as dedupe_key but replays are not
  // rejected here — confirm whether deduplication is enforced elsewhere.
  await logIntegrationEvent({
    source,
    eventType: "products.bulk_sync",
    status,
    payload: {
      source,
      webhook_id: webhookId,
      total_received: products.length,
      sample: products.slice(0, 3),
    },
    dedupeKey: webhookId,
    itemsTotal: products.length,
    itemsSucceeded: processed,
    itemsFailed: errors.length,
    error: errors.length > 0 ? JSON.stringify(errors.slice(0, 10)) : undefined,
  });
  res.status(errors.length > 0 ? 207 : 200).json({
    source,
    total_received: products.length,
    processed,
    created,
    updated,
    failed: errors.length,
    errors,
  });
});
// SHEIN webhook: requires BOTH the shared API key and a valid HMAC signature
// (SHEIN_WEBHOOK_SECRET, falling back to WEBHOOK_SECRET). Items are applied
// independently; the source is forced to "shein" regardless of the payload.
router.post(
  "/webhooks/shein/products",
  requireApiKey,
  requireWebhookSignature(["SHEIN_WEBHOOK_SECRET", "WEBHOOK_SECRET"]),
  async (req, res) => {
    const eventType = typeof req.body?.event === "string" ? req.body.event : "shein.products.changed";
    const webhookId = typeof req.body?.webhook_id === "string" ? req.body.webhook_id : undefined;
    const items = Array.isArray(req.body?.products) ? req.body.products : [];
    if (items.length === 0) {
      res.status(400).json({ error: "Webhook body must include a non-empty products array" });
      return;
    }
    let processed = 0;
    const errors: Array<{ index: number; message: string }> = [];
    for (const [index, rawItem] of items.entries()) {
      try {
        const patch = normalizeWebhookPatch(rawItem, "shein");
        await applyWebhookPatch({ ...patch, source: "shein" });
        processed += 1;
      } catch (err) {
        errors.push({
          index,
          message: err instanceof Error ? err.message : "Invalid webhook item",
        });
      }
    }
    const status = errors.length > 0 ? (processed > 0 ? "partial" : "failed") : "processed";
    // Audit trail: bounded payload sample (3 items) and error list (10 items).
    // NOTE(review): webhook_id is stored as dedupe_key but replayed webhooks
    // are not rejected here — confirm whether dedupe is enforced elsewhere.
    await logIntegrationEvent({
      source: "shein",
      eventType,
      status,
      payload: {
        webhook_id: webhookId,
        event: eventType,
        total_received: items.length,
        sample: items.slice(0, 3),
      },
      dedupeKey: webhookId,
      itemsTotal: items.length,
      itemsSucceeded: processed,
      itemsFailed: errors.length,
      error: errors.length > 0 ? JSON.stringify(errors.slice(0, 10)) : undefined,
    });
    // 207 (Multi-Status) when some items failed, 200 when all succeeded.
    res.status(errors.length > 0 ? 207 : 200).json({
      event: eventType,
      processed,
      failed: errors.length,
      errors,
    });
  },
);
export default router;

View File

@ -147,7 +147,7 @@ router.get("/integrations/shein-categories", async (req, res) => {
try {
let categories: SheinCategory[] = [];
let source = "preset";
let scrapeResult: { success: boolean; error?: string; runId?: string } | null = null;
let scrapeResult: Awaited<ReturnType<typeof fetchSheinCategories>> | null = null;
if (mode === "scrape") {
scrapeResult = await fetchSheinCategories();

View File

@ -5,6 +5,10 @@ import { requireAdmin } from "../middleware/auth";
const router: IRouter = Router();
/** Collapses an Express route param that may arrive as an array into one string ("" when absent). */
function getSingleParamValue(value: string | string[] | undefined): string {
  if (Array.isArray(value)) {
    return value[0] ?? "";
  }
  return value ?? "";
}
function generateOrderNumber(): string {
const now = Date.now();
const random = Math.floor(Math.random() * 1000).toString().padStart(3, "0");
@ -41,7 +45,7 @@ router.get("/orders", async (req, res) => {
router.get("/orders/:id", async (req, res) => {
try {
const id = parseInt(req.params.id);
const id = parseInt(getSingleParamValue(req.params.id), 10);
const [order] = await db.select().from(ordersTable).where(eq(ordersTable.id, id));
if (!order) return res.status(404).json({ error: "Order not found" });
res.json(order);
@ -191,7 +195,7 @@ router.post("/orders", async (req, res) => {
router.delete("/orders/:id", requireAdmin, async (req, res) => {
try {
const id = parseInt(req.params.id);
const id = parseInt(getSingleParamValue(req.params.id), 10);
await db.delete(ordersTable).where(eq(ordersTable.id, id));
res.json({ success: true });
} catch (err) {
@ -202,7 +206,7 @@ router.delete("/orders/:id", requireAdmin, async (req, res) => {
router.put("/orders/:id/status", async (req, res) => {
try {
const id = parseInt(req.params.id);
const id = parseInt(getSingleParamValue(req.params.id), 10);
const { status, tracking_number } = req.body;
// Fetch current order first

View File

@ -0,0 +1,9 @@
// Augments Express's Request with the raw request body captured by the JSON /
// urlencoded body-parser `verify` hooks, so webhook signature checks can run
// over the exact bytes the client sent rather than a re-serialization.
declare global {
  namespace Express {
    interface Request {
      // Present only when the request carried a non-empty body.
      rawBody?: string;
    }
  }
}
// Keeps this file a module so the global augmentation is applied correctly.
export {};

View File

@ -3,9 +3,14 @@
"compilerOptions": {
"outDir": "dist",
"rootDir": "src",
"types": ["node"]
"types": [
"node"
],
"noImplicitReturns": false
},
"include": ["src"],
"include": [
"src"
],
"references": [
{
"path": "../../lib/db"

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,2 +1,4 @@
const BASE = import.meta.env.BASE_URL.replace(/\/$/, "");
export const API = `${BASE}/api`;
const OVERRIDE = import.meta.env.VITE_API_BASE_URL?.replace(/\/$/, "");
export const API = OVERRIDE || `${BASE}/api`;

View File

@ -3,8 +3,8 @@ export type Lang = "ar" | "en";
export const translations = {
ar: {
// Store
store_name: "اكسترا",
store_tagline: "الوجهة الأولى للإلكترونيات والأجهزة المنزلية في المملكة العربية السعودية.",
store_name: "رين",
store_tagline: "متجر رين لتجربة تسوق سعودية أنيقة تجمع الإلكترونيات، الجمال، المنزل والعروض اليومية.",
// Header
search_placeholder: "ابحث عن منتجات...",
top_bar_offer: "⚡ عروض خاصة — خصم يصل إلى 40% على المنتجات المختارة",
@ -15,7 +15,7 @@ export const translations = {
user_cart: "سلتي",
user_logout: "تسجيل الخروج",
user_guest: "مستخدم",
user_member: "عضو اكسترا",
user_member: "عضو رين",
// Auth
auth_login_tab: "تسجيل الدخول",
auth_register_tab: "إنشاء حساب",
@ -46,7 +46,7 @@ export const translations = {
server_error: "تعذر الاتصال بالخادم",
// Home sections
section_view_all: "عرض الكل ←",
section_extra_title: "اكسترا — إلكترونيات وأجهزة",
section_extra_title: "رين — إلكترونيات مختارة",
section_shein_sub: "Fashion, Beauty & Home",
// Product card
product_new: "جديد",
@ -168,10 +168,10 @@ export const translations = {
verifying_sub: "يرجى الانتظار، يتم التحقق من عملية الدفع",
payment_success: "✅ تم الدفع بنجاح!",
payment_success_sub: "شكراً لك! جاري تحويلك للصفحة الرئيسية...",
ssl_badge: "مدفوعاتك آمنة بتشفير TLS ومعايير PCI DSS المعتمدة من مؤسسة النقد العربي السعودي 🔒",
delivery_days_3: "توصيل 3 أيام عمل",
delivery_days_5: "توصيل 5 أيام عمل",
delivery_days_7: "توصيل 7 أيام عمل",
ssl_badge: "مدفوعاتك آمنة 100% بتشفير TLS وبنية دفع محمية على مدار الساعة 🔒",
delivery_days_3: "توصيل سريع داخل المملكة",
delivery_days_5: "توصيل قياسي داخل المملكة",
delivery_days_7: "توصيل إلى جميع المناطق",
// Profile
profile_login_first: "سجّل دخولك أولاً",
profile_login_sub: "للوصول إلى ملفك الشخصي وطلباتك",
@ -195,7 +195,7 @@ export const translations = {
footer_warranty: "الضمان",
footer_contact: "تواصل معنا",
footer_address: "الرياض، المملكة العربية السعودية",
footer_copyright: "© 2025 اكسترا السعودية. جميع الحقوق محفوظة.",
footer_copyright: "© 2025 متجر رين. جميع الحقوق محفوظة.",
// 404
page_not_found: "الصفحة غير موجودة",
not_found: "الصفحة غير موجودة",
@ -209,6 +209,7 @@ export const translations = {
section_trending_title: "الأكثر رواجاً",
section_bestseller_title: "الأكثر مبيعاً",
section_new_title: "وصل حديثاً",
section_top_rated_title: "أعلى تقييماً",
shein_section_title: "أزياء، جمال ومنزل",
browse_all_cat: "تصفح جميع المنتجات",
// Mega menu
@ -241,7 +242,7 @@ export const translations = {
login: "تسجيل الدخول",
logout: "تسجيل الخروج",
user_default: "مستخدم",
extra_member: "عضو اكسترا",
extra_member: "عضو رين",
my_orders: "طلباتي",
my_orders_sub: "تتبع وإدارة طلباتك",
wishlist: "قائمة الأمنيات",
@ -254,8 +255,8 @@ export const translations = {
en: {
// Store
store_name: "eXtra",
store_tagline: "Saudi Arabia's #1 destination for electronics and home appliances.",
store_name: "Rain",
store_tagline: "Rain Store for a premium Saudi shopping experience across electronics, beauty, home, and daily deals.",
// Header
search_placeholder: "Search products...",
top_bar_offer: "⚡ Special Offers — Up to 40% off on selected items",
@ -266,7 +267,7 @@ export const translations = {
user_cart: "My Cart",
user_logout: "Sign Out",
user_guest: "User",
user_member: "eXtra Member",
user_member: "Rain Member",
// Auth
auth_login_tab: "Sign In",
auth_register_tab: "Create Account",
@ -297,7 +298,7 @@ export const translations = {
server_error: "Could not connect to server",
// Home sections
section_view_all: "View All →",
section_extra_title: "eXtra — Electronics & Appliances",
section_extra_title: "Rain — Featured Electronics",
section_shein_sub: "Fashion, Beauty & Home",
// Product card
product_new: "NEW",
@ -419,7 +420,7 @@ export const translations = {
verifying_sub: "Please wait while we verify your payment",
payment_success: "✅ Payment Successful!",
payment_success_sub: "Thank you! Redirecting to home page...",
ssl_badge: "Your payments are secured with TLS encryption & PCI DSS standards approved by Saudi Central Bank (SAMA) 🔒",
ssl_badge: "Your payments are protected with TLS encryption and a continuously monitored secure checkout 🔒",
delivery_days_3: "3 business days delivery",
delivery_days_5: "5 business days delivery",
delivery_days_7: "7 business days delivery",
@ -446,7 +447,7 @@ export const translations = {
footer_warranty: "Warranty",
footer_contact: "Contact Us",
footer_address: "Riyadh, Kingdom of Saudi Arabia",
footer_copyright: "© 2025 eXtra Saudi Arabia. All rights reserved.",
footer_copyright: "© 2025 Rain Store. All rights reserved.",
// 404
page_not_found: "Page Not Found",
not_found: "Page Not Found",
@ -460,6 +461,7 @@ export const translations = {
section_trending_title: "Trending",
section_bestseller_title: "Best Sellers",
section_new_title: "New Arrivals",
section_top_rated_title: "Top Rated",
shein_section_title: "Fashion, Beauty & Home",
browse_all_cat: "Browse All Products",
// Mega menu
@ -492,7 +494,7 @@ export const translations = {
login: "Sign In",
logout: "Sign Out",
user_default: "User",
extra_member: "eXtra Member",
extra_member: "Rain Member",
my_orders: "My Orders",
my_orders_sub: "Track and manage your orders",
wishlist: "Wishlist",

View File

@ -0,0 +1,118 @@
// Public shape of an authenticated preview user (safe to hand to the UI).
export type PreviewAuthUser = {
  id: number;
  name: string | null;
  email: string;
};

// Internal storage record: public shape plus the password and creation time.
// NOTE(review): passwords are kept in plaintext in localStorage — tolerable
// only because this is a client-side preview/demo auth store, never production.
type StoredPreviewUser = PreviewAuthUser & {
  password: string;
  created_at: string;
};

// localStorage key under which the array of registered preview users lives.
const STORE_USERS_KEY = "extra_preview_users";
// Static salt mixed into generated tokens; bump the version suffix to
// invalidate all previously issued preview tokens.
const STORE_AUTH_SALT = "extra_preview_auth_v1";
// Fixed token returned by the preview admin login.
export const PREVIEW_ADMIN_TOKEN = "preview_admin_token";

// Seed account so the preview store always has one working login
// (demo@extra.sa / Extra123).
const DEMO_PREVIEW_USER: StoredPreviewUser = {
  id: 1,
  name: "عميل تجريبي",
  email: "demo@extra.sa",
  password: "Extra123",
  created_at: "2026-03-28T00:00:00.000Z",
};
/**
 * Load the registered preview users from localStorage.
 *
 * The stored JSON is untrusted (it can be edited by hand in devtools), so
 * entries are validated before use instead of being trusted blindly: anything
 * that is not an object with numeric `id` and string `email`/`password` is
 * dropped. Bad JSON or a non-array degrades to just the demo account, and the
 * demo account is always present so the preview has a guaranteed login.
 */
function readUsers(): StoredPreviewUser[] {
  if (typeof localStorage === "undefined") return [DEMO_PREVIEW_USER];
  try {
    const parsed: unknown = JSON.parse(localStorage.getItem(STORE_USERS_KEY) || "[]");
    const users = (Array.isArray(parsed) ? parsed : []).filter(
      (entry): entry is StoredPreviewUser =>
        !!entry &&
        typeof entry === "object" &&
        typeof (entry as StoredPreviewUser).id === "number" &&
        typeof (entry as StoredPreviewUser).email === "string" &&
        typeof (entry as StoredPreviewUser).password === "string",
    );
    return users.some((user) => user.email === DEMO_PREVIEW_USER.email)
      ? users
      : [DEMO_PREVIEW_USER, ...users];
  } catch {
    return [DEMO_PREVIEW_USER];
  }
}
/** Persist the preview user registry; a no-op outside browser environments. */
function writeUsers(users: StoredPreviewUser[]) {
  if (typeof localStorage !== "undefined") {
    localStorage.setItem(STORE_USERS_KEY, JSON.stringify(users));
  }
}
/** Canonicalize an email for storage and lookup: strip whitespace, lowercase. */
function normalizeEmail(email: string) {
  const trimmed = email.trim();
  return trimmed.toLowerCase();
}
/**
 * Build a deterministic pseudo-token for a preview user.
 * NOT cryptographically secure — fine only for this client-side demo store.
 */
function makeToken(userId: number) {
  return ["preview_user", String(userId), STORE_AUTH_SALT].join("_");
}
/** True when the response declares a JSON body via its Content-Type header. */
export function isJsonResponse(res: Response) {
  return (res.headers.get("content-type") ?? "").includes("application/json");
}
/**
 * Register a new preview-store user in the localStorage registry.
 *
 * Validates the email format and password strength (min 8 chars, at least one
 * uppercase letter and one digit) and rejects duplicate emails. All error
 * messages are user-facing Arabic strings.
 *
 * Fix: `confirm_password` is declared optional, but the original defaulted it
 * to "" so an omitted confirmation always failed the mismatch check. It now
 * defaults to `password`, making the parameter genuinely optional; callers
 * that pass a confirmation are unaffected.
 *
 * @returns `{ user, token }` — the public user record and a preview token.
 * @throws Error (Arabic message) on any validation failure.
 */
export function registerPreviewStoreUser(input: {
  name?: string;
  email: string;
  password: string;
  confirm_password?: string;
}) {
  const email = normalizeEmail(input.email);
  const name = input.name?.trim() || null;
  const password = input.password || "";
  // Missing confirmation counts as "confirmed"; an explicit value must match.
  const confirm = input.confirm_password ?? password;
  if (!email || !/^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email)) {
    throw new Error("البريد الإلكتروني غير صحيح");
  }
  if (password.length < 8) {
    throw new Error("كلمة المرور يجب أن تكون 8 أحرف على الأقل");
  }
  if (!/[A-Z]/.test(password)) {
    throw new Error("كلمة المرور يجب أن تحتوي على حرف كبير");
  }
  if (!/[0-9]/.test(password)) {
    throw new Error("كلمة المرور يجب أن تحتوي على رقم");
  }
  if (password !== confirm) {
    throw new Error("كلمة المرور وتأكيدها غير متطابقين");
  }
  const users = readUsers();
  if (users.some((user) => user.email === email)) {
    throw new Error("البريد الإلكتروني مستخدم بالفعل");
  }
  // Next id = max existing id + 1 (ids are never reused within a session).
  const id = users.reduce((max, user) => Math.max(max, user.id), 0) + 1;
  // NOTE(review): password stored in plaintext — acceptable only for this
  // client-side preview store, never for real authentication.
  const newUser: StoredPreviewUser = {
    id,
    name,
    email,
    password,
    created_at: new Date().toISOString(),
  };
  users.push(newUser);
  writeUsers(users);
  const user: PreviewAuthUser = { id: newUser.id, name: newUser.name, email: newUser.email };
  return { user, token: makeToken(user.id) };
}
export function loginPreviewStoreUser(input: { email: string; password: string }) {
const email = normalizeEmail(input.email);
const users = readUsers();
const user = users.find((entry) => entry.email === email && entry.password === input.password);
if (!user) {
throw new Error("البريد الإلكتروني أو كلمة المرور غير صحيحة");
}
return {
user: { id: user.id, name: user.name, email: user.email },
token: makeToken(user.id),
};
}
/**
 * Preview-only admin login with fixed credentials (admin / admin123).
 * NOTE(review): hardcoded credentials are deliberate for this demo preview;
 * never ship this pattern in production code.
 * @throws Error (Arabic message) for any other credential pair.
 */
export function loginPreviewAdmin(input: { username: string; password: string }) {
  const ok = input.username === "admin" && input.password === "admin123";
  if (!ok) {
    throw new Error("بيانات الدخول غير صحيحة");
  }
  return { token: PREVIEW_ADMIN_TOKEN, username: "admin" };
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -4,13 +4,7 @@ import tailwindcss from "@tailwindcss/vite";
import path from "path";
import runtimeErrorOverlay from "@replit/vite-plugin-runtime-error-modal";
const rawPort = process.env.PORT;
if (!rawPort) {
throw new Error(
"PORT environment variable is required but was not provided.",
);
}
const rawPort = process.env.PORT ?? "3001";
const port = Number(rawPort);
@ -18,13 +12,8 @@ if (Number.isNaN(port) || port <= 0) {
throw new Error(`Invalid PORT value: "${rawPort}"`);
}
const basePath = process.env.BASE_PATH;
if (!basePath) {
throw new Error(
"BASE_PATH environment variable is required but was not provided.",
);
}
const basePath = process.env.BASE_PATH ?? "/";
const apiProxyTarget = process.env.API_SERVER_URL ?? "http://127.0.0.1:8080";
export default defineConfig({
base: basePath,
@ -62,6 +51,12 @@ export default defineConfig({
port,
host: "0.0.0.0",
allowedHosts: true,
proxy: {
"/api": {
target: apiProxyTarget,
changeOrigin: true,
},
},
fs: {
strict: true,
deny: ["**/.*"],

View File

@ -5,13 +5,7 @@ import path from "path";
import runtimeErrorOverlay from "@replit/vite-plugin-runtime-error-modal";
import { mockupPreviewPlugin } from "./mockupPreviewPlugin";
const rawPort = process.env.PORT;
if (!rawPort) {
throw new Error(
"PORT environment variable is required but was not provided.",
);
}
const rawPort = process.env.PORT ?? "3001";
const port = Number(rawPort);
@ -19,13 +13,7 @@ if (Number.isNaN(port) || port <= 0) {
throw new Error(`Invalid PORT value: "${rawPort}"`);
}
const basePath = process.env.BASE_PATH;
if (!basePath) {
throw new Error(
"BASE_PATH environment variable is required but was not provided.",
);
}
const basePath = process.env.BASE_PATH ?? "/";
export default defineConfig({
base: basePath,

View File

@ -0,0 +1,184 @@
# Flatlogic CI/CD + External DB + Ingestion API
هذا الإعداد يجعل المشروع يعمل بهذه الصورة:
- **GitHub** هو مصدر الحقيقة للكود.
- **Bolt / Replit** يدفعان التعديلات إلى نفس المستودع أو يرسلان `repository_dispatch`.
- **GitHub Actions** تبني المشروع وتتحقق منه ثم تنشره إلى VM الخاص بـ Flatlogic.
- **Flatlogic / API Server** يدير بيانات المنتجات القادمة من أدوات السحب وويبهوكات شي إن.
- **Supabase / PostgreSQL** هو مخزن البيانات الخارجي للمنتجات والفئات وسجل أحداث التكامل.
## ما تمت إضافته
- `.github/workflows/ci.yml`
- Typecheck + Build لكل Push / Pull Request.
- `.github/workflows/deploy-flatlogic.yml`
- ينشر تلقائيًا إلى VM عند التحديث على `main` أو `master`.
- يدعم `repository_dispatch` لأنواع:
- `bolt_sync`
- `replit_sync`
- `flatlogic_deploy`
- `scripts/flatlogic-deploy.sh`
- يسحب آخر نسخة من GitHub.
- يثبت الحزم.
- يشغل `typecheck` و `build`.
- يطبق schema قاعدة البيانات عبر Drizzle.
- يعيد تشغيل `extra-store` و `flatlogic-api` عبر PM2.
- `.env.example`
- كل متغيرات البيئة المطلوبة للـ DB والـ API والأمان.
- API endpoints جديدة داخل `artifacts/api-server`.
## الأسرار المطلوبة في GitHub Actions
أضف هذه القيم في **GitHub → Settings → Secrets and variables → Actions**:
### أسرار النشر إلى Flatlogic VM
- `FLATLOGIC_HOST`
- `FLATLOGIC_USER`
- `FLATLOGIC_SSH_KEY`
- `FLATLOGIC_PROJECT_DIR`
- `FLATLOGIC_DEPLOY_BRANCH` (اختياري)
### أسرار الـ backend
- `DATABASE_URL`
- `DB_SSL` = `require`
- `DB_POOL_MAX` = `20`
- `DB_QUERY_TIMEOUT_MS` = `15000`
- `DB_STATEMENT_TIMEOUT_MS` = `15000`
- `ADMIN_TOKEN`
- `API_INGEST_KEY`
- `WEBHOOK_SECRET`
- `SHEIN_WEBHOOK_SECRET`
- `API_PORT` = `8080`
- `STORE_PORT` = `3001`
## إعداد قاعدة البيانات الخارجية (Supabase / PostgreSQL)
الحد الأدنى المقترح لاستيعاب 2000 منتج من Extra + Shein:
- استخدم **Postgres خارجي** أو **Supabase**.
- يفضل في Supabase استخدام **transaction pooler** داخل `DATABASE_URL`.
- الإعدادات الافتراضية المضافة في الكود:
- `DB_POOL_MAX=20`
- `DB_QUERY_TIMEOUT_MS=15000`
- `DB_STATEMENT_TIMEOUT_MS=15000`
- `keepAlive=true`
- أضف الـ schema بالأمر:
```bash
pnpm --filter @workspace/db run push
```
> ملاحظة: الجدول `products` صار يدعم الآن `source`, `external_id`, `source_url`, `currency`, `availability`, `metadata`, `last_synced_at` مع فهارس مخصصة للبحث والتزامن.
## API Endpoints الجديدة
### 1) Bulk ingestion للمنتجات
`POST /api/ingest/products/bulk`
Headers:
```text
x-api-key: <API_INGEST_KEY>
content-type: application/json
```
Body مثال:
```json
{
"source": "shein",
"webhook_id": "apify-run-123",
"products": [
{
"external_id": "shein-10001",
"sku": "SKU-10001",
"name": "فستان صيفي",
"brand": "SHEIN",
"price": 149,
"original_price": 199,
"stock": 25,
"availability": "in_stock",
"sizes": ["S", "M", "L"],
"colors": ["Black", "Pink"],
"images": ["https://example.com/1.jpg"],
"category": {
"slug": "dresses",
"name": "فساتين"
},
"source_url": "https://example.com/product/10001"
}
]
}
```
### 2) Upsert منتج مفرد
`POST /api/ingest/products/upsert`
نفس الحماية عبر `x-api-key`.
### 3) Webhook تحديثات شي إن
`POST /api/webhooks/shein/products`
Headers:
```text
x-api-key: <API_INGEST_KEY>
x-webhook-signature: sha256=<hmac_sha256_of_raw_body>
content-type: application/json
```
Body مثال:
```json
{
"webhook_id": "shein-webhook-987",
"event": "price.updated",
"products": [
{
"external_id": "shein-10001",
"price": 139,
"stock": 12,
"availability": "low_stock",
"sizes": ["S", "M"]
}
]
}
```
### 4) Pipeline status
`GET /api/integrations/pipeline/status`
- محمي بـ `Authorization: Bearer <ADMIN_TOKEN>`
- يعرض:
- حالة إعداد الأمان
- حالة الـ DB
- عدد المنتجات حسب المصدر
- آخر أحداث التكامل
## ربط Bolt / Replit مع GitHub
أفضل سيناريو:
1. اجعل **Bolt** أو **Replit** يدفعان إلى نفس مستودع GitHub.
2. كل Push إلى `main` يشغل:
- `ci.yml`
- ثم `deploy-flatlogic.yml`
3. النتيجة: يتم تحديث الموقع والـ backend تلقائيًا.
إذا كانت الأداة لا تدفع مباشرة إلى GitHub، استخدم `repository_dispatch` من GitHub API بنوع:
- `bolt_sync`
- `replit_sync`
## ملاحظات تشغيلية
- الواجهة الأمامية الآن تدعم `VITE_API_BASE_URL` إذا أردت backend مختلفًا عن نفس الدومين.
- في وضع التطوير، Vite يمرر `/api` إلى `http://127.0.0.1:8080` عبر proxy.
- إذا لم تكن أسرار الـ backend موجودة، فسيستمر المتجر الأمامي بالعمل، لكن تشغيل خدمة الـ API سيتم تخطيه أثناء النشر.

View File

@ -1,2 +1 @@
export * from "./generated/api";
export * from "./generated/types";

View File

@ -4,13 +4,56 @@ import * as schema from "./schema";
const { Pool } = pg;
if (!process.env.DATABASE_URL) {
const databaseUrl = process.env.DATABASE_URL;
if (!databaseUrl) {
throw new Error(
"DATABASE_URL must be set. Did you forget to provision a database?",
);
}
export const pool = new Pool({ connectionString: process.env.DATABASE_URL });
/** Read a positive numeric env var; fall back when unset, non-numeric, or <= 0. */
function envNumber(name: string, fallback: number): number {
  const raw = process.env[name];
  if (!raw) return fallback;
  const value = Number(raw);
  if (!Number.isFinite(value) || value <= 0) {
    return fallback;
  }
  return value;
}
// Decide the pg `ssl` option from DB_SSL and the connection string:
//   - "disable" / "false"            -> SSL off
//   - "require", or a URL containing sslmode=require or supabase.co
//                                    -> SSL on; certificate verification is
//       controlled by DB_SSL_REJECT_UNAUTHORIZED (default "false", i.e.
//       accept self-signed/pooler certificates)
//   - anything else (default "auto") -> undefined, letting node-postgres
//       use its own default.
// NOTE: reads `resolvedDatabaseUrl`, which is declared after this function;
// safe because resolveSsl() is only invoked once that const is initialized.
function resolveSsl() {
  const sslEnv = (process.env.DB_SSL ?? "auto").toLowerCase();
  if (sslEnv === "disable" || sslEnv === "false") {
    return false;
  }
  const rejectUnauthorized =
    (process.env.DB_SSL_REJECT_UNAUTHORIZED ?? "false").toLowerCase() ===
    "true";
  if (
    sslEnv === "require" ||
    resolvedDatabaseUrl.includes("sslmode=require") ||
    resolvedDatabaseUrl.includes("supabase.co")
  ) {
    return { rejectUnauthorized };
  }
  return undefined;
}
// Alias so resolveSsl() and the pool share the single validated URL.
const resolvedDatabaseUrl = databaseUrl;

// Shared connection pool. Sizing/timeout knobs come from env vars with
// conservative defaults: pool of 20, 30s idle timeout, 10s connect timeout,
// 15s query/statement timeouts, and TCP keep-alive for long-lived
// pooler connections (e.g. Supabase transaction pooler).
export const pool = new Pool({
  connectionString: resolvedDatabaseUrl,
  max: envNumber("DB_POOL_MAX", 20),
  idleTimeoutMillis: envNumber("DB_IDLE_TIMEOUT_MS", 30_000),
  connectionTimeoutMillis: envNumber("DB_CONNECTION_TIMEOUT_MS", 10_000),
  keepAlive: true,
  application_name: process.env.DB_APP_NAME ?? "flatlogic-backend",
  query_timeout: envNumber("DB_QUERY_TIMEOUT_MS", 15_000),
  statement_timeout: envNumber("DB_STATEMENT_TIMEOUT_MS", 15_000),
  ssl: resolveSsl(),
});
export const db = drizzle(pool, { schema });
export * from "./schema";

View File

@ -1,21 +1,29 @@
import { pgTable, serial, text, integer, timestamp } from "drizzle-orm/pg-core";
import { pgTable, serial, text, integer, timestamp, index } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { z } from "zod/v4";
export const categoriesTable = pgTable("categories", {
id: serial("id").primaryKey(),
name: text("name").notNull(),
name_en: text("name_en"),
slug: text("slug"),
icon: text("icon"),
image_url: text("image_url"),
sort_order: integer("sort_order").notNull().default(0),
parent_id: integer("parent_id"),
source: text("source").default("extra"),
shein_cat_id: text("shein_cat_id"),
shein_url: text("shein_url"),
created_at: timestamp("created_at").defaultNow(),
});
// Product categories, covering both the native ("extra") catalog and
// SHEIN-imported categories. Hierarchy is expressed via parent_id
// (presumably referencing categories.id — no FK is declared; verify callers).
export const categoriesTable = pgTable(
  "categories",
  {
    id: serial("id").primaryKey(),
    name: text("name").notNull(),               // primary display name
    name_en: text("name_en"),                   // optional English name
    slug: text("slug"),
    icon: text("icon"),
    image_url: text("image_url"),
    sort_order: integer("sort_order").notNull().default(0),
    parent_id: integer("parent_id"),            // null for top-level categories
    source: text("source").default("extra"),    // origin system of the row
    shein_cat_id: text("shein_cat_id"),         // upstream SHEIN category id, when imported
    shein_url: text("shein_url"),
    created_at: timestamp("created_at").defaultNow(),
  },
  (table) => ({
    // Support slug lookups plus filtering by source and parent.
    slugIdx: index("categories_slug_idx").on(table.slug),
    sourceIdx: index("categories_source_idx").on(table.source),
    parentIdx: index("categories_parent_idx").on(table.parent_id),
  }),
);
export const insertCategorySchema = createInsertSchema(categoriesTable).omit({ id: true, created_at: true });
export type InsertCategory = z.infer<typeof insertCategorySchema>;

View File

@ -10,3 +10,4 @@ export * from "./admin";
export * from "./support";
export * from "./offers";
export * from "./users";
export * from "./integration-events";

View File

@ -0,0 +1,29 @@
import { pgTable, serial, text, jsonb, timestamp, integer, index } from "drizzle-orm/pg-core";
// Audit log for ingestion traffic (bulk imports, webhooks): one row per
// integration event, with per-item success/failure counts and the raw payload.
export const integrationEventsTable = pgTable(
  "integration_events",
  {
    id: serial("id").primaryKey(),
    source: text("source").notNull(),            // originating system (e.g. "shein")
    event_type: text("event_type").notNull(),
    status: text("status").notNull().default("received"),
    external_id: text("external_id"),
    dedupe_key: text("dedupe_key"),              // idempotency key for duplicate deliveries
    items_total: integer("items_total").notNull().default(0),
    items_succeeded: integer("items_succeeded").notNull().default(0),
    items_failed: integer("items_failed").notNull().default(0),
    error: text("error"),                        // null unless processing failed
    payload: jsonb("payload").$type<Record<string, unknown>>().default({}),
    created_at: timestamp("created_at").defaultNow(),
    processed_at: timestamp("processed_at"),     // null until processing completes
  },
  (table) => ({
    // Common query paths: by source, by status, recency, and dedupe lookups.
    sourceIdx: index("integration_events_source_idx").on(table.source),
    statusIdx: index("integration_events_status_idx").on(table.status),
    createdAtIdx: index("integration_events_created_at_idx").on(table.created_at),
    dedupeKeyIdx: index("integration_events_dedupe_key_idx").on(table.dedupe_key),
  }),
);

export type IntegrationEvent = typeof integrationEventsTable.$inferSelect;
export type InsertIntegrationEvent = typeof integrationEventsTable.$inferInsert;

View File

@ -1,4 +1,4 @@
import { pgTable, serial, text, integer, numeric, boolean, jsonb, timestamp } from "drizzle-orm/pg-core";
import { pgTable, serial, text, integer, numeric, boolean, jsonb, timestamp, index, uniqueIndex } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { z } from "zod/v4";
import { categoriesTable } from "./categories";
@ -10,35 +10,55 @@ export type ProductVariant = {
sku?: string;
};
export const productsTable = pgTable("products", {
id: serial("id").primaryKey(),
name: text("name").notNull(),
name_en: text("name_en"),
short_description: text("short_description"),
description: text("description"),
brand: text("brand"),
subcategory: text("subcategory"),
sku: text("sku"),
category_id: integer("category_id").notNull().references(() => categoriesTable.id),
price: numeric("price", { precision: 10, scale: 2 }).notNull(),
original_price: numeric("original_price", { precision: 10, scale: 2 }),
images: jsonb("images").$type<string[]>().default([]),
sizes: jsonb("sizes").$type<string[]>().default([]),
colors: jsonb("colors").$type<string[]>().default([]),
specs: jsonb("specs").$type<Record<string, string>>().default({}),
marketing_points: jsonb("marketing_points").$type<string[]>().default([]),
variants: jsonb("variants").$type<ProductVariant[]>().default([]),
tags: jsonb("tags").$type<string[]>().default([]),
stock: integer("stock").notNull().default(0),
rating: numeric("rating", { precision: 3, scale: 2 }).default("0"),
review_count: integer("review_count").default(0),
is_trending: boolean("is_trending").default(false),
is_bestseller: boolean("is_bestseller").default(false),
is_new: boolean("is_new").default(true),
is_top_rated: boolean("is_top_rated").default(false),
created_at: timestamp("created_at").defaultNow(),
updated_at: timestamp("updated_at").defaultNow(),
});
// Product catalog spanning the native ("extra") catalog and externally
// synced sources; (source, external_id) uniquely identifies imported rows.
export const productsTable = pgTable(
  "products",
  {
    id: serial("id").primaryKey(),
    source: text("source").notNull().default("extra"),    // origin system of the row
    external_id: text("external_id"),                     // upstream product id (unique per source)
    source_url: text("source_url"),
    currency: text("currency").notNull().default("SAR"),
    availability: text("availability").notNull().default("unknown"),
    name: text("name").notNull(),
    name_en: text("name_en"),
    short_description: text("short_description"),
    description: text("description"),
    brand: text("brand"),
    subcategory: text("subcategory"),
    sku: text("sku"),
    category_id: integer("category_id").notNull().references(() => categoriesTable.id),
    price: numeric("price", { precision: 10, scale: 2 }).notNull(),
    original_price: numeric("original_price", { precision: 10, scale: 2 }),  // pre-discount price
    images: jsonb("images").$type<string[]>().default([]),
    sizes: jsonb("sizes").$type<string[]>().default([]),
    colors: jsonb("colors").$type<string[]>().default([]),
    specs: jsonb("specs").$type<Record<string, string>>().default({}),
    marketing_points: jsonb("marketing_points").$type<string[]>().default([]),
    variants: jsonb("variants").$type<ProductVariant[]>().default([]),
    tags: jsonb("tags").$type<string[]>().default([]),
    metadata: jsonb("metadata").$type<Record<string, unknown>>().default({}),  // free-form sync metadata
    stock: integer("stock").notNull().default(0),
    rating: numeric("rating", { precision: 3, scale: 2 }).default("0"),
    review_count: integer("review_count").default(0),
    is_trending: boolean("is_trending").default(false),
    is_bestseller: boolean("is_bestseller").default(false),
    is_new: boolean("is_new").default(true),
    is_top_rated: boolean("is_top_rated").default(false),
    last_synced_at: timestamp("last_synced_at").defaultNow(),  // last external sync time
    created_at: timestamp("created_at").defaultNow(),
    updated_at: timestamp("updated_at").defaultNow(),
  },
  (table) => ({
    // Idempotent upserts from external feeds key on (source, external_id).
    sourceExternalIdUnique: uniqueIndex("products_source_external_id_uidx").on(
      table.source,
      table.external_id,
    ),
    sourceSkuIdx: index("products_source_sku_idx").on(table.source, table.sku),
    categoryIdx: index("products_category_idx").on(table.category_id),
    brandIdx: index("products_brand_idx").on(table.brand),
    updatedAtIdx: index("products_updated_at_idx").on(table.updated_at),
  }),
);
export const insertProductSchema = createInsertSchema(productsTable).omit({ id: true, created_at: true, updated_at: true });
export type InsertProduct = z.infer<typeof insertProductSchema>;

View File

@ -6,7 +6,11 @@
"preinstall": "sh -c 'rm -f package-lock.json yarn.lock; case \"$npm_config_user_agent\" in pnpm/*) ;; *) echo \"Use pnpm instead\" >&2; exit 1 ;; esac'",
"build": "pnpm run typecheck && pnpm -r --if-present run build",
"typecheck:libs": "tsc --build",
"typecheck": "pnpm run typecheck:libs && pnpm -r --filter \"./artifacts/**\" --filter \"./scripts\" --if-present run typecheck"
"typecheck": "pnpm run typecheck:libs && pnpm -r --filter \"./artifacts/**\" --filter \"./scripts\" --if-present run typecheck",
"api:build": "pnpm --filter @workspace/api-server run build",
"api:typecheck": "pnpm --filter @workspace/api-server run typecheck",
"db:push": "pnpm --filter @workspace/db run push",
"deploy:flatlogic": "bash ./scripts/flatlogic-deploy.sh"
},
"private": true,
"devDependencies": {

58
scripts/flatlogic-deploy.sh Executable file
View File

@ -0,0 +1,58 @@
#!/usr/bin/env bash
set -euo pipefail

# Deploy script for the Flatlogic VM:
#   1. hard-reset the working tree to origin/<DEPLOY_BRANCH>
#   2. install dependencies with the pinned pnpm, run typecheck + build
#   3. push the Drizzle schema when DATABASE_URL is configured
#   4. (re)start the storefront and API processes under PM2
#   5. health-check both services
#
# Fix over the original: the health checks ran a single curl immediately
# after (re)starting PM2, racing the service boot — now they poll with a
# bounded retry so a slow start does not fail the deploy spuriously.

PROJECT_DIR="${PROJECT_DIR:-$(pwd)}"
DEPLOY_BRANCH="${DEPLOY_BRANCH:-main}"
STORE_PORT="${STORE_PORT:-3001}"
API_PORT="${API_PORT:-8080}"
HEALTH_RETRIES="${HEALTH_RETRIES:-10}"
HEALTH_DELAY_SECONDS="${HEALTH_DELAY_SECONDS:-3}"

# wait_for_url <url> — poll until the URL responds or retries are exhausted.
wait_for_url() {
  local url="$1"
  local attempt
  for ((attempt = 1; attempt <= HEALTH_RETRIES; attempt++)); do
    if curl -fsS "$url" >/dev/null 2>&1; then
      return 0
    fi
    echo "[deploy] Waiting for ${url} (attempt ${attempt}/${HEALTH_RETRIES})"
    sleep "$HEALTH_DELAY_SECONDS"
  done
  echo "[deploy] Health check failed for ${url}" >&2
  return 1
}

cd "$PROJECT_DIR"

echo "[deploy] Updating repository to origin/${DEPLOY_BRANCH}"
git fetch origin "$DEPLOY_BRANCH"
git checkout "$DEPLOY_BRANCH"
git reset --hard "origin/$DEPLOY_BRANCH"

echo "[deploy] Ensuring pnpm is available"
corepack enable
corepack prepare pnpm@10.16.1 --activate

echo "[deploy] Installing dependencies"
pnpm install --frozen-lockfile

echo "[deploy] Running checks"
pnpm typecheck
pnpm build

if [[ -n "${DATABASE_URL:-}" ]]; then
  echo "[deploy] Applying Drizzle schema to external PostgreSQL/Supabase"
  pnpm --filter @workspace/db run push
else
  echo "[deploy] DATABASE_URL is missing; skipping DB schema push"
fi

echo "[deploy] Restarting storefront"
if pm2 describe extra-store >/dev/null 2>&1; then
  PORT="$STORE_PORT" pm2 restart extra-store --update-env
else
  PORT="$STORE_PORT" pm2 start pnpm --name extra-store --interpreter bash -- -lc "pnpm --filter @workspace/extra-store run dev"
fi

# Only run the API when its required secrets are all present; otherwise the
# storefront still deploys and the API process is skipped.
if [[ -n "${DATABASE_URL:-}" && -n "${ADMIN_TOKEN:-}" && -n "${API_INGEST_KEY:-}" ]]; then
  echo "[deploy] Restarting API backend"
  if pm2 describe flatlogic-api >/dev/null 2>&1; then
    PORT="$API_PORT" NODE_ENV=production pm2 restart flatlogic-api --update-env
  else
    PORT="$API_PORT" NODE_ENV=production pm2 start pnpm --name flatlogic-api --interpreter bash -- -lc "pnpm --filter @workspace/api-server run start"
  fi
else
  echo "[deploy] API secrets are incomplete; skipping API process start"
fi

echo "[deploy] Health checks"
wait_for_url "http://127.0.0.1:${STORE_PORT}/"
if pm2 describe flatlogic-api >/dev/null 2>&1; then
  wait_for_url "http://127.0.0.1:${API_PORT}/api/healthz"
fi

echo "[deploy] Done"

12
scripts/post-merge.sh Normal file → Executable file
View File

@ -1,4 +1,12 @@
#!/bin/bash
set -e
set -euo pipefail
corepack enable
corepack prepare pnpm@10.16.1 --activate
pnpm install --frozen-lockfile
pnpm --filter db push
if [[ -n "${DATABASE_URL:-}" ]]; then
pnpm --filter @workspace/db run push
else
echo "DATABASE_URL is not set; skipping database push"
fi