Compare commits

..

No commits in common. "ai-dev" and "master" have entirely different histories.

723 changed files with 122487 additions and 86506 deletions

5
.gitignore vendored
View File

@ -3,7 +3,4 @@
node_modules/ node_modules/
*/node_modules/ */node_modules/
**/node_modules/ **/node_modules/
*/build/ */build/
package-lock.json
CLAUDE.md
.claude/

View File

@ -9,7 +9,6 @@ RUN yarn build
FROM node:20.15.1-alpine FROM node:20.15.1-alpine
# FFmpeg is bundled via npm package ffmpeg-static
WORKDIR /app WORKDIR /app
COPY backend/package.json backend/yarn.lock ./ COPY backend/package.json backend/yarn.lock ./
RUN yarn install --pure-lockfile RUN yarn install --pure-lockfile

View File

@ -11,11 +11,16 @@ WORKDIR /app/backend
COPY backend/package.json backend/yarn.lock ./ COPY backend/package.json backend/yarn.lock ./
RUN yarn install --pure-lockfile RUN yarn install --pure-lockfile
FROM node:20.15.1-alpine AS app-shell-deps
RUN apk add --no-cache git
WORKDIR /app/app-shell
COPY app-shell/package.json app-shell/yarn.lock ./
RUN yarn install --pure-lockfile
# Nginx setup and application build # Nginx setup and application build
FROM node:20.15.1-alpine AS build FROM node:20.15.1-alpine AS build
RUN apk add --no-cache git nginx curl RUN apk add --no-cache git nginx curl
RUN apk add --no-cache lsof procps RUN apk add --no-cache lsof procps
# FFmpeg is bundled via npm package ffmpeg-static
RUN yarn global add concurrently RUN yarn global add concurrently
RUN apk add --no-cache \ RUN apk add --no-cache \
@ -38,9 +43,11 @@ ENV PATH /root/.yarn/bin:/root/.config/yarn/global/node_modules/.bin:$PATH
WORKDIR /app WORKDIR /app
COPY --from=frontend-deps /app/frontend /app/frontend COPY --from=frontend-deps /app/frontend /app/frontend
COPY --from=backend-deps /app/backend /app/backend COPY --from=backend-deps /app/backend /app/backend
COPY --from=app-shell-deps /app/app-shell /app/app-shell
COPY frontend /app/frontend COPY frontend /app/frontend
COPY backend /app/backend COPY backend /app/backend
COPY app-shell /app/app-shell
COPY docker /app/docker COPY docker /app/docker
# Copy all files from root to /app # Copy all files from root to /app
@ -61,6 +68,8 @@ EXPOSE 8080
ENV NODE_ENV=dev_stage ENV NODE_ENV=dev_stage
ENV FRONT_PORT=3001 ENV FRONT_PORT=3001
ENV BACKEND_PORT=3000 ENV BACKEND_PORT=3000
ENV APP_SHELL_PORT=4000
CMD ["sh", "-c", "\ CMD ["sh", "-c", "\
yarn --cwd /app/frontend dev & echo $! > /app/pids/frontend.pid && \ yarn --cwd /app/frontend dev & echo $! > /app/pids/frontend.pid && \
@ -71,5 +80,6 @@ CMD ["sh", "-c", "\
while ! nc -z localhost ${BACKEND_PORT}; do \ while ! nc -z localhost ${BACKEND_PORT}; do \
sleep 2; \ sleep 2; \
done && \ done && \
echo 'Backend and frontend are up.' && \ echo 'Backend is up. Starting app_shell for Git check...' && \
wait $NGINX_PID"] yarn --cwd /app/app-shell start && \
wait $NGINX_PID"]

393
README.md
View File

@ -1,281 +1,244 @@
# Tour Builder Platform # Tour Builder Platform
A web application for building and managing interactive virtual tours with drag-and-drop editing, video transitions, and PWA offline support.
## Features ## This project was generated by [Flatlogic Platform](https://flatlogic.com).
- **Visual Tour Builder** - Drag-and-drop editor for creating interactive tour pages
- **Video Transitions** - Smooth video-based transitions between pages with forward/reverse playback - Frontend: [React.js](https://flatlogic.com/templates?framework%5B%5D=react&sort=default)
- **Multiple Element Types** - Navigation buttons, hotspots, galleries, tooltips, video/audio players
- **Three-Tier Publishing** - Dev → Stage → Production workflow with environment isolation
- **Asset Preloading** - Direct S3 download via presigned URLs for instant page navigation
- **PWA Offline Mode** - Tours work offline with Cache API and IndexedDB storage
- **Role-Based Access Control** - Granular permissions system
- **Team Collaboration** - Project memberships with role-based access
- **Asset Management** - Upload, optimize, and manage media assets with variants
- **Multi-Language Support** - i18n ready
## Tech Stack
| Layer | Technology |
|-------|------------| - Backend: [NodeJS](https://flatlogic.com/templates?backend%5B%5D=nodejs&sort=default)
| Frontend | Next.js 15, React 19, TypeScript, Redux Toolkit, Tailwind CSS |
| Backend | Node.js, Express, Sequelize ORM | <details><summary>Backend Folder Structure</summary>
| Database | PostgreSQL |
| Authentication | JWT, Google OAuth, Microsoft OAuth | The generated application has the following backend folder structure:
| File Storage | AWS S3 / Google Cloud Storage (direct presigned URL access) |
| PWA | Serwist Service Worker, Cache API, IndexedDB (Dexie) |
## Quick Start `src` folder which contains your working files that will be used later to create the build. The src folder contains folders as:
### Prerequisites - `auth` - config the library for authentication and authorization;
- Node.js 18+ - `db` - contains such folders as:
- PostgreSQL 14+
- Yarn (backend) / npm (frontend)
### Database Setup (First Time) - `api` - documentation that is automatically generated by jsdoc or other tools;
```bash - `migrations` - is a skeleton of the database or all the actions that users do with the database;
# Create database user and database
PGPASSWORD='postgres' psql -U postgres -c "CREATE USER app_39215 WITH PASSWORD 'your-password';"
PGPASSWORD='postgres' psql -U postgres -c "CREATE DATABASE app_39215 OWNER app_39215;"
```
### Start Backend (Terminal 1) - `models`- what will represent the database for the backend;
```bash - `seeders` - the entity that creates the data for the database.
cd backend
yarn install
export $(cat .env | xargs) && NODE_ENV=production yarn start
```
Backend runs on **http://localhost:8080** - `routes` - this folder would contain all the routes that you have created using Express Router and what they do would be exported from a Controller file;
### Start Frontend (Terminal 2) - `services` - contains such folders as `emails` and `notifications`.
</details>
```bash
cd frontend
npm install
npm run dev
```
Frontend runs on **http://localhost:3000**
- Database: PostgreSQL
### Default Login - app-shel: Core application framework that provides essential infrastructure services
for the entire application.
-----------------------
### We offer 2 ways how to start the project locally: by running Frontend and Backend or with Docker.
-----------------------
After seeding, login with credentials configured in `backend/.env`: ## To start the project:
- Email: `ADMIN_EMAIL` (default: admin@flatlogic.com)
- Password: `ADMIN_PASS` (default: 88dbeaf8)
## Project Structure ### Backend:
``` > Please change current folder: `cd backend`
├── backend/ # Node.js/Express API server
│ ├── src/
│ │ ├── routes/ # REST API endpoints
│ │ ├── services/ # Business logic
│ │ ├── db/
│ │ │ ├── models/ # Sequelize models
│ │ │ ├── api/ # Database access layer
│ │ │ ├── migrations/ # Schema migrations
│ │ │ └── seeders/ # Seed data
│ │ ├── auth/ # Passport.js authentication
│ │ └── middlewares/ # Express middlewares
│ └── README.md # Backend documentation
├── frontend/ # Next.js React application
│ ├── src/
│ │ ├── pages/ # Next.js pages
│ │ ├── components/ # React components
│ │ ├── stores/ # Redux Toolkit slices
│ │ ├── hooks/ # Custom React hooks
│ │ ├── types/ # TypeScript definitions
│ │ └── lib/ # Utility libraries
│ └── README.md # Frontend documentation
└── docker/ # Docker Compose setup
├── docker-compose.yml
├── start-backend.sh
├── wait-for-it.sh
└── README.md # Docker documentation
```
## Key Workflows
### Tour Creation
1. Create a new project in the dashboard #### Install local dependencies:
2. Open the **Constructor** (`/constructor?projectId=...`) `yarn install`
3. Add pages with background images/videos
4. Place interactive elements (buttons, hotspots, etc.)
5. Configure navigation targets and transitions on elements
6. Preview in **Runtime** mode
7. Publish: Dev → Stage → Production
### Publishing Flow ------------
Three-tier environment model with separate content per environment: #### Adjust local db:
##### 1. Install postgres:
``` MacOS:
Dev Environment Stage Environment Production Environment
│ │ │
/constructor?projectId= /p/[projectSlug]/stage /p/[projectSlug]
(editing mode) (preview) (public access)
│ │ │
└── Save to Stage ──────►└── Publish ─────────────►│
```
| Action | Endpoint | Description | `brew install postgres`
|--------|----------|-------------|
| Save to Stage | `POST /api/publish/save-to-stage` | Copy dev pages to stage |
| Publish | `POST /api/publish` | Copy stage pages to production |
Pages have an `environment` field (`dev`, `stage`, `production`) that determines visibility. > if you dont have brew please install it (https://brew.sh) and repeat step `brew install postgres`.
### Element Types Ubuntu:
| Type | Description | `sudo apt update`
|------|-------------|
| `navigation_next` | Forward navigation button |
| `navigation_prev` | Back navigation button |
| `spot` | Clickable hotspot area |
| `description` | Text description overlay |
| `tooltip` | Hover tooltip |
| `gallery` | Image gallery |
| `carousel` | Image carousel |
| `logo` | Logo element |
| `video_player` | Embedded video player |
| `audio_player` | Audio player |
| `popup` | Modal popup |
**Element Defaults:** Each element type has configurable default settings that follow a three-tier hierarchy: `sudo apt install postgresql postgresql-contrib`
- **Global** (`element_type_defaults`) - Platform-wide defaults (auto-seeded)
- **Project** (`project_element_defaults`) - Per-project overrides (auto-snapshotted on project creation)
- **Instance** (`tour_pages.ui_schema_json`) - Page-specific element values
## API Overview ##### 2. Create db and admin user:
Before run and test connection, make sure you have created a database as described in the above configuration. You can use the `psql` command to create a user and database.
Base URL: `http://localhost:8080/api` `psql postgres --u postgres`
| Endpoint | Description | Next, type this command for creating a new user with password then give access for creating the database.
|----------|-------------|
| `POST /auth/signin/local` | Login |
| `POST /auth/signup` | Register |
| `GET /auth/me` | Current user |
| `GET /projects` | List projects |
| `POST /publish/save-to-stage` | Copy dev → stage |
| `POST /publish` | Copy stage → production |
| `GET /tour_pages` | List tour pages |
| `GET /assets` | List assets |
| `POST /file/presign` | Get S3 presigned URLs for asset download (public) |
Full API documentation: `http://localhost:8080/api-docs` (Swagger) `postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
## Docker Setup `postgres-# ALTER ROLE admin CREATEDB;`
```bash Quit `psql` then log in again using the new user that previously created.
cd docker
chmod +x start-backend.sh wait-for-it.sh
# Start with fresh database `postgres-# \q`
rm -rf data && docker-compose up
# Or keep existing data `psql postgres -U admin`
docker-compose up
```
Access at `http://localhost:3000` Type this command to creating a new database.
## Environment Variables `postgres=> CREATE DATABASE db_{your_project_name};`
### Backend (`backend/.env`) Then give that new user privileges to the new database then quit the `psql`.
```env `postgres=> GRANT ALL PRIVILEGES ON DATABASE db_{your_project_name} TO admin;`
# Database
DB_HOST=localhost
DB_PORT=5432
DB_NAME=app_39215
DB_USER=app_39215
DB_PASSWORD=your-password
# JWT `postgres=> \q`
SECRET_KEY=your-secret-key
# Admin (for seeding)
ADMIN_EMAIL=admin@example.com
ADMIN_PASS=admin-password
# AWS S3 (optional)
AWS_S3_BUCKET=your-bucket
AWS_S3_REGION=us-east-1
AWS_ACCESS_KEY_ID=your-key
AWS_SECRET_ACCESS_KEY=your-secret
# OAuth (optional)
GOOGLE_CLIENT_ID=...
GOOGLE_CLIENT_SECRET=...
MS_CLIENT_ID=...
MS_CLIENT_SECRET=...
# Email - AWS SES (optional)
EMAIL_USER=...
EMAIL_PASS=...
```
### Frontend (`frontend/.env.local`) ------------
```env
NEXT_PUBLIC_BACK_API=http://localhost:8080/api
```
## Common Commands #### Create database:
`yarn db:create`
### Backend #### Start production build:
`yarn start`
```bash
cd backend
yarn start # Start server (migrate + seed + watch)
yarn db:migrate # Run migrations
yarn db:seed # Seed data
yarn db:reset # Drop + create + migrate + seed
yarn lint # ESLint
```
### Frontend
```bash
cd frontend
npm run dev # Development server
npm run build # Production build
npm run lint # ESLint
npm run format # Prettier
```
## Troubleshooting
### Connection Refused ### Frontend:
1. Ensure PostgreSQL is running > Please change current folder: `cd frontend`
2. Check that port 5432 (db), 8080 (backend), 3000 (frontend) are available
3. Verify database credentials in `.env`
### Database Issues
```bash
# Reset database completely
cd backend
yarn db:reset
```
### Permission Denied
Ensure the database user has proper privileges: ## To start the project with Docker:
### Description:
```sql The project contains the **docker folder** and the `Dockerfile`.
GRANT ALL PRIVILEGES ON DATABASE app_39215 TO app_39215;
```
## License The `Dockerfile` is used to Deploy the project to Google Cloud.
Proprietary - Tour Builder Platform The **docker folder** contains a couple of helper scripts:
- `docker-compose.yml` (all our services: web, backend, db are described here)
- `start-backend.sh` (starts backend, but only after the database)
- `wait-for-it.sh` (imported from https://github.com/vishnubob/wait-for-it)
> To avoid breaking the application, we recommend you don't edit the following files: everything that includes the **docker folder** and `Dokerfile`.
## Run services:
1. Install docker compose (https://docs.docker.com/compose/install/)
2. Move to `docker` folder. All next steps should be done from this folder.
``` cd docker ```
3. Make executables from `wait-for-it.sh` and `start-backend.sh`:
``` chmod +x start-backend.sh && chmod +x wait-for-it.sh ```
4. Download dependend projects for services.
5. Review the docker-compose.yml file. Make sure that all services have Dockerfiles. Only db service doesn't require a Dockerfile.
6. Make sure you have needed ports (see them in `ports`) available on your local machine.
7. Start services:
7.1. With an empty database `rm -rf data && docker-compose up`
7.2. With a stored (from previus runs) database data `docker-compose up`
8. Check http://localhost:3000
9. Stop services:
9.1. Just press `Ctr+C`
## Most common errors:
1. `connection refused`
There could be many reasons, but the most common are:
- The port is not open on the destination machine.
- The port is open on the destination machine, but its backlog of pending connections is full.
- A firewall between the client and server is blocking access (also check local firewalls).
After checking for firewalls and that the port is open, use telnet to connect to the IP/port to test connectivity. This removes any potential issues from your application.
***MacOS:***
If you suspect that your SSH service might be down, you can run this command to find out:
`sudo service ssh status`
If the command line returns a status of down, then youve likely found the reason behind your connectivity error.
***Ubuntu:***
Sometimes a connection refused error can also indicate that there is an IP address conflict on your network. You can search for possible IP conflicts by running:
`arp-scan -I eth0 -l | grep <ipaddress>`
`arp-scan -I eth0 -l | grep <ipaddress>`
and
`arping <ipaddress>`
2. `yarn db:create` creates database with the assembled tables (on MacOS with Postgres database)
The workaround - put the next commands to your Postgres database terminal:
`DROP SCHEMA public CASCADE;`
`CREATE SCHEMA public;`
`GRANT ALL ON SCHEMA public TO postgres;`
`GRANT ALL ON SCHEMA public TO public;`
Afterwards, continue to start your project in the backend directory by running:
`yarn start`

Binary file not shown.

After

Width:  |  Height:  |  Size: 73 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 93 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 91 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

View File

@ -1,6 +1,6 @@
DB_NAME=app_39948 DB_NAME=app_39215
DB_USER=app_39948 DB_USER=app_39215
DB_PASS=d82cf4a2-477c-4a75-acec-ec606e0b8a01 DB_PASS=88dbeaf8-e906-405e-9e41-c3baadeda5c6
DB_HOST=127.0.0.1 DB_HOST=127.0.0.1
DB_PORT=5432 DB_PORT=5432
PORT=3000 PORT=3000

View File

@ -10,7 +10,6 @@ module.exports = {
'import' 'import'
], ],
rules: { rules: {
'import/no-unresolved': 'error', 'import/no-unresolved': 'error'
'no-unused-vars': ['error', { argsIgnorePattern: '^_' }]
} }
}; };

View File

@ -1,8 +1,6 @@
FROM node:20.15.1-alpine FROM node:20.15.1-alpine
# Install bash and FFmpeg for video processing (reversed video generation) RUN apk update && apk add bash
RUN apk update && apk add --no-cache bash ffmpeg
# Create app directory # Create app directory
WORKDIR /usr/src/app WORKDIR /usr/src/app

View File

@ -1,367 +1,56 @@
# Tour Builder Platform - Backend
Node.js/Express REST API server with Sequelize ORM for the Tour Builder Platform. #Tour Builder Platform - template backend,
## Tech Stack #### Run App on local machine:
- **Runtime**: Node.js 18+ ##### Install local dependencies:
- **Framework**: Express 4.x - `yarn install`
- **Database**: PostgreSQL with Sequelize ORM
- **Authentication**: Passport.js (JWT, Google OAuth, Microsoft OAuth)
- **File Storage**: AWS S3 / Google Cloud Storage / Local filesystem
- **Email**: Nodemailer with AWS SES
- **API Docs**: Swagger/OpenAPI
## Prerequisites ------------
- Node.js 18+ ##### Adjust local db:
- PostgreSQL 14+ ###### 1. Install postgres:
- Yarn package manager - MacOS:
- `brew install postgres`
## Quick Start - Ubuntu:
- `sudo apt update`
- `sudo apt install postgresql postgresql-contrib`
```bash ###### 2. Create db and admin user:
# Install dependencies - Before run and test connection, make sure you have created a database as described in the above configuration. You can use the `psql` command to create a user and database.
yarn install - `psql postgres --u postgres`
# Create database (first time only) - Next, type this command for creating a new user with password then give access for creating the database.
yarn db:create - `postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
- `postgres-# ALTER ROLE admin CREATEDB;`
# Start server (runs migrations, seeds, and watches for changes) - Quit `psql` then log in again using the new user that previously created.
export $(cat .env | xargs) && NODE_ENV=production yarn start - `postgres-# \q`
``` - `psql postgres -U admin`
The server runs on **port 8080** by default. - Type this command to creating a new database.
- `postgres=> CREATE DATABASE db_tour_builder_platform;`
## Environment Variables - Then give that new user privileges to the new database then quit the `psql`.
- `postgres=> GRANT ALL PRIVILEGES ON DATABASE db_tour_builder_platform TO admin;`
- `postgres=> \q`
------------
Create a `.env` file in the backend directory: #### Api Documentation (Swagger)
```env http://localhost:8080/api-docs (local host)
# Database (required)
DB_HOST=localhost
DB_PORT=5432
DB_NAME=app_39215
DB_USER=app_39215
DB_PASSWORD=your_password
# JWT Secret (required) http://host_name/api-docs
SECRET_KEY=your-secret-key
# Admin credentials (for seeding) ------------
ADMIN_EMAIL=admin@example.com
ADMIN_PASS=admin_password
USER_PASS=user_password
# AWS S3 (optional - for file storage) ##### Setup database tables or update after schema change
AWS_S3_BUCKET=your-bucket - `yarn db:migrate`
AWS_S3_REGION=us-east-1
AWS_ACCESS_KEY_ID=your-access-key
AWS_SECRET_ACCESS_KEY=your-secret-key
AWS_S3_PREFIX=your-prefix
# Google OAuth (optional) ##### Seed the initial data (admin accounts, relevant for the first setup):
GOOGLE_CLIENT_ID=your-client-id - `yarn db:seed`
GOOGLE_CLIENT_SECRET=your-client-secret
##### Start build:
# Microsoft OAuth (optional) - `yarn start`
MS_CLIENT_ID=your-client-id
MS_CLIENT_SECRET=your-client-secret
# Email - AWS SES (optional)
EMAIL_USER=ses-smtp-user
EMAIL_PASS=ses-smtp-password
# OpenAI (optional)
GPT_KEY=your-openai-key
```
## Project Structure
```
backend/src/
├── index.js # Express app entry point
├── config.js # Environment configuration
├── helpers.js # Utility functions (wrapAsync)
├── auth/ # Passport.js authentication strategies
│ └── auth.js # JWT, Google, Microsoft strategies
├── db/
│ ├── db.config.js # Database connection config (per environment)
│ ├── models/ # Sequelize model definitions (16 models)
│ ├── api/ # Database access layer (CRUD per model)
│ ├── migrations/ # Database migrations
│ └── seeders/ # Seed data (admin users, permissions, roles)
├── routes/ # Express route handlers (22 routes)
│ ├── auth.js # Authentication endpoints
│ ├── projects.js # Project CRUD
│ ├── tour_pages.js # Tour page management
│ ├── assets.js # Asset management
│ ├── file.js # File upload/download, presigned URLs
│ ├── publish.js # Publishing workflow
│ ├── search.js # Global search
│ └── ... # Other entity routes
├── services/ # Business logic layer (21 services)
│ ├── auth.js # Auth service (JWT, OAuth)
│ ├── publish.js # Publishing workflow logic
│ ├── file.js # File storage abstraction
│ ├── search.js # Global search service
│ ├── email/ # Email templates and sending
│ ├── notifications/ # Error classes and i18n messages
│ └── ... # Other entity services
├── middlewares/
│ ├── check-permissions.js # RBAC permission checking
│ ├── runtime-context.js # Environment detection from headers
│ ├── runtime-public.js # Public runtime access (no auth)
│ ├── upload.js # File upload handling (multer)
│ └── rateLimiter.js # Rate limiting for API endpoints
├── factories/
│ ├── router.factory.js # Generate CRUD routes
│ └── service.factory.js # Generate service classes
└── utils/
├── env-validation.js # Environment variable validation (Joi)
├── errors.js # Custom error classes
├── logger.js # Pino logger configuration
└── index.js # Utils barrel export
```
## Database Setup
### Create Database User and Database
```bash
# Connect to PostgreSQL
psql postgres -U postgres
# Create user
CREATE ROLE app_39215 WITH LOGIN PASSWORD 'your-password';
ALTER ROLE app_39215 CREATEDB;
# Create database
CREATE DATABASE app_39215 OWNER app_39215;
GRANT ALL PRIVILEGES ON DATABASE app_39215 TO app_39215;
\q
```
### Available Commands
```bash
yarn db:create # Create database
yarn db:drop # Drop database
yarn db:migrate # Run pending migrations
yarn db:migrate:undo # Undo last migration
yarn db:migrate:undo:all # Undo all migrations
yarn db:migrate:status # Show migration status
yarn db:seed # Run all seeders
yarn db:seed:undo # Undo all seeders
yarn db:reset # Drop, create, migrate, and seed
yarn start # Migrate, seed, and start with watch
yarn lint # Run ESLint
```
## API Documentation
Swagger UI available at: `http://localhost:8080/api-docs`
### Core Endpoints
| Endpoint | Description |
|----------|-------------|
| `POST /api/auth/signin/local` | Email/password login |
| `POST /api/auth/signup` | User registration |
| `GET /api/auth/me` | Current user info (JWT required) |
| `GET /api/auth/signin/google` | Google OAuth login |
| `GET /api/auth/signin/microsoft` | Microsoft OAuth login |
### Entity CRUD Pattern
All entities follow standard REST patterns:
```
GET /api/{entity} # List with pagination & filters
GET /api/{entity}/:id # Get single record
POST /api/{entity} # Create record
PUT /api/{entity}/:id # Update record
DELETE /api/{entity}/:id # Soft delete record
```
### Main Entities
| Entity | Description |
|--------|-------------|
| `projects` | Virtual tour projects |
| `tour_pages` | Pages within a tour (elements, navigation, transitions stored in ui_schema_json) |
| `assets` | Uploaded media files |
| `asset_variants` | Resized/optimized asset versions |
| `element_type_defaults` | Global element default settings |
| `project_element_defaults` | Project-specific element settings |
| `project_audio_tracks` | Background audio for projects |
| `publish_events` | Publishing history and status tracking |
| `pwa_caches` | PWA cache manifests for offline support |
| `presigned_url_requests` | S3 presigned URL request tracking |
| `access_logs` | User access audit trail |
| `users` | User accounts |
| `roles` | User roles |
| `permissions` | Granular permissions |
| `project_memberships` | Team access per project |
### Element Defaults Hierarchy
UI elements use a three-tier defaults system:
```
element_type_defaults (Global)
│ auto-snapshot on project creation
project_element_defaults (Project)
│ applied when creating elements
tour_pages.ui_schema_json (Instance)
```
1. **Global** (`element_type_defaults`) - Platform-wide defaults for 11 element types (navigation, tooltip, gallery, etc.). Auto-seeded on first API access.
2. **Project** (`project_element_defaults`) - Per-project overrides. Automatically snapshotted from global when a project is created. Can be customized independently.
3. **Instance** (`tour_pages.ui_schema_json`) - Page-specific elements with their settings stored inline. Created in constructor with project defaults applied.
**Additional Endpoints:**
- `POST /api/project-element-defaults/:id/reset` - Reset to current global default
- `GET /api/project-element-defaults/:id/diff` - Compare with global default
### Publishing Workflow
Three-tier environment model for content: `dev``stage``production`
```
POST /api/publish/save-to-stage # Copy dev content to stage (body: { projectId })
POST /api/publish # Copy stage content to production (body: { projectId })
```
Pages have an `environment` field (`dev`, `stage`, or `production`) that determines visibility:
- **Constructor** (`/constructor?projectId=`) - Always shows `dev` environment
- **Stage preview** (`/p/[slug]/stage`) - Shows `stage` environment
- **Public runtime** (`/p/[slug]`) - Shows `production` environment
## Authentication
### JWT Authentication
Protected routes require JWT token in Authorization header:
```
Authorization: Bearer <jwt-token>
```
### OAuth Providers
- **Google**: `/api/auth/signin/google`
- **Microsoft**: `/api/auth/signin/microsoft`
## File Storage
Storage provider is auto-detected based on available credentials:
1. **AWS S3** - If `AWS_S3_BUCKET` is configured
2. **Google Cloud Storage** - If GCS credentials are available
3. **Local filesystem** - Fallback (files stored in system temp directory)
### Upload Flow (Presigned URLs)
```
POST /api/file/presigned-url # Get upload URL (authenticated)
PUT {presigned-url} # Upload directly to S3
POST /api/assets # Register asset in database
```
### Download Flow (Direct S3 Access)
For runtime asset preloading, the frontend can request presigned download URLs:
```
POST /api/file/presign # Get download URLs (public endpoint)
Request: { urls: ["assets/img1.jpg", "assets/video.mp4", ...] }
Response: { presignedUrls: { "assets/img1.jpg": "https://s3...", ... } }
```
- **Max URLs per request**: 50
- **URL expiry**: 1 hour
- **Public endpoint**: No authentication required (for runtime playback)
This allows the frontend to download assets directly from S3, bypassing the backend for better performance.
## RBAC (Role-Based Access Control)
### Permission Format
```
{ACTION}_{ENTITY}
```
Actions: `CREATE`, `READ`, `UPDATE`, `DELETE`
Example: `CREATE_PROJECTS`, `READ_TOUR_PAGES`, `UPDATE_ASSETS`
### Default Roles
| Role | Description |
|------|-------------|
| Administrator | Full access to all features (user/role/permission management) |
| Platform Owner | Full project access, user management |
| Account Manager | Project and asset management |
| Tour Designer | Create and edit tours, assets, pages |
| Content Reviewer | Review and update content (read/update access) |
| Analytics Viewer | Read-only access for viewing data |
| Public | Minimal access for public users |
## Environment Detection
### Server Environment (NODE_ENV)
The backend uses `NODE_ENV` to determine database configuration:
| Value | Database | Description |
|-------|----------|-------------|
| `production` | Production config | Live environment |
| `dev_stage` | Staging config | Staging environment |
| (other) | Development config | Local development |
### Content Environment (tour_pages.environment)
Separate from server environment, tour pages have a content environment field:
| Value | Access | Description |
|-------|--------|-------------|
| `dev` | Constructor only | Editing/draft content |
| `stage` | Stage preview | Pre-production review |
| `production` | Public runtime | Published content |
The `X-Runtime-Environment` header (set by frontend) determines which content environment to query. The `runtime-context.js` middleware resolves this for API requests.
## Docker
See `docker/` directory for Docker Compose setup:
```bash
cd docker
docker-compose up
```
## Logging
Uses Pino logger with pretty printing in development:
```javascript
const logger = require('pino')();
logger.info('Server started');
logger.error({ err }, 'Error occurred');
```

18071
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -5,67 +5,53 @@
"start": "npm run db:migrate && npm run db:seed && npm run watch", "start": "npm run db:migrate && npm run db:seed && npm run watch",
"lint": "eslint . --ext .js", "lint": "eslint . --ext .js",
"db:migrate": "sequelize-cli db:migrate", "db:migrate": "sequelize-cli db:migrate",
"db:migrate:undo": "sequelize-cli db:migrate:undo",
"db:migrate:undo:all": "sequelize-cli db:migrate:undo:all",
"db:migrate:status": "sequelize-cli db:migrate:status",
"db:seed": "sequelize-cli db:seed:all", "db:seed": "sequelize-cli db:seed:all",
"db:seed:undo": "sequelize-cli db:seed:undo:all",
"db:drop": "sequelize-cli db:drop", "db:drop": "sequelize-cli db:drop",
"db:create": "sequelize-cli db:create", "db:create": "sequelize-cli db:create",
"db:reset": "npm run db:drop && npm run db:create && npm run db:migrate && npm run db:seed",
"watch": "node watcher.js" "watch": "node watcher.js"
}, },
"dependencies": { "dependencies": {
"@aws-sdk/client-s3": "^3.1011.0", "@aws-sdk/client-s3": "^3.1010.0",
"@aws-sdk/s3-request-presigner": "^3.1016.0", "@google-cloud/storage": "^5.18.2",
"@google-cloud/storage": "^7.0.0", "axios": "^1.6.7",
"axios": "^1.13.0", "bcrypt": "5.1.1",
"bcrypt": "^6.0.0",
"chokidar": "^4.0.3", "chokidar": "^4.0.3",
"cors": "^2.8.6", "cors": "2.8.5",
"csv-parser": "^3.2.0", "csv-parser": "^3.0.0",
"dotenv": "^16.4.0",
"express": "4.18.2", "express": "4.18.2",
"express-validator": "^7.0.0",
"ffmpeg-static": "^5.2.0",
"ffprobe-static": "^3.1.0",
"fluent-ffmpeg": "^2.1.3",
"formidable": "1.2.2", "formidable": "1.2.2",
"helmet": "^8.0.0", "helmet": "4.1.1",
"joi": "^17.13.0",
"json2csv": "^5.0.7", "json2csv": "^5.0.7",
"jsonwebtoken": "^9.0.0", "jsonwebtoken": "8.5.1",
"lodash": "^4.17.23", "lodash": "4.17.21",
"moment": "2.30.1", "moment": "2.30.1",
"multer": "^2.0.0", "multer": "^1.4.4",
"mysql2": "2.2.5", "mysql2": "2.2.5",
"nodemailer": "6.9.9", "nodemailer": "6.9.9",
"passport": "^0.7.0", "passport": "^0.7.0",
"passport-google-oauth2": "^0.2.0", "passport-google-oauth2": "^0.2.0",
"passport-jwt": "^4.0.1", "passport-jwt": "^4.0.1",
"passport-microsoft": "^2.0.0", "passport-microsoft": "^0.1.0",
"pg": "^8.20.0", "pg": "8.4.1",
"pg-hstore": "2.3.4", "pg-hstore": "2.3.4",
"pino": "^9.0.0", "sequelize": "6.35.2",
"pino-pretty": "^11.0.0",
"sequelize": "^6.37.0",
"sequelize-json-schema": "^2.1.1", "sequelize-json-schema": "^2.1.1",
"sqlite": "4.0.15", "sqlite": "4.0.15",
"swagger-jsdoc": "^6.2.8", "swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.0", "swagger-ui-express": "^5.0.0",
"tedious": "^18.6.0" "tedious": "^18.2.4"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"private": true, "private": true,
"devDependencies": { "devDependencies": {
"cross-env": "^7.0.3", "cross-env": "7.0.3",
"eslint": "^8.57.0", "eslint": "^8.23.1",
"eslint-plugin-import": "^2.29.1", "eslint-plugin-import": "^2.29.1",
"mocha": "^10.0.0", "mocha": "8.1.3",
"node-mocks-http": "^1.17.0", "node-mocks-http": "1.9.0",
"nodemon": "^3.0.0", "nodemon": "2.0.5",
"sequelize-cli": "^6.6.5" "sequelize-cli": "6.6.2"
} }
} }

View File

@ -1,10 +1,10 @@
'use strict'; "use strict";
const fs = require('fs'); const fs = require("fs");
const path = require('path'); const path = require("path");
const http = require('http'); const http = require("http");
const https = require('https'); const https = require("https");
const { URL } = require('url'); const { URL } = require("url");
let CONFIG_CACHE = null; let CONFIG_CACHE = null;
@ -40,7 +40,7 @@ async function createResponse(params, options = {}) {
if (!Array.isArray(payload.input) || payload.input.length === 0) { if (!Array.isArray(payload.input) || payload.input.length === 0) {
return { return {
success: false, success: false,
error: 'input_missing', error: "input_missing",
message: 'Parameter "input" is required and must be a non-empty array.', message: 'Parameter "input" is required and must be a non-empty array.',
}; };
} }
@ -56,7 +56,7 @@ async function createResponse(params, options = {}) {
} }
const data = initial.data; const data = initial.data;
if (data && typeof data === 'object' && data.ai_request_id) { if (data && typeof data === "object" && data.ai_request_id) {
const pollTimeout = Number(options.poll_timeout ?? 300); const pollTimeout = Number(options.poll_timeout ?? 300);
const pollInterval = Number(options.poll_interval ?? 5); const pollInterval = Number(options.poll_interval ?? 5);
return await awaitResponse(data.ai_request_id, { return await awaitResponse(data.ai_request_id, {
@ -78,16 +78,16 @@ async function request(pathValue, payload = {}, options = {}) {
if (!resolvedPath) { if (!resolvedPath) {
return { return {
success: false, success: false,
error: 'project_id_missing', error: "project_id_missing",
message: 'PROJECT_ID is not defined; cannot resolve AI proxy endpoint.', message: "PROJECT_ID is not defined; cannot resolve AI proxy endpoint.",
}; };
} }
if (!cfg.projectUuid) { if (!cfg.projectUuid) {
return { return {
success: false, success: false,
error: 'project_uuid_missing', error: "project_uuid_missing",
message: 'PROJECT_UUID is not defined; aborting AI request.', message: "PROJECT_UUID is not defined; aborting AI request.",
}; };
} }
@ -101,21 +101,21 @@ async function request(pathValue, payload = {}, options = {}) {
const verifyTls = resolveVerifyTls(options.verify_tls, cfg.verifyTls); const verifyTls = resolveVerifyTls(options.verify_tls, cfg.verifyTls);
const headers = { const headers = {
Accept: 'application/json', Accept: "application/json",
'Content-Type': 'application/json', "Content-Type": "application/json",
[cfg.projectHeader]: cfg.projectUuid, [cfg.projectHeader]: cfg.projectUuid,
}; };
if (Array.isArray(options.headers)) { if (Array.isArray(options.headers)) {
for (const header of options.headers) { for (const header of options.headers) {
if (typeof header === 'string' && header.includes(':')) { if (typeof header === "string" && header.includes(":")) {
const [name, value] = header.split(':', 2); const [name, value] = header.split(":", 2);
headers[name.trim()] = value.trim(); headers[name.trim()] = value.trim();
} }
} }
} }
const body = JSON.stringify(bodyPayload); const body = JSON.stringify(bodyPayload);
return sendRequest(url, 'POST', body, headers, timeout, verifyTls); return sendRequest(url, "POST", body, headers, timeout, verifyTls);
} }
async function fetchStatus(aiRequestId, options = {}) { async function fetchStatus(aiRequestId, options = {}) {
@ -123,8 +123,8 @@ async function fetchStatus(aiRequestId, options = {}) {
if (!cfg.projectUuid) { if (!cfg.projectUuid) {
return { return {
success: false, success: false,
error: 'project_uuid_missing', error: "project_uuid_missing",
message: 'PROJECT_UUID is not defined; aborting status check.', message: "PROJECT_UUID is not defined; aborting status check.",
}; };
} }
@ -134,19 +134,19 @@ async function fetchStatus(aiRequestId, options = {}) {
const verifyTls = resolveVerifyTls(options.verify_tls, cfg.verifyTls); const verifyTls = resolveVerifyTls(options.verify_tls, cfg.verifyTls);
const headers = { const headers = {
Accept: 'application/json', Accept: "application/json",
[cfg.projectHeader]: cfg.projectUuid, [cfg.projectHeader]: cfg.projectUuid,
}; };
if (Array.isArray(options.headers)) { if (Array.isArray(options.headers)) {
for (const header of options.headers) { for (const header of options.headers) {
if (typeof header === 'string' && header.includes(':')) { if (typeof header === "string" && header.includes(":")) {
const [name, value] = header.split(':', 2); const [name, value] = header.split(":", 2);
headers[name.trim()] = value.trim(); headers[name.trim()] = value.trim();
} }
} }
} }
return sendRequest(url, 'GET', null, headers, timeout, verifyTls); return sendRequest(url, "GET", null, headers, timeout, verifyTls);
} }
async function awaitResponse(aiRequestId, options = {}) { async function awaitResponse(aiRequestId, options = {}) {
@ -165,8 +165,8 @@ async function awaitResponse(aiRequestId, options = {}) {
if (statusResp.success) { if (statusResp.success) {
const data = statusResp.data || {}; const data = statusResp.data || {};
if (data && typeof data === 'object') { if (data && typeof data === "object") {
if (data.status === 'success') { if (data.status === "success") {
isPending = false; isPending = false;
return { return {
success: true, success: true,
@ -174,12 +174,12 @@ async function awaitResponse(aiRequestId, options = {}) {
data: data.response || data, data: data.response || data,
}; };
} }
if (data.status === 'failed') { if (data.status === "failed") {
isPending = false; isPending = false;
return { return {
success: false, success: false,
status: 500, status: 500,
error: String(data.error || 'AI request failed'), error: String(data.error || "AI request failed"),
data, data,
}; };
} }
@ -191,8 +191,8 @@ async function awaitResponse(aiRequestId, options = {}) {
if (Date.now() >= deadline) { if (Date.now() >= deadline) {
return { return {
success: false, success: false,
error: 'timeout', error: "timeout",
message: 'Timed out waiting for AI response.', message: "Timed out waiting for AI response.",
}; };
} }
@ -201,14 +201,13 @@ async function awaitResponse(aiRequestId, options = {}) {
} }
function extractText(response) { function extractText(response) {
const payload = const payload = response && typeof response === "object" ? response.data || response : null;
response && typeof response === 'object' ? response.data || response : null; if (!payload || typeof payload !== "object") {
if (!payload || typeof payload !== 'object') { return "";
return '';
} }
if (Array.isArray(payload.output)) { if (Array.isArray(payload.output)) {
let combined = ''; let combined = "";
for (const item of payload.output) { for (const item of payload.output) {
if (!item || !Array.isArray(item.content)) { if (!item || !Array.isArray(item.content)) {
continue; continue;
@ -216,9 +215,9 @@ function extractText(response) {
for (const block of item.content) { for (const block of item.content) {
if ( if (
block && block &&
typeof block === 'object' && typeof block === "object" &&
block.type === 'output_text' && block.type === "output_text" &&
typeof block.text === 'string' && typeof block.text === "string" &&
block.text.length > 0 block.text.length > 0
) { ) {
combined += block.text; combined += block.text;
@ -234,38 +233,32 @@ function extractText(response) {
payload.choices && payload.choices &&
payload.choices[0] && payload.choices[0] &&
payload.choices[0].message && payload.choices[0].message &&
typeof payload.choices[0].message.content === 'string' typeof payload.choices[0].message.content === "string"
) { ) {
return payload.choices[0].message.content; return payload.choices[0].message.content;
} }
return ''; return "";
} }
function decodeJsonFromResponse(response) { function decodeJsonFromResponse(response) {
const text = extractText(response); const text = extractText(response);
if (!text) { if (!text) {
throw new Error('No text found in AI response.'); throw new Error("No text found in AI response.");
} }
const parsed = parseJson(text); const parsed = parseJson(text);
if (parsed.ok && parsed.value && typeof parsed.value === 'object') { if (parsed.ok && parsed.value && typeof parsed.value === "object") {
return parsed.value; return parsed.value;
} }
const stripped = stripJsonFence(text); const stripped = stripJsonFence(text);
if (stripped !== text) { if (stripped !== text) {
const parsedStripped = parseJson(stripped); const parsedStripped = parseJson(stripped);
if ( if (parsedStripped.ok && parsedStripped.value && typeof parsedStripped.value === "object") {
parsedStripped.ok &&
parsedStripped.value &&
typeof parsedStripped.value === 'object'
) {
return parsedStripped.value; return parsedStripped.value;
} }
throw new Error( throw new Error(`JSON parse failed after stripping fences: ${parsedStripped.error}`);
`JSON parse failed after stripping fences: ${parsedStripped.error}`,
);
} }
throw new Error(`JSON parse failed: ${parsed.error}`); throw new Error(`JSON parse failed: ${parsed.error}`);
@ -278,7 +271,7 @@ function config() {
ensureEnvLoaded(); ensureEnvLoaded();
const baseUrl = process.env.AI_PROXY_BASE_URL || 'https://flatlogic.com'; const baseUrl = process.env.AI_PROXY_BASE_URL || "https://flatlogic.com";
const projectId = process.env.PROJECT_ID || null; const projectId = process.env.PROJECT_ID || null;
let responsesPath = process.env.AI_RESPONSES_PATH || null; let responsesPath = process.env.AI_RESPONSES_PATH || null;
if (!responsesPath && projectId) { if (!responsesPath && projectId) {
@ -293,8 +286,8 @@ function config() {
responsesPath, responsesPath,
projectId, projectId,
projectUuid: process.env.PROJECT_UUID || null, projectUuid: process.env.PROJECT_UUID || null,
projectHeader: process.env.AI_PROJECT_HEADER || 'project-uuid', projectHeader: process.env.AI_PROJECT_HEADER || "project-uuid",
defaultModel: process.env.AI_DEFAULT_MODEL || 'gpt-5-mini', defaultModel: process.env.AI_DEFAULT_MODEL || "gpt-5-mini",
timeout, timeout,
verifyTls, verifyTls,
}; };
@ -303,38 +296,29 @@ function config() {
} }
function buildUrl(pathValue, baseUrl) { function buildUrl(pathValue, baseUrl) {
const trimmed = String(pathValue || '').trim(); const trimmed = String(pathValue || "").trim();
if (trimmed === '') { if (trimmed === "") {
return baseUrl; return baseUrl;
} }
if (trimmed.startsWith('http://') || trimmed.startsWith('https://')) { if (trimmed.startsWith("http://") || trimmed.startsWith("https://")) {
return trimmed; return trimmed;
} }
if (trimmed.startsWith('/')) { if (trimmed.startsWith("/")) {
return `${baseUrl}${trimmed}`; return `${baseUrl}${trimmed}`;
} }
return `${baseUrl}/${trimmed}`; return `${baseUrl}/${trimmed}`;
} }
function resolveStatusPath(aiRequestId, cfg) { function resolveStatusPath(aiRequestId, cfg) {
const basePath = (cfg.responsesPath || '').replace(/\/+$/, ''); const basePath = (cfg.responsesPath || "").replace(/\/+$/, "");
if (!basePath) { if (!basePath) {
return `/ai-request/${encodeURIComponent(String(aiRequestId))}/status`; return `/ai-request/${encodeURIComponent(String(aiRequestId))}/status`;
} }
const normalized = basePath.endsWith('/ai-request') const normalized = basePath.endsWith("/ai-request") ? basePath : `${basePath}/ai-request`;
? basePath
: `${basePath}/ai-request`;
return `${normalized}/${encodeURIComponent(String(aiRequestId))}/status`; return `${normalized}/${encodeURIComponent(String(aiRequestId))}/status`;
} }
function sendRequest( function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls) {
urlString,
method,
body,
headers,
timeoutSeconds,
verifyTls,
) {
return new Promise((resolve) => { return new Promise((resolve) => {
let targetUrl; let targetUrl;
try { try {
@ -342,13 +326,13 @@ function sendRequest(
} catch (err) { } catch (err) {
resolve({ resolve({
success: false, success: false,
error: 'invalid_url', error: "invalid_url",
message: err.message, message: err.message,
}); });
return; return;
} }
const isHttps = targetUrl.protocol === 'https:'; const isHttps = targetUrl.protocol === "https:";
const requestFn = isHttps ? https.request : http.request; const requestFn = isHttps ? https.request : http.request;
const options = { const options = {
protocol: targetUrl.protocol, protocol: targetUrl.protocol,
@ -364,12 +348,12 @@ function sendRequest(
} }
const req = requestFn(options, (res) => { const req = requestFn(options, (res) => {
let responseBody = ''; let responseBody = "";
res.setEncoding('utf8'); res.setEncoding("utf8");
res.on('data', (chunk) => { res.on("data", (chunk) => {
responseBody += chunk; responseBody += chunk;
}); });
res.on('end', () => { res.on("end", () => {
const status = res.statusCode || 0; const status = res.statusCode || 0;
const parsed = parseJson(responseBody); const parsed = parseJson(responseBody);
const payload = parsed.ok ? parsed.value : responseBody; const payload = parsed.ok ? parsed.value : responseBody;
@ -388,11 +372,9 @@ function sendRequest(
} }
const errorMessage = const errorMessage =
parsed.ok && payload && typeof payload === 'object' parsed.ok && payload && typeof payload === "object"
? String( ? String(payload.error || payload.message || "AI proxy request failed")
payload.error || payload.message || 'AI proxy request failed', : String(responseBody || "AI proxy request failed");
)
: String(responseBody || 'AI proxy request failed');
resolve({ resolve({
success: false, success: false,
@ -404,14 +386,14 @@ function sendRequest(
}); });
}); });
req.on('timeout', () => { req.on("timeout", () => {
req.destroy(new Error('request_timeout')); req.destroy(new Error("request_timeout"));
}); });
req.on('error', (err) => { req.on("error", (err) => {
resolve({ resolve({
success: false, success: false,
error: 'request_failed', error: "request_failed",
message: err.message, message: err.message,
}); });
}); });
@ -424,8 +406,8 @@ function sendRequest(
} }
function parseJson(value) { function parseJson(value) {
if (typeof value !== 'string' || value.trim() === '') { if (typeof value !== "string" || value.trim() === "") {
return { ok: false, error: 'empty_response' }; return { ok: false, error: "empty_response" };
} }
try { try {
return { ok: true, value: JSON.parse(value) }; return { ok: true, value: JSON.parse(value) };
@ -436,14 +418,11 @@ function parseJson(value) {
function stripJsonFence(text) { function stripJsonFence(text) {
const trimmed = text.trim(); const trimmed = text.trim();
if (trimmed.startsWith('```json')) { if (trimmed.startsWith("```json")) {
return trimmed return trimmed.replace(/^```json/, "").replace(/```$/, "").trim();
.replace(/^```json/, '')
.replace(/```$/, '')
.trim();
} }
if (trimmed.startsWith('```')) { if (trimmed.startsWith("```")) {
return trimmed.replace(/^```/, '').replace(/```$/, '').trim(); return trimmed.replace(/^```/, "").replace(/```$/, "").trim();
} }
return text; return text;
} }
@ -457,7 +436,7 @@ function resolveVerifyTls(value, fallback) {
if (value === undefined || value === null) { if (value === undefined || value === null) {
return Boolean(fallback); return Boolean(fallback);
} }
return String(value).toLowerCase() !== 'false' && String(value) !== '0'; return String(value).toLowerCase() !== "false" && String(value) !== "0";
} }
function ensureEnvLoaded() { function ensureEnvLoaded() {
@ -465,32 +444,29 @@ function ensureEnvLoaded() {
return; return;
} }
const envPath = path.resolve(__dirname, '../../../../.env'); const envPath = path.resolve(__dirname, "../../../../.env");
if (!fs.existsSync(envPath)) { if (!fs.existsSync(envPath)) {
return; return;
} }
let content; let content;
try { try {
content = fs.readFileSync(envPath, 'utf8'); content = fs.readFileSync(envPath, "utf8");
} catch (err) { } catch (err) {
throw new Error(`Failed to read executor .env: ${err.message}`); throw new Error(`Failed to read executor .env: ${err.message}`);
} }
for (const line of content.split(/\r?\n/)) { for (const line of content.split(/\r?\n/)) {
const trimmed = line.trim(); const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#') || !trimmed.includes('=')) { if (!trimmed || trimmed.startsWith("#") || !trimmed.includes("=")) {
continue; continue;
} }
const [rawKey, ...rest] = trimmed.split('='); const [rawKey, ...rest] = trimmed.split("=");
const key = rawKey.trim(); const key = rawKey.trim();
if (!key) { if (!key) {
continue; continue;
} }
const value = rest const value = rest.join("=").trim().replace(/^['"]|['"]$/g, "");
.join('=')
.trim()
.replace(/^['"]|['"]$/g, '');
if (!process.env[key]) { if (!process.env[key]) {
process.env[key] = value; process.env[key] = value;
} }

View File

@ -10,69 +10,59 @@ const GoogleStrategy = require('passport-google-oauth2').Strategy;
const MicrosoftStrategy = require('passport-microsoft').Strategy; const MicrosoftStrategy = require('passport-microsoft').Strategy;
const UsersDBApi = require('../db/api/users'); const UsersDBApi = require('../db/api/users');
passport.use(
new JWTstrategy(
{
passReqToCallback: true,
secretOrKey: config.secret_key,
jwtFromRequest: ExtractJWT.fromAuthHeaderAsBearerToken(),
},
async (req, token, done) => {
try {
// Use lightweight auth query - only loads essential fields + permissions
const user = await UsersDBApi.findByForAuth({ email: token.user.email });
if (user && user.disabled) { passport.use(new JWTstrategy({
return done(new Error(`User '${user.email}' is disabled`)); passReqToCallback: true,
} secretOrKey: config.secret_key,
jwtFromRequest: ExtractJWT.fromAuthHeaderAsBearerToken()
}, async (req, token, done) => {
try {
const user = await UsersDBApi.findBy( {email: token.user.email});
req.currentUser = user; if (user && user.disabled) {
return done (new Error(`User '${user.email}' is disabled`));
}
return done(null, user); req.currentUser = user;
} catch (error) {
done(error);
}
},
),
);
passport.use( return done(null, user);
new GoogleStrategy( } catch (error) {
{ done(error);
clientID: config.google.clientId, }
clientSecret: config.google.clientSecret, }));
callbackURL: config.apiUrl + '/auth/signin/google/callback',
passReqToCallback: true,
},
function (request, accessToken, refreshToken, profile, done) {
socialStrategy(profile.email, profile, providers.GOOGLE, done);
},
),
);
passport.use( passport.use(new GoogleStrategy({
new MicrosoftStrategy( clientID: config.google.clientId,
{ clientSecret: config.google.clientSecret,
clientID: config.microsoft.clientId, callbackURL: config.apiUrl + '/auth/signin/google/callback',
clientSecret: config.microsoft.clientSecret, passReqToCallback: true
callbackURL: config.apiUrl + '/auth/signin/microsoft/callback', },
passReqToCallback: true, function (request, accessToken, refreshToken, profile, done) {
}, socialStrategy(profile.email, profile, providers.GOOGLE, done);
function (request, accessToken, refreshToken, profile, done) { }
const email = profile._json.mail || profile._json.userPrincipalName; ));
socialStrategy(email, profile, providers.MICROSOFT, done);
},
), passport.use(new MicrosoftStrategy({
); clientID: config.microsoft.clientId,
clientSecret: config.microsoft.clientSecret,
callbackURL: config.apiUrl + '/auth/signin/microsoft/callback',
passReqToCallback: true
},
function (request, accessToken, refreshToken, profile, done) {
const email = profile._json.mail || profile._json.userPrincipalName;
socialStrategy(email, profile, providers.MICROSOFT, done);
}
));
function socialStrategy(email, profile, provider, done) { function socialStrategy(email, profile, provider, done) {
db.users.findOrCreate({ where: { email, provider } }).then(([user]) => { db.users.findOrCreate({where: {email, provider}}).then(([user]) => {
const body = { const body = {
id: user.id, id: user.id,
email: user.email, email: user.email,
name: profile.displayName, name: profile.displayName,
}; };
const token = helpers.jwtSign({ user: body }); const token = helpers.jwtSign({user: body});
return done(null, { token }); return done(null, {token});
}); });
} }

View File

@ -1,15 +1,42 @@
const os = require('os'); const os = require('os');
const fs = require('fs');
const path = require('path'); const path = require('path');
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }); const envFilePath = path.resolve(__dirname, '../.env');
const { validateEnv } = require('./utils/env-validation'); if (fs.existsSync(envFilePath)) {
validateEnv(); const envContent = fs.readFileSync(envFilePath, 'utf8');
envContent.split('\n').forEach((line) => {
const trimmedLine = line.trim();
if (!trimmedLine || trimmedLine.startsWith('#')) {
return;
}
const delimiterIndex = trimmedLine.indexOf('=');
if (delimiterIndex === -1) {
return;
}
const key = trimmedLine.slice(0, delimiterIndex).trim();
const rawValue = trimmedLine.slice(delimiterIndex + 1).trim();
if (!key || Object.prototype.hasOwnProperty.call(process.env, key)) {
return;
}
const unquotedValue = rawValue.replace(/^['"]|['"]$/g, '');
process.env[key] = unquotedValue;
});
}
const config = { const config = {
gcloud: { gcloud: {
bucket: 'fldemo-files', bucket: "fldemo-files",
hash: 'afeefb9d49f5b7977577876b99532ac7', hash: "afeefb9d49f5b7977577876b99532ac7"
}, },
s3: { s3: {
bucket: process.env.AWS_S3_BUCKET || '', bucket: process.env.AWS_S3_BUCKET || '',
@ -17,53 +44,37 @@ const config = {
accessKeyId: process.env.AWS_ACCESS_KEY_ID || '', accessKeyId: process.env.AWS_ACCESS_KEY_ID || '',
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '', secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '',
prefix: process.env.AWS_S3_PREFIX || 'afeefb9d49f5b7977577876b99532ac7', prefix: process.env.AWS_S3_PREFIX || 'afeefb9d49f5b7977577876b99532ac7',
// Timeout configuration (in milliseconds)
connectionTimeout:
parseInt(process.env.AWS_S3_CONNECTION_TIMEOUT, 10) || 5000,
requestTimeout: parseInt(process.env.AWS_S3_REQUEST_TIMEOUT, 10) || 30000,
// Retry configuration
maxAttempts: parseInt(process.env.AWS_S3_MAX_ATTEMPTS, 10) || 3,
// Connection pool configuration
maxSockets: parseInt(process.env.AWS_S3_MAX_SOCKETS, 10) || 50,
keepAlive: process.env.AWS_S3_KEEP_ALIVE !== 'false',
// Presigned URL expiry (in seconds)
presignExpirySeconds:
parseInt(process.env.AWS_S3_PRESIGN_EXPIRY, 10) || 3600,
}, },
bcrypt: { bcrypt: {
saltRounds: 12, saltRounds: 12
}, },
admin_pass: process.env.ADMIN_PASS || '88dbeaf8', admin_pass: "88dbeaf8",
user_pass: process.env.USER_PASS || 'c3baadeda5c6', user_pass: "c3baadeda5c6",
admin_email: process.env.ADMIN_EMAIL || 'admin@flatlogic.com', admin_email: "admin@flatlogic.com",
providers: { providers: {
LOCAL: 'local', LOCAL: 'local',
GOOGLE: 'google', GOOGLE: 'google',
MICROSOFT: 'microsoft', MICROSOFT: 'microsoft'
}, },
secret_key: process.env.SECRET_KEY || '88dbeaf8-e906-405e-9e41-c3baadeda5c6', secret_key: process.env.SECRET_KEY || '88dbeaf8-e906-405e-9e41-c3baadeda5c6',
remote: '', remote: '',
port: process.env.NODE_ENV === 'production' ? '' : '8080', port: process.env.NODE_ENV === "production" ? "" : "8080",
hostUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost', hostUI: process.env.NODE_ENV === "production" ? "" : "http://localhost",
portUI: process.env.NODE_ENV === 'production' ? '' : '3000', portUI: process.env.NODE_ENV === "production" ? "" : "3000",
portUIProd: process.env.NODE_ENV === 'production' ? '' : ':3000', portUIProd: process.env.NODE_ENV === "production" ? "" : ":3000",
swaggerUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost', swaggerUI: process.env.NODE_ENV === "production" ? "" : "http://localhost",
swaggerPort: process.env.NODE_ENV === 'production' ? '' : ':8080', swaggerPort: process.env.NODE_ENV === "production" ? "" : ":8080",
google: { google: {
clientId: process.env.GOOGLE_CLIENT_ID || '', clientId: process.env.GOOGLE_CLIENT_ID || '',
clientSecret: process.env.GOOGLE_CLIENT_SECRET || '', clientSecret: process.env.GOOGLE_CLIENT_SECRET || '',
}, },
microsoft: { microsoft: {
clientId: process.env.MS_CLIENT_ID || '', clientId: process.env.MS_CLIENT_ID || '',
clientSecret: process.env.MS_CLIENT_SECRET || '', clientSecret: process.env.MS_CLIENT_SECRET || '',
}, },
uploadDir: os.tmpdir(), uploadDir: os.tmpdir(),
// Local cache for S3 proxy downloads (improves performance for repeated requests)
s3CacheDir: process.env.S3_CACHE_DIR || path.join(os.tmpdir(), 's3-cache'),
s3CacheEnabled: process.env.S3_CACHE_ENABLED !== 'false', // Enabled by default
s3CacheMaxAge: parseInt(process.env.S3_CACHE_MAX_AGE, 10) || 86400, // 24 hours
email: { email: {
from: 'Tour Builder Platform <app@flatlogic.app>', from: 'Tour Builder Platform <app@flatlogic.app>',
host: 'email-smtp.us-east-1.amazonaws.com', host: 'email-smtp.us-east-1.amazonaws.com',
@ -74,26 +85,29 @@ const config = {
}, },
tls: { tls: {
rejectUnauthorized: process.env.EMAIL_TLS_REJECT_UNAUTHORIZED !== 'false', rejectUnauthorized: process.env.EMAIL_TLS_REJECT_UNAUTHORIZED !== 'false',
}, }
}, },
roles: { roles: {
admin: 'Administrator', admin: 'Administrator',
user: 'Analytics Viewer',
user: 'Analytics Viewer',
}, },
project_uuid: '88dbeaf8-e906-405e-9e41-c3baadeda5c6', project_uuid: '88dbeaf8-e906-405e-9e41-c3baadeda5c6',
flHost: flHost: process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'dev_stage' ? 'https://flatlogic.com/projects' : 'http://localhost:3000/projects',
process.env.NODE_ENV === 'production' ||
process.env.NODE_ENV === 'dev_stage'
? 'https://flatlogic.com/projects'
: 'http://localhost:3000/projects',
gpt_key: process.env.GPT_KEY || '', gpt_key: process.env.GPT_KEY || '',
}; };
config.host = config.pexelsKey = process.env.PEXELS_KEY || '';
process.env.NODE_ENV === 'production' ? config.remote : 'http://localhost';
config.pexelsQuery = 'Architect drafting blueprint';
config.host = process.env.NODE_ENV === "production" ? config.remote : "http://localhost";
config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`; config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`;
config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`; config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`;
config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`; config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`;

View File

@ -1,88 +1,514 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class Access_logsDBApi extends GenericDBApi {
static get MODEL() {
return db.access_logs;
}
static get TABLE_NAME() {
return 'access_logs';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['path', 'ip_address', 'user_agent']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class Access_logsDBApi {
return ['accessed_at'];
}
static get ENUM_FIELDS() {
return ['environment']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const access_logs = await db.access_logs.create(
return [ {
'id', id: data.id || undefined,
'environment',
'path', environment: data.environment
'ip_address', ||
'user_agent', null
'accessed_at', ,
'createdAt',
]; path: data.path
} ||
null
,
ip_address: data.ip_address
||
null
,
user_agent: data.user_agent
||
null
,
accessed_at: data.accessed_at
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'path'; await access_logs.setProject( data.project || null, {
} transaction,
});
await access_logs.setUser( data.user || null, {
transaction,
});
static get ASSOCIATIONS() {
return [
{ field: 'project', setter: 'setProject', isArray: false },
{ field: 'user', setter: 'setUser', isArray: false },
];
}
static get FIND_BY_INCLUDES() {
return [{ association: 'project' }, { association: 'user' }];
}
static get FIND_ALL_INCLUDES() { return access_logs;
return [ }
{ model: db.projects, as: 'project', required: false },
{ model: db.users, as: 'user', required: false },
]; static async bulkImport(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const access_logsData = data.map((item, index) => ({
id: item.id || undefined,
environment: item.environment
||
null
,
path: item.path
||
null
,
ip_address: item.ip_address
||
null
,
user_agent: item.user_agent
||
null
,
accessed_at: item.accessed_at
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const access_logs = await db.access_logs.bulkCreate(access_logsData, { transaction });
// For each item created, replace relation files
return access_logs;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const access_logs = await db.access_logs.findByPk(id, {transaction});
const updatePayload = {};
if (data.environment !== undefined) updatePayload.environment = data.environment;
if (data.path !== undefined) updatePayload.path = data.path;
if (data.ip_address !== undefined) updatePayload.ip_address = data.ip_address;
if (data.user_agent !== undefined) updatePayload.user_agent = data.user_agent;
if (data.accessed_at !== undefined) updatePayload.accessed_at = data.accessed_at;
updatePayload.updatedById = currentUser.id;
await access_logs.update(updatePayload, {transaction});
if (data.project !== undefined) {
await access_logs.setProject(
data.project,
{ transaction }
);
}
if (data.user !== undefined) {
await access_logs.setUser(
data.user,
{ transaction }
);
}
return access_logs;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const access_logs = await db.access_logs.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of access_logs) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of access_logs) {
await record.destroy({transaction});
}
});
return access_logs;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const access_logs = await db.access_logs.findByPk(id, options);
await access_logs.update({
deletedBy: currentUser.id
}, {
transaction,
});
await access_logs.destroy({
transaction
});
return access_logs;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const access_logs = await db.access_logs.findOne({
where,
transaction,
});
if (!access_logs) {
return access_logs;
}
const output = access_logs.get({plain: true});
output.project = await access_logs.getProject({
transaction
});
output.user = await access_logs.getUser({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get RELATION_FILTERS() {
return [
{ {
filterKey: 'project',
model: db.projects, model: db.projects,
as: 'project', as: 'project',
searchField: 'name',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
{ {
filterKey: 'user',
model: db.users, model: db.users,
as: 'user', as: 'user',
searchField: 'firstName',
where: filter.user ? {
[Op.or]: [
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
{
firstName: {
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static getFieldMapping(data) { if (filter) {
return { if (filter.id) {
id: data.id || undefined, where = {
environment: data.environment || null, ...where,
path: data.path || null, ['id']: Utils.uuid(filter.id),
ip_address: data.ip_address || null, };
user_agent: data.user_agent || null, }
accessed_at: data.accessed_at || null,
};
}
}
module.exports = Access_logsDBApi;
if (filter.path) {
where = {
...where,
[Op.and]: Utils.ilike(
'access_logs',
'path',
filter.path,
),
};
}
if (filter.ip_address) {
where = {
...where,
[Op.and]: Utils.ilike(
'access_logs',
'ip_address',
filter.ip_address,
),
};
}
if (filter.user_agent) {
where = {
...where,
[Op.and]: Utils.ilike(
'access_logs',
'user_agent',
filter.user_agent,
),
};
}
if (filter.accessed_atRange) {
const [start, end] = filter.accessed_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
accessed_at: {
...where.accessed_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
accessed_at: {
...where.accessed_at,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.environment) {
where = {
...where,
environment: filter.environment,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.access_logs.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'access_logs',
'path',
query,
),
],
};
}
const records = await db.access_logs.findAll({
attributes: [ 'id', 'path' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['path', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.path,
}));
}
};

View File

@ -1,84 +1,503 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class Asset_variantsDBApi extends GenericDBApi {
static get MODEL() {
return db.asset_variants;
}
static get TABLE_NAME() {
return 'asset_variants';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['cdn_url']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class Asset_variantsDBApi {
return ['width_px', 'height_px', 'size_mb'];
}
static get ENUM_FIELDS() {
return ['variant_type']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const asset_variants = await db.asset_variants.create(
return [ {
'id', id: data.id || undefined,
'variant_type',
'cdn_url', variant_type: data.variant_type
'width_px', ||
'height_px', null
'size_mb', ,
'createdAt',
]; cdn_url: data.cdn_url
} ||
null
,
width_px: data.width_px
||
null
,
height_px: data.height_px
||
null
,
size_mb: data.size_mb
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'variant_type'; await asset_variants.setAsset( data.asset || null, {
} transaction,
});
static get ASSOCIATIONS() {
return [{ field: 'asset', setter: 'setAsset', isArray: false }];
}
static get FIND_BY_INCLUDES() {
return [{ association: 'asset' }];
} return asset_variants;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const asset_variantsData = data.map((item, index) => ({
id: item.id || undefined,
variant_type: item.variant_type
||
null
,
cdn_url: item.cdn_url
||
null
,
width_px: item.width_px
||
null
,
height_px: item.height_px
||
null
,
size_mb: item.size_mb
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const asset_variants = await db.asset_variants.bulkCreate(asset_variantsData, { transaction });
// For each item created, replace relation files
return asset_variants;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const asset_variants = await db.asset_variants.findByPk(id, {transaction});
const updatePayload = {};
if (data.variant_type !== undefined) updatePayload.variant_type = data.variant_type;
if (data.cdn_url !== undefined) updatePayload.cdn_url = data.cdn_url;
if (data.width_px !== undefined) updatePayload.width_px = data.width_px;
if (data.height_px !== undefined) updatePayload.height_px = data.height_px;
if (data.size_mb !== undefined) updatePayload.size_mb = data.size_mb;
updatePayload.updatedById = currentUser.id;
await asset_variants.update(updatePayload, {transaction});
if (data.asset !== undefined) {
await asset_variants.setAsset(
data.asset,
{ transaction }
);
}
return asset_variants;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const asset_variants = await db.asset_variants.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of asset_variants) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of asset_variants) {
await record.destroy({transaction});
}
});
return asset_variants;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const asset_variants = await db.asset_variants.findByPk(id, options);
await asset_variants.update({
deletedBy: currentUser.id
}, {
transaction,
});
await asset_variants.destroy({
transaction
});
return asset_variants;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const asset_variants = await db.asset_variants.findOne({
where,
transaction,
});
if (!asset_variants) {
return asset_variants;
}
const output = asset_variants.get({plain: true});
output.asset = await asset_variants.getAsset({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get FIND_ALL_INCLUDES() {
return [
{ {
model: db.assets, model: db.assets,
as: 'asset', as: 'asset',
required: false,
where: filter.asset ? {
[Op.or]: [
{ id: { [Op.in]: filter.asset.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.asset.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static get RELATION_FILTERS() { if (filter) {
return [ if (filter.id) {
{ where = {
filterKey: 'asset', ...where,
model: db.assets, ['id']: Utils.uuid(filter.id),
as: 'asset', };
searchField: 'name', }
},
];
}
static getFieldMapping(data) {
return { if (filter.cdn_url) {
id: data.id || undefined, where = {
assetId: data.assetId || null, ...where,
variant_type: data.variant_type || null, [Op.and]: Utils.ilike(
cdn_url: data.cdn_url || null, 'asset_variants',
storage_key: data.storage_key || null, 'cdn_url',
width_px: data.width_px || null, filter.cdn_url,
height_px: data.height_px || null, ),
size_mb: data.size_mb || null, };
}; }
}
}
module.exports = Asset_variantsDBApi;
if (filter.width_pxRange) {
const [start, end] = filter.width_pxRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
width_px: {
...where.width_px,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
width_px: {
...where.width_px,
[Op.lte]: end,
},
};
}
}
if (filter.height_pxRange) {
const [start, end] = filter.height_pxRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
height_px: {
...where.height_px,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
height_px: {
...where.height_px,
[Op.lte]: end,
},
};
}
}
if (filter.size_mbRange) {
const [start, end] = filter.size_mbRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
size_mb: {
...where.size_mb,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
size_mb: {
...where.size_mb,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.variant_type) {
where = {
...where,
variant_type: filter.variant_type,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.asset_variants.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'asset_variants',
'variant_type',
query,
),
],
};
}
const records = await db.asset_variants.findAll({
attributes: [ 'id', 'variant_type' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['variant_type', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.variant_type,
}));
}
};

View File

@ -1,92 +1,721 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class AssetsDBApi extends GenericDBApi {
static get MODEL() {
return db.assets;
}
static get TABLE_NAME() {
return 'assets';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['name', 'cdn_url', 'storage_key', 'mime_type', 'checksum']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class AssetsDBApi {
return ['size_mb', 'width_px', 'height_px', 'duration_sec'];
}
static get ENUM_FIELDS() {
return ['asset_type', 'type', 'is_public']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get UUID_FIELDS() { const assets = await db.assets.create(
return ['projectId']; {
} id: data.id || undefined,
name: data.name
||
null
,
asset_type: data.asset_type
||
null
,
cdn_url: data.cdn_url
||
null
,
storage_key: data.storage_key
||
null
,
mime_type: data.mime_type
||
null
,
size_mb: data.size_mb
||
null
,
width_px: data.width_px
||
null
,
height_px: data.height_px
||
null
,
duration_sec: data.duration_sec
||
null
,
checksum: data.checksum
||
null
,
is_public: data.is_public
||
false
,
is_deleted: data.is_deleted
||
false
,
deleted_at_time: data.deleted_at_time
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get CSV_FIELDS() {
return [ await assets.setProject( data.project || null, {
'id', transaction,
'name', });
'asset_type',
'type',
'cdn_url',
'storage_key',
'mime_type',
'size_mb',
'createdAt',
];
}
static get AUTOCOMPLETE_FIELD() {
return 'name';
}
static get ASSOCIATIONS() {
return [{ field: 'project', setter: 'setProject', isArray: false }];
}
static get FIND_BY_INCLUDES() { return assets;
return [ }
{ association: 'asset_variants_asset' },
{ association: 'project' },
];
}
static get FIND_ALL_INCLUDES() { static async bulkImport(data, options) {
return [{ model: db.projects, as: 'project', required: false }]; const currentUser = (options && options.currentUser) || { id: null };
} const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const assetsData = data.map((item, index) => ({
id: item.id || undefined,
name: item.name
||
null
,
asset_type: item.asset_type
||
null
,
cdn_url: item.cdn_url
||
null
,
storage_key: item.storage_key
||
null
,
mime_type: item.mime_type
||
null
,
size_mb: item.size_mb
||
null
,
width_px: item.width_px
||
null
,
height_px: item.height_px
||
null
,
duration_sec: item.duration_sec
||
null
,
checksum: item.checksum
||
null
,
is_public: item.is_public
||
false
,
is_deleted: item.is_deleted
||
false
,
deleted_at_time: item.deleted_at_time
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const assets = await db.assets.bulkCreate(assetsData, { transaction });
// For each item created, replace relation files
return assets;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findByPk(id, {transaction});
const updatePayload = {};
if (data.name !== undefined) updatePayload.name = data.name;
if (data.asset_type !== undefined) updatePayload.asset_type = data.asset_type;
if (data.cdn_url !== undefined) updatePayload.cdn_url = data.cdn_url;
if (data.storage_key !== undefined) updatePayload.storage_key = data.storage_key;
if (data.mime_type !== undefined) updatePayload.mime_type = data.mime_type;
if (data.size_mb !== undefined) updatePayload.size_mb = data.size_mb;
if (data.width_px !== undefined) updatePayload.width_px = data.width_px;
if (data.height_px !== undefined) updatePayload.height_px = data.height_px;
if (data.duration_sec !== undefined) updatePayload.duration_sec = data.duration_sec;
if (data.checksum !== undefined) updatePayload.checksum = data.checksum;
if (data.is_public !== undefined) updatePayload.is_public = data.is_public;
if (data.is_deleted !== undefined) updatePayload.is_deleted = data.is_deleted;
if (data.deleted_at_time !== undefined) updatePayload.deleted_at_time = data.deleted_at_time;
updatePayload.updatedById = currentUser.id;
await assets.update(updatePayload, {transaction});
if (data.project !== undefined) {
await assets.setProject(
data.project,
{ transaction }
);
}
return assets;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of assets) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of assets) {
await record.destroy({transaction});
}
});
return assets;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findByPk(id, options);
await assets.update({
deletedBy: currentUser.id
}, {
transaction,
});
await assets.destroy({
transaction
});
return assets;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findOne({
where,
transaction,
});
if (!assets) {
return assets;
}
const output = assets.get({plain: true});
output.asset_variants_asset = await assets.getAsset_variants_asset({
transaction
});
output.project = await assets.getProject({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get RELATION_FILTERS() {
return [
{ {
filterKey: 'project',
model: db.projects, model: db.projects,
as: 'project', as: 'project',
searchField: 'name',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static getFieldMapping(data) { if (filter) {
return { if (filter.id) {
id: data.id || undefined, where = {
name: data.name || null, ...where,
asset_type: data.asset_type || null, ['id']: Utils.uuid(filter.id),
type: data.type || 'general', };
cdn_url: data.cdn_url || null, }
storage_key: data.storage_key || null,
mime_type: data.mime_type || null,
size_mb: data.size_mb || null,
width_px: data.width_px || null,
height_px: data.height_px || null,
duration_sec: data.duration_sec || null,
checksum: data.checksum || null,
is_public: data.is_public || false,
};
}
}
module.exports = AssetsDBApi;
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike(
'assets',
'name',
filter.name,
),
};
}
if (filter.cdn_url) {
where = {
...where,
[Op.and]: Utils.ilike(
'assets',
'cdn_url',
filter.cdn_url,
),
};
}
if (filter.storage_key) {
where = {
...where,
[Op.and]: Utils.ilike(
'assets',
'storage_key',
filter.storage_key,
),
};
}
if (filter.mime_type) {
where = {
...where,
[Op.and]: Utils.ilike(
'assets',
'mime_type',
filter.mime_type,
),
};
}
if (filter.checksum) {
where = {
...where,
[Op.and]: Utils.ilike(
'assets',
'checksum',
filter.checksum,
),
};
}
if (filter.size_mbRange) {
const [start, end] = filter.size_mbRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
size_mb: {
...where.size_mb,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
size_mb: {
...where.size_mb,
[Op.lte]: end,
},
};
}
}
if (filter.width_pxRange) {
const [start, end] = filter.width_pxRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
width_px: {
...where.width_px,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
width_px: {
...where.width_px,
[Op.lte]: end,
},
};
}
}
if (filter.height_pxRange) {
const [start, end] = filter.height_pxRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
height_px: {
...where.height_px,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
height_px: {
...where.height_px,
[Op.lte]: end,
},
};
}
}
if (filter.duration_secRange) {
const [start, end] = filter.duration_secRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
duration_sec: {
...where.duration_sec,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
duration_sec: {
...where.duration_sec,
[Op.lte]: end,
},
};
}
}
if (filter.deleted_at_timeRange) {
const [start, end] = filter.deleted_at_timeRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
deleted_at_time: {
...where.deleted_at_time,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
deleted_at_time: {
...where.deleted_at_time,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.asset_type) {
where = {
...where,
asset_type: filter.asset_type,
};
}
if (filter.is_public) {
where = {
...where,
is_public: filter.is_public,
};
}
if (filter.is_deleted) {
where = {
...where,
is_deleted: filter.is_deleted,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.assets.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'assets',
'name',
query,
),
],
};
}
const records = await db.assets.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -1,480 +0,0 @@
const db = require('../models');
const Utils = require('../utils');
const { parse } = require('json2csv');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
/**
 * Generic CRUD/query base class for Sequelize-backed entity APIs.
 *
 * Subclasses override the static getters (MODEL, SEARCHABLE_FIELDS, …) to
 * declaratively describe their entity; the shared methods below implement
 * create/update/delete/find in terms of that configuration.
 *
 * BUG FIX in findAll(): text filters for multiple SEARCHABLE_FIELDS were
 * assigned to the same `where[Op.and]` key in a loop, so each assignment
 * overwrote the previous one and only the LAST searchable filter took
 * effect. They are now accumulated into an array and ANDed together.
 */
class GenericDBApi {
  // Sequelize model the API operates on; must be provided by subclasses.
  static get MODEL() {
    throw new Error('MODEL must be defined in subclass');
  }

  // Physical table name, used by ilike helpers and error messages.
  static get TABLE_NAME() {
    return this.MODEL.getTableName();
  }

  // Text fields searched with case-insensitive LIKE in findAll().
  static get SEARCHABLE_FIELDS() {
    return [];
  }

  // Fields filterable via `<field>Range` = [start, end] in findAll().
  static get RANGE_FIELDS() {
    return [];
  }

  // Fields matched exactly (enums/flags) in findAll().
  static get ENUM_FIELDS() {
    return [];
  }

  /**
   * UUID fields that require validation before querying.
   * These are typically foreign key fields like 'projectId'.
   * Invalid UUIDs will return empty results instead of causing DB errors.
   * Override in subclass to specify fields.
   * Example: return ['projectId', 'userId'];
   */
  static get UUID_FIELDS() {
    return [];
  }

  // Relation filter specs: { filterKey, model, as, searchField }.
  static get RELATION_FILTERS() {
    return [];
  }

  // Columns exported by toCSV(), in output order.
  static get CSV_FIELDS() {
    return ['id', 'createdAt'];
  }

  // Field used as the label in findAllAutocomplete().
  static get AUTOCOMPLETE_FIELD() {
    return 'name';
  }

  // Association specs: { field, setter, isArray } applied on create/update.
  static get ASSOCIATIONS() {
    return [];
  }

  // Default includes for findBy().
  static get FIND_BY_INCLUDES() {
    return [];
  }

  // Default includes for findAll().
  static get FIND_ALL_INCLUDES() {
    return [];
  }

  /**
   * Fields that should be automatically JSON-stringified
   * Override in subclass to specify fields.
   * Example: return ['settings_json', 'metadata_json'];
   */
  static get JSON_FIELDS() {
    return [];
  }

  /**
   * Custom field transformers for data mapping.
   * Override in subclass to add custom transformations.
   * Example:
   *   return {
   *     email: (value) => value?.toLowerCase().trim(),
   *     slug: (value) => value?.toLowerCase().replace(/\s+/g, '-'),
   *   };
   */
  static get FIELD_TRANSFORMERS() {
    return {};
  }

  /**
   * Field mapping configuration for declarative field handling.
   * Override in subclass to specify how fields should be mapped.
   * Example:
   *   return {
   *     name: { default: null },
   *     sort_order: { default: 0 },
   *     is_active: { default: true },
   *   };
   */
  static get FIELD_DEFAULTS() {
    return {};
  }

  /**
   * Transform input data for database operations.
   * Template Method Pattern: Uses JSON_FIELDS, FIELD_TRANSFORMERS, and
   * FIELD_DEFAULTS to declaratively transform data, reducing boilerplate
   * in subclasses.
   *
   * Override this method for complex custom transformations that can't be
   * expressed declaratively.
   *
   * @param {Object} data - Input data to transform
   * @returns {Object} - Transformed data ready for database
   */
  static getFieldMapping(data) {
    if (!data) return data;
    const mapped = { ...data };

    // Apply field defaults
    for (const [field, config] of Object.entries(this.FIELD_DEFAULTS)) {
      if (mapped[field] === undefined) {
        mapped[field] = config.default;
      } else if (mapped[field] === null && config.nullDefault !== undefined) {
        mapped[field] = config.nullDefault;
      }
    }

    // Auto-stringify JSON fields
    for (const field of this.JSON_FIELDS) {
      if (mapped[field] !== undefined && mapped[field] !== null) {
        if (typeof mapped[field] !== 'string') {
          mapped[field] = JSON.stringify(mapped[field]);
        }
      }
    }

    // Apply custom transformers
    for (const [field, transformer] of Object.entries(
      this.FIELD_TRANSFORMERS,
    )) {
      if (mapped[field] !== undefined) {
        mapped[field] = transformer(mapped[field]);
      }
    }

    return mapped;
  }

  /**
   * Create a record with audit fields, then apply configured associations.
   */
  static async create(data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;
    const mappedData = this.getFieldMapping(data);

    const record = await this.MODEL.create(
      {
        ...mappedData,
        importHash: data.importHash || null,
        createdById: currentUser.id,
        updatedById: currentUser.id,
      },
      { transaction },
    );

    for (const assoc of this.ASSOCIATIONS) {
      if (data[assoc.field] !== undefined) {
        await record[assoc.setter](
          data[assoc.field] || (assoc.isArray ? [] : null),
          { transaction },
        );
      }
    }

    return record;
  }

  /**
   * Bulk-insert rows with audit fields; createdAt is staggered by one
   * second per index so import order is preserved.
   */
  static async bulkImport(data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const recordsData = data.map((item, index) => ({
      ...this.getFieldMapping(item),
      importHash: item.importHash || null,
      createdById: currentUser.id,
      updatedById: currentUser.id,
      createdAt: new Date(Date.now() + index * 1000),
    }));

    return this.MODEL.bulkCreate(recordsData, { transaction });
  }

  /**
   * Update a record through getFieldMapping, then re-apply associations.
   * @throws {{status: 404}} when the id does not exist.
   */
  static async update(id, data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const record = await this.MODEL.findByPk(id, { transaction });
    if (!record) {
      throw { status: 404, message: `${this.TABLE_NAME} not found` };
    }

    const updatePayload = { updatedById: currentUser.id };
    const mappedData = this.getFieldMapping(data);
    for (const [key, value] of Object.entries(mappedData)) {
      if (value !== undefined) {
        updatePayload[key] = value;
      }
    }

    await record.update(updatePayload, { transaction });

    for (const assoc of this.ASSOCIATIONS) {
      if (data[assoc.field] !== undefined) {
        await record[assoc.setter](data[assoc.field], { transaction });
      }
    }

    return record;
  }

  /**
   * Partial update - only updates fields explicitly passed in data.
   * Unlike update(), this doesn't go through getFieldMapping which
   * converts missing fields to null.
   *
   * Use this when you need to update specific fields without affecting others.
   *
   * @param {string} id - Record ID
   * @param {Object} data - Fields to update (only these will be modified)
   * @param {Object} options - Options with currentUser and transaction
   */
  static async partialUpdate(id, data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const record = await this.MODEL.findByPk(id, { transaction });
    if (!record) {
      throw { status: 404, message: `${this.TABLE_NAME} not found` };
    }

    const updatePayload = { updatedById: currentUser.id };
    // Only include fields that are explicitly in the data object
    for (const [key, value] of Object.entries(data)) {
      if (value !== undefined) {
        updatePayload[key] = value;
      }
    }

    await record.update(updatePayload, { transaction });
    return record;
  }

  /**
   * Soft-delete a batch: stamp deletedBy on every record, then destroy.
   */
  static async deleteByIds(ids, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const records = await this.MODEL.findAll({
      where: { id: { [Op.in]: ids } },
      transaction,
    });

    for (const record of records) {
      await record.update({ deletedBy: currentUser.id }, { transaction });
    }
    for (const record of records) {
      await record.destroy({ transaction });
    }

    return records;
  }

  /**
   * Soft-delete one record by primary key.
   * @throws {{status: 404}} when the id does not exist.
   */
  static async remove(id, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const record = await this.MODEL.findByPk(id, { transaction });
    if (!record) {
      throw { status: 404, message: `${this.TABLE_NAME} not found` };
    }

    await record.update({ deletedBy: currentUser.id }, { transaction });
    await record.destroy({ transaction });
    return record;
  }

  /**
   * Fetch a single plain record matching `where`, with FIND_BY_INCLUDES
   * (or caller-supplied includes) eagerly loaded.
   */
  static async findBy(where, options = {}) {
    const transaction = options.transaction;
    const include =
      options.include !== undefined ? options.include : this.FIND_BY_INCLUDES;

    const record = await this.MODEL.findOne({
      where,
      transaction,
      include,
    });

    if (!record) {
      return null;
    }
    return record.get({ plain: true });
  }

  /**
   * List records using the declarative filter configuration.
   * Supports countOnly mode, relation filters, ranges, enums and paging.
   */
  static async findAll(filter = {}, options = {}) {
    filter = filter || {};
    const limit = filter.limit || 0;
    const currentPage = +filter.page || 0;
    const offset = currentPage * limit;

    let where = {};
    let include = [...this.FIND_ALL_INCLUDES];

    if (filter.id) {
      if (!Utils.isValidUuid(filter.id)) {
        return { rows: [], count: 0 };
      }
      where.id = filter.id;
    }

    // Collect all text filters first, then AND them together. Assigning
    // to where[Op.and] inside the loop (as before) overwrote earlier
    // filters, so only the last searchable field took effect.
    const textFilters = [];
    for (const field of this.SEARCHABLE_FIELDS) {
      if (filter[field]) {
        textFilters.push(Utils.ilike(this.TABLE_NAME, field, filter[field]));
      }
    }
    if (textFilters.length > 0) {
      where[Op.and] = textFilters;
    }

    for (const field of this.RANGE_FIELDS) {
      const rangeKey = `${field}Range`;
      if (filter[rangeKey]) {
        const [start, end] = filter[rangeKey];
        if (start !== undefined && start !== null && start !== '') {
          where[field] = { ...where[field], [Op.gte]: start };
        }
        if (end !== undefined && end !== null && end !== '') {
          where[field] = { ...where[field], [Op.lte]: end };
        }
      }
    }

    for (const field of this.ENUM_FIELDS) {
      if (filter[field] !== undefined) {
        where[field] = filter[field];
      }
    }

    // Validate UUID fields - return empty results for invalid UUIDs
    for (const field of this.UUID_FIELDS) {
      if (filter[field] !== undefined) {
        if (!Utils.isValidUuid(filter[field])) {
          return { rows: [], count: 0 };
        }
        where[field] = filter[field];
      }
    }

    if (filter.active !== undefined) {
      where.active = filter.active === true || filter.active === 'true';
    }

    if (filter.createdAtRange) {
      const [start, end] = filter.createdAtRange;
      if (start !== undefined && start !== null && start !== '') {
        where.createdAt = { ...where.createdAt, [Op.gte]: start };
      }
      if (end !== undefined && end !== null && end !== '') {
        where.createdAt = { ...where.createdAt, [Op.lte]: end };
      }
    }

    for (const rel of this.RELATION_FILTERS) {
      if (filter[rel.filterKey]) {
        const searchTerms = filter[rel.filterKey].split('|');
        const validUuids = Utils.filterValidUuids(searchTerms);

        // Build OR conditions array
        const orConditions = [];

        // Add UUID condition only if there are valid UUIDs
        if (validUuids.length > 0) {
          orConditions.push({ id: { [Op.in]: validUuids } });
        }

        // Add text search condition if searchField is defined
        if (rel.searchField) {
          orConditions.push({
            [rel.searchField]: {
              [Op.or]: searchTerms.map((term) => ({
                [Op.iLike]: `%${term}%`,
              })),
            },
          });
        }

        const relInclude = {
          model: rel.model,
          as: rel.as,
          required: orConditions.length > 0,
          where:
            orConditions.length > 0 ? { [Op.or]: orConditions } : undefined,
        };

        include = [relInclude, ...include];
      }
    }

    try {
      if (options.countOnly) {
        const count = await this.MODEL.count({
          where,
          include: include.filter((entry) => entry.required || entry.where),
          distinct: true,
          transaction: options.transaction,
        });
        return {
          rows: [],
          count,
        };
      }

      const queryOptions = {
        where,
        include,
        distinct: true,
        order:
          filter.field && filter.sort
            ? [[filter.field, filter.sort]]
            : [['createdAt', 'desc']],
        transaction: options.transaction,
        limit: limit ? Number(limit) : undefined,
        offset: offset ? Number(offset) : undefined,
      };

      const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);

      return {
        rows,
        count,
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }

  /**
   * Autocomplete lookup on AUTOCOMPLETE_FIELD (and exact UUID id match).
   */
  static async findAllAutocomplete(query, limit, offset) {
    let where = {};
    if (query) {
      const orConditions = [
        Utils.ilike(this.TABLE_NAME, this.AUTOCOMPLETE_FIELD, query),
      ];
      if (Utils.isValidUuid(query)) {
        orConditions.unshift({ id: query });
      }
      where = { [Op.or]: orConditions };
    }

    const records = await this.MODEL.findAll({
      attributes: ['id', this.AUTOCOMPLETE_FIELD],
      where,
      limit: limit ? Number(limit) : undefined,
      offset: offset ? Number(offset) : undefined,
      order: [[this.AUTOCOMPLETE_FIELD, 'ASC']],
    });

    return records.map((record) => ({
      id: record.id,
      label: record[this.AUTOCOMPLETE_FIELD],
    }));
  }

  /**
   * Serialize rows to CSV using the subclass's CSV_FIELDS column list.
   */
  static toCSV(rows) {
    const opts = { fields: this.CSV_FIELDS };
    return parse(rows, opts);
  }
}

module.exports = GenericDBApi;

View File

@ -1,298 +0,0 @@
const GenericDBApi = require('./base.api');
const db = require('../models');
/**
 * Element type defaults API.
 *
 * Lookup table of per-element-type default settings (one row per element
 * type). The table is lazily seeded from DEFAULT_ROWS on first use via
 * ensureInitialized(); all public entry points await that seeding first.
 */
class Element_type_defaultsDBApi extends GenericDBApi {
  static get MODEL() {
    return db.element_type_defaults;
  }
  static get TABLE_NAME() {
    return 'element_type_defaults';
  }
  static get SEARCHABLE_FIELDS() {
    return ['name', 'element_type'];
  }
  static get RANGE_FIELDS() {
    return ['sort_order'];
  }
  static get ENUM_FIELDS() {
    return [];
  }
  static get CSV_FIELDS() {
    return [
      'id',
      'element_type',
      'name',
      'sort_order',
      'is_active',
      'createdAt',
    ];
  }
  static get AUTOCOMPLETE_FIELD() {
    return 'name';
  }
  // Declarative field configuration using base class patterns
  static get JSON_FIELDS() {
    return ['default_settings_json'];
  }
  static get FIELD_DEFAULTS() {
    return {
      element_type: { default: null },
      name: { default: null },
      sort_order: { default: 0 },
    };
  }
  // NOTE(review): is_active appears in CSV_FIELDS but is not mapped here, so
  // create/import rely on the model's own default for it — confirm intended.
  static getFieldMapping(data) {
    // Apply base class transformations (JSON fields, defaults, transformers)
    const mapped = super.getFieldMapping(data);
    return {
      id: mapped.id || undefined,
      element_type: mapped.element_type,
      name: mapped.name,
      sort_order: mapped.sort_order,
      default_settings_json: mapped.default_settings_json,
    };
  }
  // Seed data inserted once when the table is empty (see ensureInitialized).
  static get DEFAULT_ROWS() {
    return [
      {
        element_type: 'navigation_next',
        name: 'Navigation Forward Button',
        sort_order: 1,
        default_settings_json: {
          label: 'Navigation: Forward',
          navLabel: 'Forward',
          navType: 'forward',
          navDisabled: false,
          transitionReverseMode: 'auto_reverse',
          transitionDurationSec: 0.7,
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'navigation_prev',
        name: 'Navigation Back Button',
        sort_order: 2,
        default_settings_json: {
          label: 'Navigation: Back',
          navLabel: 'Back',
          navType: 'back',
          navDisabled: false,
          transitionReverseMode: 'auto_reverse',
          transitionDurationSec: 0.7,
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'tooltip',
        name: 'Tooltip',
        sort_order: 3,
        default_settings_json: {
          label: 'Tooltip',
          tooltipTitle: 'Tooltip title',
          tooltipText: 'Tooltip text',
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'description',
        name: 'Description',
        sort_order: 4,
        default_settings_json: {
          label: 'Description',
          descriptionTitle: 'TITLE',
          descriptionText: '',
          descriptionTitleFontSize: '48px',
          descriptionTextFontSize: '36px',
          descriptionTitleFontFamily: 'inherit',
          descriptionTextFontFamily: 'inherit',
          descriptionTitleColor: '#000000',
          descriptionTextColor: '#4B5563',
          descriptionBackgroundColor: 'transparent',
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'gallery',
        name: 'Gallery',
        sort_order: 5,
        default_settings_json: {
          label: 'Gallery',
          galleryCards: [{ imageUrl: '', title: 'Card 1', description: '' }],
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'carousel',
        name: 'Carousel',
        sort_order: 6,
        default_settings_json: {
          label: 'Carousel',
          carouselSlides: [{ imageUrl: '', caption: 'Slide 1' }],
          carouselPrevIconUrl: '',
          carouselNextIconUrl: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'video_player',
        name: 'Video Player',
        sort_order: 7,
        default_settings_json: {
          label: 'Video Player',
          mediaUrl: '',
          mediaAutoplay: true,
          mediaLoop: true,
          mediaMuted: true,
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'audio_player',
        name: 'Audio Player',
        sort_order: 8,
        default_settings_json: {
          label: 'Audio Player',
          mediaUrl: '',
          mediaAutoplay: true,
          mediaLoop: true,
          mediaMuted: false,
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'spot',
        name: 'Hotspot',
        sort_order: 9,
        default_settings_json: {
          label: 'Hotspot',
          iconUrl: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'logo',
        name: 'Logo',
        sort_order: 10,
        default_settings_json: {
          label: 'Logo',
          iconUrl: '',
          backgroundImageUrl: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
      {
        element_type: 'popup',
        name: 'Popup',
        sort_order: 11,
        default_settings_json: {
          label: 'Popup',
          iconUrl: '',
          popupTitle: '',
          popupContent: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        },
      },
    ];
  }
  /**
   * One-time lazy seeding. The in-flight promise is cached on the class so
   * concurrent callers share a single seeding attempt; on failure the cache
   * is cleared so a later call can retry.
   */
  static async ensureInitialized() {
    if (!this.initializationPromise) {
      this.initializationPromise = (async () => {
        let count = 0;
        try {
          count = await this.MODEL.count();
        } catch (error) {
          // Postgres 42P01 = undefined_table: create it, then re-count.
          if (error?.original?.code !== '42P01') {
            throw error;
          }
          await this.MODEL.sync();
          count = await this.MODEL.count();
        }
        if (count > 0) return;
        const now = new Date();
        await this.MODEL.bulkCreate(
          this.DEFAULT_ROWS.map((item) => ({
            ...this.getFieldMapping(item),
            createdAt: now,
            updatedAt: now,
          })),
        );
      })().catch((error) => {
        // Allow a retry on the next call rather than caching the rejection.
        this.initializationPromise = null;
        throw error;
      });
    }
    await this.initializationPromise;
  }
  // The wrappers below only guarantee seeding before delegating to the base
  // class implementation; they add no behavior of their own.
  static async create(data, options = {}) {
    await this.ensureInitialized();
    return super.create(data, options);
  }
  static async bulkImport(data, options = {}) {
    await this.ensureInitialized();
    return super.bulkImport(data, options);
  }
  static async update(id, data, options = {}) {
    await this.ensureInitialized();
    return super.update(id, data, options);
  }
  static async deleteByIds(ids, options = {}) {
    await this.ensureInitialized();
    return super.deleteByIds(ids, options);
  }
  static async remove(id, options = {}) {
    await this.ensureInitialized();
    return super.remove(id, options);
  }
  static async findBy(where, options = {}) {
    await this.ensureInitialized();
    return super.findBy(where, options);
  }
  static async findAll(filter = {}, options = {}) {
    await this.ensureInitialized();
    return super.findAll(filter, options);
  }
  static async findAllAutocomplete(query, limit, offset) {
    await this.ensureInitialized();
    return super.findAllAutocomplete(query, limit, offset);
  }
}
// Shared cache for the lazy-seeding promise (see ensureInitialized).
Element_type_defaultsDBApi.initializationPromise = null;
module.exports = Element_type_defaultsDBApi;

View File

@ -1,11 +1,18 @@
const db = require('../models'); const db = require('../models');
const assert = require('assert'); const assert = require('assert');
const services = require('../../services/file/'); const services = require('../../services/file');
module.exports = class FileDBApi { module.exports = class FileDBApi {
static async replaceRelationFiles(relation, rawFiles, options) { static async replaceRelationFiles(
relation,
rawFiles,
options,
) {
assert(relation.belongsTo, 'belongsTo is required'); assert(relation.belongsTo, 'belongsTo is required');
assert(relation.belongsToColumn, 'belongsToColumn is required'); assert(
relation.belongsToColumn,
'belongsToColumn is required',
);
assert(relation.belongsToId, 'belongsToId is required'); assert(relation.belongsToId, 'belongsToId is required');
let files = []; let files = [];
@ -22,9 +29,11 @@ module.exports = class FileDBApi {
static async _addFiles(relation, files, options) { static async _addFiles(relation, files, options) {
const transaction = (options && options.transaction) || undefined; const transaction = (options && options.transaction) || undefined;
const currentUser = (options && options.currentUser) || { id: null }; const currentUser = (options && options.currentUser) || {id: null};
const inexistentFiles = files.filter((file) => !!file.new); const inexistentFiles = files.filter(
(file) => !!file.new,
);
for (const file of inexistentFiles) { for (const file of inexistentFiles) {
await db.file.create( await db.file.create(
@ -46,7 +55,11 @@ module.exports = class FileDBApi {
} }
} }
static async _removeLegacyFiles(relation, files, options) { static async _removeLegacyFiles(
relation,
files,
options,
) {
const transaction = (options && options.transaction) || undefined; const transaction = (options && options.transaction) || undefined;
const filesToDelete = await db.file.findAll({ const filesToDelete = await db.file.findAll({
@ -55,9 +68,10 @@ module.exports = class FileDBApi {
belongsToId: relation.belongsToId, belongsToId: relation.belongsToId,
belongsToColumn: relation.belongsToColumn, belongsToColumn: relation.belongsToColumn,
id: { id: {
[db.Sequelize.Op.notIn]: files [db.Sequelize.Op
.notIn]: files
.filter((file) => !file.new) .filter((file) => !file.new)
.map((file) => file.id), .map((file) => file.id)
}, },
}, },
transaction, transaction,

View File

@ -1,155 +0,0 @@
const GenericDBApi = require('./base.api');
const db = require('../models');
/**
 * Global Transition Defaults API
 *
 * Single-row table pattern for platform-wide transition settings.
 * Auto-seeds default values if the table is empty.
 */
class Global_transition_defaultsDBApi extends GenericDBApi {
  static get MODEL() {
    return db.global_transition_defaults;
  }
  static get TABLE_NAME() {
    return 'global_transition_defaults';
  }
  static get SEARCHABLE_FIELDS() {
    return [];
  }
  static get RANGE_FIELDS() {
    return [];
  }
  static get ENUM_FIELDS() {
    return ['transition_type', 'easing'];
  }
  static get CSV_FIELDS() {
    return [
      'id',
      'transition_type',
      'duration_ms',
      'easing',
      'overlay_color',
      'createdAt',
      'updatedAt',
    ];
  }
  static get AUTOCOMPLETE_FIELD() {
    return 'transition_type';
  }
  static get FIELD_DEFAULTS() {
    return {
      transition_type: { default: 'fade' },
      duration_ms: { default: 700 },
      easing: { default: 'ease-in-out' },
      overlay_color: { default: '#000000' },
    };
  }
  // Values used to seed the singleton row when the table is empty.
  static get DEFAULT_ROW() {
    return {
      transition_type: 'fade',
      duration_ms: 700,
      easing: 'ease-in-out',
      overlay_color: '#000000',
    };
  }
  static getFieldMapping(data) {
    const mapped = super.getFieldMapping(data);
    return {
      id: mapped.id || undefined,
      transition_type: mapped.transition_type,
      duration_ms: mapped.duration_ms,
      easing: mapped.easing,
      overlay_color: mapped.overlay_color,
    };
  }
  /**
   * Ensures the singleton row exists.
   * Creates the default row if table is empty.
   * The in-flight promise is cached on the class so concurrent callers share
   * one seeding attempt; on failure the cache is cleared to allow a retry.
   */
  static async ensureInitialized() {
    if (!this.initializationPromise) {
      this.initializationPromise = (async () => {
        let count = 0;
        try {
          count = await this.MODEL.count();
        } catch (error) {
          // Table doesn't exist yet (happens during initial migration)
          if (error?.original?.code !== '42P01') {
            throw error;
          }
          await this.MODEL.sync();
          count = await this.MODEL.count();
        }
        if (count > 0) return;
        const now = new Date();
        await this.MODEL.create({
          ...this.getFieldMapping(this.DEFAULT_ROW),
          createdAt: now,
          updatedAt: now,
        });
      })().catch((error) => {
        // Reset the cache so the next call retries instead of replaying
        // the cached rejection.
        this.initializationPromise = null;
        throw error;
      });
    }
    await this.initializationPromise;
  }
  /**
   * Get the singleton row.
   * Always returns a single object, not an array.
   */
  static async findOne(options = {}) {
    await this.ensureInitialized();
    const record = await this.MODEL.findOne({
      transaction: options.transaction,
    });
    if (!record) return null;
    return record.get({ plain: true });
  }
  /**
   * Alias for findOne to maintain semantic clarity.
   */
  static async get(options = {}) {
    return this.findOne(options);
  }
  // The wrappers below guarantee seeding before delegating to the base class.
  static async update(id, data, options = {}) {
    await this.ensureInitialized();
    return super.update(id, data, options);
  }
  static async findBy(where, options = {}) {
    await this.ensureInitialized();
    return super.findBy(where, options);
  }
  static async findAll(filter = {}, options = {}) {
    await this.ensureInitialized();
    return super.findAll(filter, options);
  }
}
// Shared cache for the lazy-seeding promise (see ensureInitialized).
Global_transition_defaultsDBApi.initializationPromise = null;
module.exports = Global_transition_defaultsDBApi;

View File

@ -0,0 +1,656 @@
const db = require('../models');
const Utils = require('../utils');
const {
getRuntimeEnvironment,
getRuntimeProjectSlug,
} = require('./runtime-context');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Page_elementsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const page_elements = await db.page_elements.create(
{
id: data.id || undefined,
element_type: data.element_type ?? null,
name: data.name ?? null,
sort_order: data.sort_order ?? 0,
is_visible: data.is_visible ?? false,
x_percent: data.x_percent ?? null,
y_percent: data.y_percent ?? null,
width_percent: data.width_percent ?? null,
height_percent: data.height_percent ?? null,
rotation_deg: data.rotation_deg ?? null,
style_json: data.style_json ?? null,
content_json: data.content_json ?? null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await page_elements.setPage( data.page || null, {
transaction,
});
return page_elements;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const page_elementsData = data.map((item, index) => ({
id: item.id || undefined,
element_type: item.element_type ?? null,
name: item.name ?? null,
sort_order: item.sort_order ?? 0,
is_visible: item.is_visible ?? false,
x_percent: item.x_percent ?? null,
y_percent: item.y_percent ?? null,
width_percent: item.width_percent ?? null,
height_percent: item.height_percent ?? null,
rotation_deg: item.rotation_deg ?? null,
style_json: item.style_json ?? null,
content_json: item.content_json ?? null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const page_elements = await db.page_elements.bulkCreate(page_elementsData, { transaction });
// For each item created, replace relation files
return page_elements;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const page_elements = await db.page_elements.findByPk(id, {transaction});
const updatePayload = {};
if (data.element_type !== undefined) updatePayload.element_type = data.element_type;
if (data.name !== undefined) updatePayload.name = data.name;
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
if (data.is_visible !== undefined) updatePayload.is_visible = data.is_visible;
if (data.x_percent !== undefined) updatePayload.x_percent = data.x_percent;
if (data.y_percent !== undefined) updatePayload.y_percent = data.y_percent;
if (data.width_percent !== undefined) updatePayload.width_percent = data.width_percent;
if (data.height_percent !== undefined) updatePayload.height_percent = data.height_percent;
if (data.rotation_deg !== undefined) updatePayload.rotation_deg = data.rotation_deg;
if (data.style_json !== undefined) updatePayload.style_json = data.style_json;
if (data.content_json !== undefined) updatePayload.content_json = data.content_json;
updatePayload.updatedById = currentUser.id;
await page_elements.update(updatePayload, {transaction});
if (data.page !== undefined) {
await page_elements.setPage(
data.page,
{ transaction }
);
}
return page_elements;
}
  /**
   * Soft-delete many page elements: stamp deletedBy, then destroy them
   * (paranoid delete). Returns the records as fetched before deletion.
   *
   * NOTE(review): the `transaction` from options is only used for the initial
   * findAll; the stamping/destroy loop runs inside a fresh managed transaction
   * that shadows it — confirm this split is intentional.
   */
  static async deleteByIds(ids, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const page_elements = await db.page_elements.findAll({
      where: {
        id: {
          [Op.in]: ids,
        },
      },
      transaction,
    });
    await db.sequelize.transaction(async (transaction) => {
      // First pass records who deleted; second pass performs the destroy.
      for (const record of page_elements) {
        await record.update(
          {deletedBy: currentUser.id},
          {transaction}
        );
      }
      for (const record of page_elements) {
        await record.destroy({transaction});
      }
    });
    return page_elements;
  }
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const page_elements = await db.page_elements.findByPk(id, options);
await page_elements.update({
deletedBy: currentUser.id
}, {
transaction,
});
await page_elements.destroy({
transaction
});
return page_elements;
}
  /**
   * Find one page element by an arbitrary `where`, scoped to the runtime
   * environment / project slug (from options) via the parent page include.
   * Returns a plain object with `page` populated, or null-ish when no match.
   */
  static async findBy(where, options) {
    const transaction = (options && options.transaction) || undefined;
    const runtimeEnvironment = getRuntimeEnvironment(options);
    const runtimeProjectSlug = getRuntimeProjectSlug(options);
    // Parent page join: becomes an INNER JOIN only when runtime scoping
    // is active, so unscoped lookups still match pages freely.
    const pageInclude = {
      model: db.tour_pages,
      as: 'page',
      required: Boolean(runtimeEnvironment || runtimeProjectSlug),
      where: runtimeEnvironment ? { environment: runtimeEnvironment } : {},
      include: runtimeProjectSlug
        ? [{
          model: db.projects,
          as: 'project',
          required: true,
          where: { slug: runtimeProjectSlug },
        }]
        : [],
    };
    const page_elements = await db.page_elements.findOne(
      { where, include: [pageInclude], transaction },
    );
    if (!page_elements) {
      return page_elements;
    }
    const output = page_elements.get({plain: true});
    // Re-fetch the relation so the output carries the full page record.
    output.page = await page_elements.getPage({
      transaction
    });
    return output;
  }
  /**
   * List page elements with filtering, pagination, sorting and runtime
   * (environment / project slug) scoping via the parent page include.
   * Returns { rows, count }; with options.countOnly only count is meaningful.
   *
   * NOTE(review): `filter.page` is used both as the pagination page number
   * (offset computation below) and as the parent-page relation filter in the
   * include — a single value cannot serve both purposes; confirm which one
   * callers rely on.
   */
  static async findAll(
    filter,
    options
  ) {
    filter = filter || {};
    const limit = filter.limit || 0;
    let offset = 0;
    let where = {};
    // +filter.page is NaN when absent; NaN offset is dropped later by the
    // `offset ? ... : undefined` guard.
    const currentPage = +filter.page;
    offset = currentPage * limit;
    let include = [
      {
        model: db.tour_pages,
        as: 'page',
        // '|'-separated terms match either page ids or page names.
        where: filter.page ? {
          [Op.or]: [
            { id: { [Op.in]: filter.page.split('|').map(term => Utils.uuid(term)) } },
            {
              name: {
                [Op.or]: filter.page.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
              }
            },
          ]
        } : {},
      },
    ];
    const runtimeEnvironment = getRuntimeEnvironment(options);
    const runtimeProjectSlug = getRuntimeProjectSlug(options);
    // Runtime scoping narrows the page join and forces it to an INNER JOIN.
    if (runtimeEnvironment) {
      include[0].where = {
        ...(include[0].where || {}),
        environment: runtimeEnvironment,
      };
      include[0].required = true;
    }
    if (runtimeProjectSlug) {
      include[0].include = [{
        model: db.projects,
        as: 'project',
        required: true,
        where: { slug: runtimeProjectSlug },
      }];
      include[0].required = true;
    }
    if (filter) {
      if (filter.id) {
        where = {
          ...where,
          ['id']: Utils.uuid(filter.id),
        };
      }
      // Text filters: case-insensitive substring match per field.
      if (filter.name) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'page_elements',
            'name',
            filter.name,
          ),
        };
      }
      if (filter.style_json) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'page_elements',
            'style_json',
            filter.style_json,
          ),
        };
      }
      if (filter.content_json) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'page_elements',
            'content_json',
            filter.content_json,
          ),
        };
      }
      // Numeric range filters: [start, end]; either bound may be omitted.
      if (filter.sort_orderRange) {
        const [start, end] = filter.sort_orderRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            sort_order: {
              ...where.sort_order,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            sort_order: {
              ...where.sort_order,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.x_percentRange) {
        const [start, end] = filter.x_percentRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            x_percent: {
              ...where.x_percent,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            x_percent: {
              ...where.x_percent,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.y_percentRange) {
        const [start, end] = filter.y_percentRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            y_percent: {
              ...where.y_percent,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            y_percent: {
              ...where.y_percent,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.width_percentRange) {
        const [start, end] = filter.width_percentRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            width_percent: {
              ...where.width_percent,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            width_percent: {
              ...where.width_percent,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.height_percentRange) {
        const [start, end] = filter.height_percentRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            height_percent: {
              ...where.height_percent,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            height_percent: {
              ...where.height_percent,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.rotation_degRange) {
        const [start, end] = filter.rotation_degRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            rotation_deg: {
              ...where.rotation_deg,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            rotation_deg: {
              ...where.rotation_deg,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.active !== undefined) {
        where = {
          ...where,
          active: filter.active === true || filter.active === 'true'
        };
      }
      // Exact-match filters.
      if (filter.element_type) {
        where = {
          ...where,
          element_type: filter.element_type,
        };
      }
      if (filter.is_visible) {
        where = {
          ...where,
          is_visible: filter.is_visible,
        };
      }
      if (filter.createdAtRange) {
        const [start, end] = filter.createdAtRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            ['createdAt']: {
              ...where.createdAt,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            ['createdAt']: {
              ...where.createdAt,
              [Op.lte]: end,
            },
          };
        }
      }
    }
    const queryOptions = {
      where,
      include,
      distinct: true,
      order: filter.field && filter.sort
        ? [[filter.field, filter.sort]]
        : [['createdAt', 'desc']],
      transaction: options?.transaction,
      // NOTE(review): SQL logging left enabled — likely debug residue.
      logging: console.log
    };
    if (!options?.countOnly) {
      queryOptions.limit = limit ? Number(limit) : undefined;
      queryOptions.offset = offset ? Number(offset) : undefined;
    }
    try {
      const { rows, count } = await db.page_elements.findAndCountAll(queryOptions);
      return {
        rows: options?.countOnly ? [] : rows,
        count: count
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'page_elements',
'name',
query,
),
],
};
}
const records = await db.page_elements.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -0,0 +1,589 @@
const db = require('../models');
const Utils = require('../utils');
const {
getRuntimeEnvironment,
getRuntimeProjectSlug,
} = require('./runtime-context');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Page_linksDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const page_links = await db.page_links.create(
{
id: data.id || undefined,
direction: data.direction
||
null
,
external_url: data.external_url
||
null
,
is_active: data.is_active
||
false
,
trigger_selector: data.trigger_selector
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await page_links.setFrom_page( data.from_page || null, {
transaction,
});
await page_links.setTo_page( data.to_page || null, {
transaction,
});
await page_links.setTransition( data.transition || null, {
transaction,
});
return page_links;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const page_linksData = data.map((item, index) => ({
id: item.id || undefined,
direction: item.direction
||
null
,
external_url: item.external_url
||
null
,
is_active: item.is_active
||
false
,
trigger_selector: item.trigger_selector
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const page_links = await db.page_links.bulkCreate(page_linksData, { transaction });
// For each item created, replace relation files
return page_links;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const page_links = await db.page_links.findByPk(id, {transaction});
const updatePayload = {};
if (data.direction !== undefined) updatePayload.direction = data.direction;
if (data.external_url !== undefined) updatePayload.external_url = data.external_url;
if (data.is_active !== undefined) updatePayload.is_active = data.is_active;
if (data.trigger_selector !== undefined) updatePayload.trigger_selector = data.trigger_selector;
updatePayload.updatedById = currentUser.id;
await page_links.update(updatePayload, {transaction});
if (data.from_page !== undefined) {
await page_links.setFrom_page(
data.from_page,
{ transaction }
);
}
if (data.to_page !== undefined) {
await page_links.setTo_page(
data.to_page,
{ transaction }
);
}
if (data.transition !== undefined) {
await page_links.setTransition(
data.transition,
{ transaction }
);
}
return page_links;
}
  /**
   * Soft-delete many page links: stamp deletedBy, then destroy them
   * (paranoid delete). Returns the records as fetched before deletion.
   *
   * NOTE(review): the `transaction` from options is only used for the initial
   * findAll; the stamping/destroy loop runs inside a fresh managed transaction
   * that shadows it — confirm this split is intentional.
   */
  static async deleteByIds(ids, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const page_links = await db.page_links.findAll({
      where: {
        id: {
          [Op.in]: ids,
        },
      },
      transaction,
    });
    await db.sequelize.transaction(async (transaction) => {
      // First pass records who deleted; second pass performs the destroy.
      for (const record of page_links) {
        await record.update(
          {deletedBy: currentUser.id},
          {transaction}
        );
      }
      for (const record of page_links) {
        await record.destroy({transaction});
      }
    });
    return page_links;
  }
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const page_links = await db.page_links.findByPk(id, options);
await page_links.update({
deletedBy: currentUser.id
}, {
transaction,
});
await page_links.destroy({
transaction
});
return page_links;
}
  /**
   * Find one page link by an arbitrary `where`, scoped to the runtime
   * environment / project slug via the from_page and transition includes.
   * Returns a plain object with from_page, to_page and transition populated.
   *
   * NOTE(review): to_page is not part of the scoped include list — it is
   * fetched afterwards without runtime scoping; confirm this is intended.
   */
  static async findBy(where, options) {
    const transaction = (options && options.transaction) || undefined;
    const runtimeEnvironment = getRuntimeEnvironment(options);
    const runtimeProjectSlug = getRuntimeProjectSlug(options);
    // Shared nested include restricting a relation's project by slug.
    const buildProjectInclude = () => (
      runtimeProjectSlug
        ? [{
          model: db.projects,
          as: 'project',
          required: true,
          where: { slug: runtimeProjectSlug },
        }]
        : []
    );
    const page_links = await db.page_links.findOne(
      {
        where,
        include: [
          {
            model: db.tour_pages,
            as: 'from_page',
            // INNER JOIN only when runtime scoping is active.
            required: Boolean(runtimeEnvironment || runtimeProjectSlug),
            where: runtimeEnvironment ? { environment: runtimeEnvironment } : {},
            include: buildProjectInclude(),
          },
          {
            model: db.transitions,
            as: 'transition',
            // Transition is optional, so never force an INNER JOIN here.
            required: false,
            where: runtimeEnvironment ? { environment: runtimeEnvironment } : {},
            include: buildProjectInclude(),
          },
        ],
        transaction,
      },
    );
    if (!page_links) {
      return page_links;
    }
    const output = page_links.get({plain: true});
    // Re-fetch relations so the output carries the full related records.
    output.from_page = await page_links.getFrom_page({
      transaction
    });
    output.to_page = await page_links.getTo_page({
      transaction
    });
    output.transition = await page_links.getTransition({
      transaction
    });
    return output;
  }
  /**
   * List page links with filtering, pagination, sorting and runtime
   * (environment / project slug) scoping applied to the from_page, to_page
   * and transition includes. Returns { rows, count }; with options.countOnly
   * only count is meaningful.
   */
  static async findAll(
    filter,
    options
  ) {
    filter = filter || {};
    const limit = filter.limit || 0;
    let offset = 0;
    let where = {};
    // +filter.page is NaN when absent; NaN offset is dropped later by the
    // `offset ? ... : undefined` guard.
    const currentPage = +filter.page;
    offset = currentPage * limit;
    // Relation filters accept '|'-separated terms matched against the
    // related record's id or name.
    let include = [
      {
        model: db.tour_pages,
        as: 'from_page',
        where: filter.from_page ? {
          [Op.or]: [
            { id: { [Op.in]: filter.from_page.split('|').map(term => Utils.uuid(term)) } },
            {
              name: {
                [Op.or]: filter.from_page.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
              }
            },
          ]
        } : {},
      },
      {
        model: db.tour_pages,
        as: 'to_page',
        where: filter.to_page ? {
          [Op.or]: [
            { id: { [Op.in]: filter.to_page.split('|').map(term => Utils.uuid(term)) } },
            {
              name: {
                [Op.or]: filter.to_page.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
              }
            },
          ]
        } : {},
      },
      {
        model: db.transitions,
        as: 'transition',
        where: filter.transition ? {
          [Op.or]: [
            { id: { [Op.in]: filter.transition.split('|').map(term => Utils.uuid(term)) } },
            {
              name: {
                [Op.or]: filter.transition.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
              }
            },
          ]
        } : {},
      },
    ];
    const runtimeEnvironment = getRuntimeEnvironment(options);
    const runtimeProjectSlug = getRuntimeProjectSlug(options);
    // Runtime scoping narrows each related record to the active environment;
    // the transition join stays optional (required: false).
    if (runtimeEnvironment) {
      include[0].where = {
        ...(include[0].where || {}),
        environment: runtimeEnvironment,
      };
      include[0].required = true;
      include[1].where = {
        ...(include[1].where || {}),
        environment: runtimeEnvironment,
      };
      include[2].where = {
        ...(include[2].where || {}),
        environment: runtimeEnvironment,
      };
      include[2].required = false;
    }
    if (runtimeProjectSlug) {
      include[0].include = [{
        model: db.projects,
        as: 'project',
        required: true,
        where: { slug: runtimeProjectSlug },
      }];
      include[0].required = true;
      include[1].include = [{
        model: db.projects,
        as: 'project',
        required: true,
        where: { slug: runtimeProjectSlug },
      }];
      include[2].include = [{
        model: db.projects,
        as: 'project',
        required: true,
        where: { slug: runtimeProjectSlug },
      }];
      include[2].required = false;
    }
    if (filter) {
      if (filter.id) {
        where = {
          ...where,
          ['id']: Utils.uuid(filter.id),
        };
      }
      // Text filters: case-insensitive substring match per field.
      if (filter.external_url) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'page_links',
            'external_url',
            filter.external_url,
          ),
        };
      }
      if (filter.trigger_selector) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'page_links',
            'trigger_selector',
            filter.trigger_selector,
          ),
        };
      }
      if (filter.active !== undefined) {
        where = {
          ...where,
          active: filter.active === true || filter.active === 'true'
        };
      }
      // Exact-match filters.
      if (filter.direction) {
        where = {
          ...where,
          direction: filter.direction,
        };
      }
      if (filter.is_active) {
        where = {
          ...where,
          is_active: filter.is_active,
        };
      }
      if (filter.createdAtRange) {
        const [start, end] = filter.createdAtRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            ['createdAt']: {
              ...where.createdAt,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            ['createdAt']: {
              ...where.createdAt,
              [Op.lte]: end,
            },
          };
        }
      }
    }
    const queryOptions = {
      where,
      include,
      distinct: true,
      order: filter.field && filter.sort
        ? [[filter.field, filter.sort]]
        : [['createdAt', 'desc']],
      transaction: options?.transaction,
      // NOTE(review): SQL logging left enabled — likely debug residue.
      logging: console.log
    };
    if (!options?.countOnly) {
      queryOptions.limit = limit ? Number(limit) : undefined;
      queryOptions.offset = offset ? Number(offset) : undefined;
    }
    try {
      const { rows, count } = await db.page_links.findAndCountAll(queryOptions);
      return {
        rows: options?.countOnly ? [] : rows,
        count: count
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'page_links',
'direction',
query,
),
],
};
}
const records = await db.page_links.findAll({
attributes: [ 'id', 'direction' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['direction', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.direction,
}));
}
};

View File

@ -1,53 +1,335 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class PermissionsDBApi extends GenericDBApi {
static get MODEL() {
return db.permissions;
}
static get TABLE_NAME() {
return 'permissions';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['name']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class PermissionsDBApi {
return [];
}
static get ENUM_FIELDS() {
return []; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const permissions = await db.permissions.create(
return ['id', 'name', 'createdAt']; {
} id: data.id || undefined,
name: data.name
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'name';
}
static get ASSOCIATIONS() {
return [];
}
static get FIND_BY_INCLUDES() {
return [];
}
static get FIND_ALL_INCLUDES() { return permissions;
return []; }
}
static getFieldMapping(data) { static async bulkImport(data, options) {
return { const currentUser = (options && options.currentUser) || { id: null };
id: data.id || undefined, const transaction = (options && options.transaction) || undefined;
name: data.name || null,
};
}
}
module.exports = PermissionsDBApi; // Prepare data - wrapping individual data transformations in a map() method
const permissionsData = data.map((item, index) => ({
id: item.id || undefined,
name: item.name
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const permissions = await db.permissions.bulkCreate(permissionsData, { transaction });
// For each item created, replace relation files
return permissions;
}
/**
 * Update a permissions record by primary key.
 * Only fields explicitly present in `data` are written; `updatedById`
 * is always stamped with the acting user's id (or null).
 *
 * @param {string} id - Primary key of the record to update.
 * @param {Object} data - Partial payload; only `name` is applied here.
 * @param {Object} [options] - May carry `currentUser` and `transaction`.
 * @returns {Promise<Object>} The updated Sequelize instance.
 */
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
// NOTE(review): no null check — an unknown id makes `permissions.update`
// below throw a TypeError; confirm callers expect that behavior.
const permissions = await db.permissions.findByPk(id, {transaction});
const updatePayload = {};
// `undefined` means "not provided"; null is a legitimate value to store.
if (data.name !== undefined) updatePayload.name = data.name;
updatePayload.updatedById = currentUser.id;
await permissions.update(updatePayload, {transaction});
return permissions;
}
/**
 * Soft-delete multiple permissions records by id: stamp `deletedBy`
 * on each row, then destroy it.
 *
 * @param {string[]} ids - Primary keys of the records to delete.
 * @param {Object} [options] - May carry `currentUser` and `transaction`.
 * @returns {Promise<Object[]>} The (now deleted) Sequelize instances.
 */
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
// NOTE(review): this outer `transaction` is used only for the findAll;
// the updates/destroys below run in a NEW managed transaction whose
// callback parameter shadows this variable — confirm that is intended.
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
// Managed transaction: commits on success, rolls back if any step throws.
await db.sequelize.transaction(async (transaction) => {
for (const record of permissions) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of permissions) {
await record.destroy({transaction});
}
});
return permissions;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.findByPk(id, options);
await permissions.update({
deletedBy: currentUser.id
}, {
transaction,
});
await permissions.destroy({
transaction
});
return permissions;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.findOne({
where,
transaction,
});
if (!permissions) {
return permissions;
}
const output = permissions.get({plain: true});
return output;
}
/**
 * Paged listing of permissions with optional filters.
 * Supported filter keys: id (UUID), name (ilike), active (bool/'true'),
 * createdAtRange ([start, end]), plus limit/page for paging and
 * field/sort for ordering.
 *
 * @param {Object} filter - Query filters described above.
 * @param {Object} [options] - May carry `transaction` and `countOnly`.
 * @returns {Promise<{rows: Object[], count: number}>} rows is [] when
 *   `options.countOnly` is set.
 */
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
// NOTE(review): `+filter.page` is NaN when page is absent, making
// offset NaN; the `offset ? … : undefined` guard below then drops it —
// confirm paging callers always pass a numeric page.
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
// Case-insensitive substring match on name.
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike(
'permissions',
'name',
filter.name,
),
};
}
// Accepts both boolean true and the string 'true' from query params.
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
// Inclusive createdAt range; each bound is optional.
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
// NOTE(review): per-query SQL logging to console — looks like a debug
// leftover; confirm before shipping.
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.permissions.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'permissions',
'name',
query,
),
],
};
}
const records = await db.permissions.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -1,90 +1,571 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class Presigned_url_requestsDBApi extends GenericDBApi {
static get MODEL() {
return db.presigned_url_requests;
}
static get TABLE_NAME() {
return 'presigned_url_requests';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['requested_key', 'mime_type', 'status']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class Presigned_url_requestsDBApi {
return ['requested_size_mb', 'expires_at'];
}
static get ENUM_FIELDS() {
return ['purpose', 'asset_type']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const presigned_url_requests = await db.presigned_url_requests.create(
return [ {
'id', id: data.id || undefined,
'purpose',
'asset_type', purpose: data.purpose
'requested_key', ||
'mime_type', null
'status', ,
'createdAt',
]; asset_type: data.asset_type
} ||
null
,
requested_key: data.requested_key
||
null
,
mime_type: data.mime_type
||
null
,
requested_size_mb: data.requested_size_mb
||
null
,
expires_at: data.expires_at
||
null
,
status: data.status
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'requested_key'; await presigned_url_requests.setProject( data.project || null, {
} transaction,
});
await presigned_url_requests.setUser( data.user || null, {
transaction,
});
static get ASSOCIATIONS() {
return [
{ field: 'project', setter: 'setProject', isArray: false },
{ field: 'user', setter: 'setUser', isArray: false },
];
}
static get FIND_BY_INCLUDES() {
return [{ association: 'project' }, { association: 'user' }];
}
static get FIND_ALL_INCLUDES() { return presigned_url_requests;
return [ }
{ model: db.projects, as: 'project', required: false },
{ model: db.users, as: 'user', required: false },
]; static async bulkImport(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const presigned_url_requestsData = data.map((item, index) => ({
id: item.id || undefined,
purpose: item.purpose
||
null
,
asset_type: item.asset_type
||
null
,
requested_key: item.requested_key
||
null
,
mime_type: item.mime_type
||
null
,
requested_size_mb: item.requested_size_mb
||
null
,
expires_at: item.expires_at
||
null
,
status: item.status
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const presigned_url_requests = await db.presigned_url_requests.bulkCreate(presigned_url_requestsData, { transaction });
// For each item created, replace relation files
return presigned_url_requests;
}
/**
 * Update a presigned_url_requests record by primary key.
 * Scalar fields present in `data` are written, `updatedById` is stamped,
 * and the `project`/`user` associations are re-pointed when provided.
 *
 * @param {string} id - Primary key of the record to update.
 * @param {Object} data - Partial payload of scalar fields and/or
 *   `project`/`user` association ids.
 * @param {Object} [options] - May carry `currentUser` and `transaction`.
 * @returns {Promise<Object>} The updated Sequelize instance.
 */
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
// NOTE(review): no null check — an unknown id throws a TypeError below.
const presigned_url_requests = await db.presigned_url_requests.findByPk(id, {transaction});
// Only copy keys the caller actually supplied (undefined = untouched).
const updatePayload = {};
if (data.purpose !== undefined) updatePayload.purpose = data.purpose;
if (data.asset_type !== undefined) updatePayload.asset_type = data.asset_type;
if (data.requested_key !== undefined) updatePayload.requested_key = data.requested_key;
if (data.mime_type !== undefined) updatePayload.mime_type = data.mime_type;
if (data.requested_size_mb !== undefined) updatePayload.requested_size_mb = data.requested_size_mb;
if (data.expires_at !== undefined) updatePayload.expires_at = data.expires_at;
if (data.status !== undefined) updatePayload.status = data.status;
updatePayload.updatedById = currentUser.id;
await presigned_url_requests.update(updatePayload, {transaction});
// Association setters: passing null clears the link.
if (data.project !== undefined) {
await presigned_url_requests.setProject(
data.project,
{ transaction }
);
}
if (data.user !== undefined) {
await presigned_url_requests.setUser(
data.user,
{ transaction }
);
}
return presigned_url_requests;
}
/**
 * Soft-delete multiple presigned_url_requests records by id: stamp
 * `deletedBy` on each row, then destroy it.
 *
 * @param {string[]} ids - Primary keys of the records to delete.
 * @param {Object} [options] - May carry `currentUser` and `transaction`.
 * @returns {Promise<Object[]>} The (now deleted) Sequelize instances.
 */
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
// NOTE(review): this outer `transaction` is used only for the findAll;
// the updates/destroys below run in a NEW managed transaction whose
// callback parameter shadows this variable — confirm that is intended.
const transaction = (options && options.transaction) || undefined;
const presigned_url_requests = await db.presigned_url_requests.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
// Managed transaction: commits on success, rolls back if any step throws.
await db.sequelize.transaction(async (transaction) => {
for (const record of presigned_url_requests) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of presigned_url_requests) {
await record.destroy({transaction});
}
});
return presigned_url_requests;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const presigned_url_requests = await db.presigned_url_requests.findByPk(id, options);
await presigned_url_requests.update({
deletedBy: currentUser.id
}, {
transaction,
});
await presigned_url_requests.destroy({
transaction
});
return presigned_url_requests;
}
/**
 * Fetch a single presigned_url_requests record matching `where`,
 * with its `project` and `user` associations resolved eagerly
 * (two extra queries) onto the plain output object.
 *
 * @param {Object} where - Sequelize where clause.
 * @param {Object} [options] - May carry `transaction`.
 * @returns {Promise<Object|null>} Plain object with `project`/`user`
 *   attached, or the falsy findOne result when no row matches.
 */
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const presigned_url_requests = await db.presigned_url_requests.findOne({
where,
transaction,
});
if (!presigned_url_requests) {
return presigned_url_requests;
}
const output = presigned_url_requests.get({plain: true});
output.project = await presigned_url_requests.getProject({
transaction
});
output.user = await presigned_url_requests.getUser({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get RELATION_FILTERS() {
return [
{ {
filterKey: 'project',
model: db.projects, model: db.projects,
as: 'project', as: 'project',
searchField: 'name',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
{ {
filterKey: 'user',
model: db.users, model: db.users,
as: 'user', as: 'user',
searchField: 'firstName',
where: filter.user ? {
[Op.or]: [
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
{
firstName: {
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static getFieldMapping(data) { if (filter) {
return { if (filter.id) {
id: data.id || undefined, where = {
purpose: data.purpose || null, ...where,
asset_type: data.asset_type || null, ['id']: Utils.uuid(filter.id),
requested_key: data.requested_key || null, };
mime_type: data.mime_type || null, }
requested_size_mb: data.requested_size_mb || null,
expires_at: data.expires_at || null,
status: data.status || null,
};
}
}
module.exports = Presigned_url_requestsDBApi;
if (filter.requested_key) {
where = {
...where,
[Op.and]: Utils.ilike(
'presigned_url_requests',
'requested_key',
filter.requested_key,
),
};
}
if (filter.mime_type) {
where = {
...where,
[Op.and]: Utils.ilike(
'presigned_url_requests',
'mime_type',
filter.mime_type,
),
};
}
if (filter.status) {
where = {
...where,
[Op.and]: Utils.ilike(
'presigned_url_requests',
'status',
filter.status,
),
};
}
if (filter.requested_size_mbRange) {
const [start, end] = filter.requested_size_mbRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
requested_size_mb: {
...where.requested_size_mb,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
requested_size_mb: {
...where.requested_size_mb,
[Op.lte]: end,
},
};
}
}
if (filter.expires_atRange) {
const [start, end] = filter.expires_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
expires_at: {
...where.expires_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
expires_at: {
...where.expires_at,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.purpose) {
where = {
...where,
purpose: filter.purpose,
};
}
if (filter.asset_type) {
where = {
...where,
asset_type: filter.asset_type,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.presigned_url_requests.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'presigned_url_requests',
'requested_key',
query,
),
],
};
}
const records = await db.presigned_url_requests.findAll({
attributes: [ 'id', 'requested_key' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['requested_key', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.requested_key,
}));
}
};

View File

@ -1,4 +1,4 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils'); const Utils = require('../utils');
const { const {
@ -6,194 +6,591 @@ const {
applyRuntimeProjectFilter, applyRuntimeProjectFilter,
} = require('./runtime-context'); } = require('./runtime-context');
const Sequelize = db.Sequelize; const Sequelize = db.Sequelize;
const Op = Sequelize.Op; const Op = Sequelize.Op;
class Project_audio_tracksDBApi extends GenericDBApi { module.exports = class Project_audio_tracksDBApi {
static get MODEL() {
return db.project_audio_tracks;
}
static get TABLE_NAME() {
return 'project_audio_tracks'; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get SEARCHABLE_FIELDS() { const project_audio_tracks = await db.project_audio_tracks.create(
return ['source_key', 'name', 'slug', 'url']; {
} id: data.id || undefined,
static get RANGE_FIELDS() { environment: data.environment
return ['volume', 'sort_order']; ||
} null
,
static get ENUM_FIELDS() {
return ['environment', 'loop', 'is_enabled']; source_key: data.source_key
} ||
null
static get CSV_FIELDS() { ,
return [
'id', name: data.name
'environment', ||
'source_key', null
'name', ,
'slug',
'url', slug: data.slug
'loop', ||
'volume', null
'createdAt', ,
];
} url: data.url
||
static get AUTOCOMPLETE_FIELD() { null
return 'name'; ,
}
loop: data.loop
static get ASSOCIATIONS() { ||
return [{ field: 'project', setter: 'setProject', isArray: false }]; false
}
,
static getFieldMapping(data) {
return { volume: data.volume
id: data.id || undefined, ||
environment: data.environment || null, null
source_key: data.source_key || null, ,
name: data.name || null,
slug: data.slug || null, sort_order: data.sort_order
url: data.url || null, ||
loop: data.loop || false, null
volume: data.volume || null, ,
sort_order: data.sort_order || null,
is_enabled: data.is_enabled || false, is_enabled: data.is_enabled
}; ||
} false
static async findBy(where, options = {}) { ,
const transaction = options.transaction;
const queryWhere = applyRuntimeEnvironment({ ...where }, options); importHash: data.importHash || null,
const projectInclude = applyRuntimeProjectFilter( createdById: currentUser.id,
{ model: db.projects, as: 'project' }, updatedById: currentUser.id,
options, },
{ transaction },
); );
const record = await this.MODEL.findOne({
where: queryWhere, await project_audio_tracks.setProject( data.project || null, {
transaction, transaction,
include: [projectInclude], });
});
if (!record) return null;
return record.get({ plain: true });
}
static async findAll(filter = {}, options = {}) {
filter = filter || {};
const limit = filter.limit || 0;
const currentPage = +filter.page || 0;
const offset = currentPage * limit;
let where = {}; return project_audio_tracks;
}
const terms = filter.project ? filter.project.split('|') : []; static async bulkImport(data, options) {
const validUuids = Utils.filterValidUuids(terms); const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const project_audio_tracksData = data.map((item, index) => ({
id: item.id || undefined,
environment: item.environment
||
null
,
source_key: item.source_key
||
null
,
name: item.name
||
null
,
slug: item.slug
||
null
,
url: item.url
||
null
,
loop: item.loop
||
false
,
volume: item.volume
||
null
,
sort_order: item.sort_order
||
null
,
is_enabled: item.is_enabled
||
false
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const project_audio_tracks = await db.project_audio_tracks.bulkCreate(project_audio_tracksData, { transaction });
// For each item created, replace relation files
return project_audio_tracks;
}
/**
 * Update a project_audio_tracks record by primary key.
 * Scalar fields present in `data` are written, `updatedById` is stamped,
 * and the `project` association is re-pointed when provided.
 *
 * @param {string} id - Primary key of the record to update.
 * @param {Object} data - Partial payload of scalar fields and/or a
 *   `project` association id.
 * @param {Object} [options] - May carry `currentUser` and `transaction`.
 * @returns {Promise<Object>} The updated Sequelize instance.
 */
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
// NOTE(review): no null check — an unknown id throws a TypeError below.
const project_audio_tracks = await db.project_audio_tracks.findByPk(id, {transaction});
// Only copy keys the caller actually supplied (undefined = untouched).
const updatePayload = {};
if (data.environment !== undefined) updatePayload.environment = data.environment;
if (data.source_key !== undefined) updatePayload.source_key = data.source_key;
if (data.name !== undefined) updatePayload.name = data.name;
if (data.slug !== undefined) updatePayload.slug = data.slug;
if (data.url !== undefined) updatePayload.url = data.url;
if (data.loop !== undefined) updatePayload.loop = data.loop;
if (data.volume !== undefined) updatePayload.volume = data.volume;
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
if (data.is_enabled !== undefined) updatePayload.is_enabled = data.is_enabled;
updatePayload.updatedById = currentUser.id;
await project_audio_tracks.update(updatePayload, {transaction});
// Association setter: passing null clears the link.
if (data.project !== undefined) {
await project_audio_tracks.setProject(
data.project,
{ transaction }
);
}
return project_audio_tracks;
}
/**
 * Soft-delete multiple project_audio_tracks records by id: stamp
 * `deletedBy` on each row, then destroy it.
 *
 * @param {string[]} ids - Primary keys of the records to delete.
 * @param {Object} [options] - May carry `currentUser` and `transaction`.
 * @returns {Promise<Object[]>} The (now deleted) Sequelize instances.
 */
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
// NOTE(review): this outer `transaction` is used only for the findAll;
// the updates/destroys below run in a NEW managed transaction whose
// callback parameter shadows this variable — confirm that is intended.
const transaction = (options && options.transaction) || undefined;
const project_audio_tracks = await db.project_audio_tracks.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
// Managed transaction: commits on success, rolls back if any step throws.
await db.sequelize.transaction(async (transaction) => {
for (const record of project_audio_tracks) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of project_audio_tracks) {
await record.destroy({transaction});
}
});
return project_audio_tracks;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const project_audio_tracks = await db.project_audio_tracks.findByPk(id, options);
await project_audio_tracks.update({
deletedBy: currentUser.id
}, {
transaction,
});
await project_audio_tracks.destroy({
transaction
});
return project_audio_tracks;
}
/**
 * Fetch a single project_audio_tracks record matching `where`, with the
 * runtime environment constraint applied to the where clause and the
 * runtime project filter applied to the eager `project` include.
 * The `project` association is re-fetched onto the plain output.
 *
 * @param {Object} where - Sequelize where clause (copied before mutation).
 * @param {Object} [options] - May carry `transaction`; also consumed by
 *   the runtime-context helpers (exact keys defined in ./runtime-context).
 * @returns {Promise<Object|null>} Plain object with `project` attached,
 *   or the falsy findOne result when no row matches.
 */
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
// Spread-copy so the caller's where object is not mutated by the helper.
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
const projectInclude = applyRuntimeProjectFilter(
{
model: db.projects,
as: 'project',
},
options,
);
const project_audio_tracks = await db.project_audio_tracks.findOne(
{ where: queryWhere, include: [projectInclude], transaction },
);
if (!project_audio_tracks) {
return project_audio_tracks;
}
const output = project_audio_tracks.get({plain: true});
output.project = await project_audio_tracks.getProject({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
filter = filter || {};
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [ let include = [
{ {
model: db.projects, model: db.projects,
as: 'project', as: 'project',
where: filter.project
? { where: filter.project ? {
[Op.or]: [ [Op.or]: [
...(validUuids.length > 0 { id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
? [{ id: { [Op.in]: validUuids } }] {
: []), name: {
{ [Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
name: { }
[Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })), },
}, ]
}, } : {},
],
}
: {},
}, },
]; ];
include[0] = applyRuntimeProjectFilter(include[0], options);
include[0] = applyRuntimeProjectFilter(include[0], options); if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.id) {
if (!Utils.isValidUuid(filter.id)) { if (filter.source_key) {
return { rows: [], count: 0 }; where = {
} ...where,
where.id = filter.id; [Op.and]: Utils.ilike(
} 'project_audio_tracks',
'source_key',
filter.source_key,
),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike(
'project_audio_tracks',
'name',
filter.name,
),
};
}
if (filter.slug) {
where = {
...where,
[Op.and]: Utils.ilike(
'project_audio_tracks',
'slug',
filter.slug,
),
};
}
if (filter.url) {
where = {
...where,
[Op.and]: Utils.ilike(
'project_audio_tracks',
'url',
filter.url,
),
};
}
for (const field of this.SEARCHABLE_FIELDS) {
if (filter[field]) {
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
}
}
for (const field of this.RANGE_FIELDS) {
const rangeKey = `${field}Range`; if (filter.volumeRange) {
if (filter[rangeKey]) { const [start, end] = filter.volumeRange;
const [start, end] = filter[rangeKey];
if (start !== undefined && start !== null && start !== '') { if (start !== undefined && start !== null && start !== '') {
where[field] = { ...where[field], [Op.gte]: start }; where = {
...where,
volume: {
...where.volume,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
volume: {
...where.volume,
[Op.lte]: end,
},
};
}
}
if (filter.sort_orderRange) {
const [start, end] = filter.sort_orderRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
sort_order: {
...where.sort_order,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
sort_order: {
...where.sort_order,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.environment) {
where = {
...where,
environment: filter.environment,
};
}
if (filter.loop) {
where = {
...where,
loop: filter.loop,
};
}
if (filter.is_enabled) {
where = {
...where,
is_enabled: filter.is_enabled,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
} }
if (end !== undefined && end !== null && end !== '') {
where[field] = { ...where[field], [Op.lte]: end }; where = applyRuntimeEnvironment(where, options);
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.project_audio_tracks.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
} }
}
} }
for (const field of this.ENUM_FIELDS) { static async findAllAutocomplete(query, limit, offset, ) {
if (filter[field] !== undefined) { let where = {};
where[field] = filter[field];
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'project_audio_tracks',
'name',
query,
),
],
};
}
const records = await db.project_audio_tracks.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
} }
if (filter.active !== undefined) {
where.active = filter.active === true || filter.active === 'true'; };
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where.createdAt = { ...where.createdAt, [Op.gte]: start };
}
if (end !== undefined && end !== null && end !== '') {
where.createdAt = { ...where.createdAt, [Op.lte]: end };
}
}
where = applyRuntimeEnvironment(where, options);
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options.transaction,
};
if (!options.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
return {
rows: options.countOnly ? [] : rows,
count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
}
module.exports = Project_audio_tracksDBApi;

View File

@ -1,390 +0,0 @@
const GenericDBApi = require('./base.api');
const db = require('../models');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
class Project_element_defaultsDBApi extends GenericDBApi {
static get MODEL() {
return db.project_element_defaults;
}
static get TABLE_NAME() {
return 'project_element_defaults';
}
static get SEARCHABLE_FIELDS() {
return ['name', 'element_type'];
}
static get RANGE_FIELDS() {
return ['sort_order', 'snapshot_version'];
}
static get ENUM_FIELDS() {
return [];
}
static get ASSOCIATIONS() {
return [{ field: 'project', setter: 'setProject', isArray: false }];
}
static get RELATION_FILTERS() {
return [
{
filterKey: 'project',
model: db.projects,
as: 'project',
searchField: 'name',
},
];
}
static get FIND_ALL_INCLUDES() {
return [{ association: 'project' }, { association: 'source_element' }];
}
static get CSV_FIELDS() {
return [
'id',
'element_type',
'name',
'sort_order',
'projectId',
'snapshot_version',
'createdAt',
];
}
static get AUTOCOMPLETE_FIELD() {
return 'name';
}
// Declarative field configuration using base class patterns
static get JSON_FIELDS() {
return ['settings_json'];
}
static get FIELD_DEFAULTS() {
return {
element_type: { default: null },
name: { default: null },
sort_order: { default: 0 },
source_element_id: { default: null },
snapshot_version: { default: 1 },
};
}
/**
 * Map an incoming payload to the project_element_defaults column set.
 * Delegates to the base class for JSON-field parsing and declared
 * defaults, then normalizes `project` → `projectId`.
 *
 * @param {Object} data - Raw payload (may use either `project` or
 *   `projectId` for the owning project).
 * @returns {Object} Column-keyed object ready for create/update.
 */
static getFieldMapping(data) {
// Apply base class transformations (JSON fields, defaults, transformers)
const mapped = super.getFieldMapping(data);
// Custom mapping for projectId field (accepts both projectId and project)
if (mapped.project && !mapped.projectId) {
mapped.projectId = mapped.project;
}
return {
id: mapped.id || undefined,
element_type: mapped.element_type,
name: mapped.name,
sort_order: mapped.sort_order,
settings_json: mapped.settings_json,
source_element_id: mapped.source_element_id,
snapshot_version: mapped.snapshot_version,
projectId: mapped.projectId,
};
}
/**
 * Paged listing of project_element_defaults with project filtering.
 * Supports both 'project' and 'projectId' query params; the project
 * term is '|'-separated and matched against project ids (valid UUIDs)
 * or names (case-insensitive substring). Field filters are driven by
 * the class's SEARCHABLE_FIELDS / RANGE_FIELDS / ENUM_FIELDS getters.
 *
 * @param {Object} [filter] - id, per-field filters, createdAtRange,
 *   limit/page for paging, field/sort for ordering.
 * @param {Object} [options] - May carry `transaction` and `countOnly`.
 * @returns {Promise<{rows: Object[], count: number}>} rows is [] when
 *   `options.countOnly` is set.
 */
static async findAll(filter = {}, options = {}) {
filter = filter || {};
const limit = filter.limit || 0;
const currentPage = +filter.page || 0;
const offset = currentPage * limit;
let where = {};
// Support both 'project' and 'projectId' query params
const projectFilter = filter.project || filter.projectId;
const terms = projectFilter ? projectFilter.split('|') : [];
const validUuids = Utils.filterValidUuids(terms);
let include = [
{
model: db.projects,
as: 'project',
where: projectFilter
? {
[Op.or]: [
...(validUuids.length > 0
? [{ id: { [Op.in]: validUuids } }]
: []),
{
name: {
[Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })),
},
},
],
}
: {},
},
{
model: db.element_type_defaults,
as: 'source_element',
required: false,
},
];
// Reject malformed id filters early instead of letting the DB error out.
if (filter.id) {
if (!Utils.isValidUuid(filter.id)) {
return { rows: [], count: 0 };
}
where.id = filter.id;
}
// NOTE(review): successive searchable fields overwrite the same
// where[Op.and] slot, so only the last one applies — confirm intended.
for (const field of this.SEARCHABLE_FIELDS) {
if (filter[field]) {
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
}
}
// Inclusive numeric ranges from '<field>Range' filter keys.
for (const field of this.RANGE_FIELDS) {
const rangeKey = `${field}Range`;
if (filter[rangeKey]) {
const [start, end] = filter[rangeKey];
if (start !== undefined && start !== null && start !== '') {
where[field] = { ...where[field], [Op.gte]: start };
}
if (end !== undefined && end !== null && end !== '') {
where[field] = { ...where[field], [Op.lte]: end };
}
}
}
for (const field of this.ENUM_FIELDS) {
if (filter[field] !== undefined) {
where[field] = filter[field];
}
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where.createdAt = { ...where.createdAt, [Op.gte]: start };
}
if (end !== undefined && end !== null && end !== '') {
where.createdAt = { ...where.createdAt, [Op.lte]: end };
}
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['sort_order', 'asc']],
transaction: options.transaction,
};
if (!options.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
return {
rows: options.countOnly ? [] : rows,
count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
/**
* Find project element default by element type for a specific project
*/
static async findByElementType(projectId, elementType, options = {}) {
return this.MODEL.findOne({
where: {
projectId,
element_type: elementType,
deletedAt: null,
},
...options,
});
}
/**
* Snapshot all global element defaults to a project
* Used when creating a new project
*/
static async snapshotGlobalDefaults(projectId, options = {}) {
const Element_type_defaultsDBApi = require('./element_type_defaults');
// Get all global defaults
const globalDefaults = await Element_type_defaultsDBApi.findAll({});
if (!globalDefaults?.rows?.length) {
return [];
}
// Dedupe by element_type (keep first occurrence)
// Prevents unique constraint violations if global defaults have duplicates
const seenTypes = new Set();
const dedupedDefaults = globalDefaults.rows.filter((row) => {
if (seenTypes.has(row.element_type)) {
console.warn(
`Duplicate element_type in global defaults: ${row.element_type} (skipping)`,
);
return false;
}
seenTypes.add(row.element_type);
return true;
});
const now = new Date();
const currentUserId = options.currentUser?.id || null;
// Create project defaults from global defaults
const projectDefaults = await this.MODEL.bulkCreate(
dedupedDefaults.map((globalDefault) => ({
projectId,
element_type: globalDefault.element_type,
name: globalDefault.name,
sort_order: globalDefault.sort_order,
settings_json: globalDefault.default_settings_json,
source_element_id: globalDefault.id,
snapshot_version: 1,
createdById: currentUserId,
updatedById: currentUserId,
createdAt: now,
updatedAt: now,
})),
{
transaction: options.transaction,
returning: true,
},
);
return projectDefaults;
}
/**
* Reset a project element default to the current global default
*/
static async resetToGlobal(id, options = {}) {
const Element_type_defaultsDBApi = require('./element_type_defaults');
// Ensure global defaults are initialized
await Element_type_defaultsDBApi.ensureInitialized();
// Find the project default
const projectDefault = await this.MODEL.findByPk(id);
if (!projectDefault) {
throw new Error('Project element default not found');
}
// Find the matching global default
const globalDefault = await Element_type_defaultsDBApi.MODEL.findOne({
where: {
element_type: projectDefault.element_type,
deletedAt: null,
},
});
if (!globalDefault) {
throw new Error(
`No global default found for element type: ${projectDefault.element_type}`,
);
}
// Update with global settings and increment version
const now = new Date();
await projectDefault.update(
{
name: globalDefault.name,
sort_order: globalDefault.sort_order,
settings_json: globalDefault.default_settings_json,
source_element_id: globalDefault.id,
snapshot_version: projectDefault.snapshot_version + 1,
updatedById: options.currentUser?.id || null,
updatedAt: now,
},
{
transaction: options.transaction,
},
);
return projectDefault.reload();
}
/**
* Get diff between project default and current global default
*/
static async getDiffFromGlobal(id) {
const Element_type_defaultsDBApi = require('./element_type_defaults');
// Ensure global defaults are initialized
await Element_type_defaultsDBApi.ensureInitialized();
// Find the project default
const projectDefault = await this.MODEL.findByPk(id);
if (!projectDefault) {
throw new Error('Project element default not found');
}
// Find the matching global default
const globalDefault = await Element_type_defaultsDBApi.MODEL.findOne({
where: {
element_type: projectDefault.element_type,
deletedAt: null,
},
});
if (!globalDefault) {
return {
projectDefault,
globalDefault: null,
hasGlobalDefault: false,
isDifferent: true,
};
}
// Parse JSON settings for comparison
const projectSettings =
typeof projectDefault.settings_json === 'string'
? JSON.parse(projectDefault.settings_json || '{}')
: projectDefault.settings_json || {};
const globalSettings =
typeof globalDefault.default_settings_json === 'string'
? JSON.parse(globalDefault.default_settings_json || '{}')
: globalDefault.default_settings_json || {};
const isDifferent =
JSON.stringify(projectSettings) !== JSON.stringify(globalSettings) ||
projectDefault.name !== globalDefault.name ||
projectDefault.sort_order !== globalDefault.sort_order;
return {
projectDefault,
globalDefault,
hasGlobalDefault: true,
isDifferent,
projectSettings,
globalSettings,
};
}
}
module.exports = Project_element_defaultsDBApi;

View File

@ -1,86 +1,501 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class Project_membershipsDBApi extends GenericDBApi {
static get MODEL() {
return db.project_memberships;
}
static get TABLE_NAME() {
return 'project_memberships';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return []; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class Project_membershipsDBApi {
return ['invited_at', 'accepted_at'];
}
static get ENUM_FIELDS() {
return ['access_level', 'is_active']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const project_memberships = await db.project_memberships.create(
return [ {
'id', id: data.id || undefined,
'access_level',
'is_active', access_level: data.access_level
'invited_at', ||
'accepted_at', null
'createdAt', ,
];
} is_active: data.is_active
||
false
,
invited_at: data.invited_at
||
null
,
accepted_at: data.accepted_at
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'access_level'; await project_memberships.setProject( data.project || null, {
} transaction,
});
await project_memberships.setUser( data.user || null, {
transaction,
});
static get ASSOCIATIONS() {
return [
{ field: 'project', setter: 'setProject', isArray: false },
{ field: 'user', setter: 'setUser', isArray: false },
];
}
static get FIND_BY_INCLUDES() {
return [{ association: 'project' }, { association: 'user' }];
}
static get FIND_ALL_INCLUDES() { return project_memberships;
return [ }
{ model: db.projects, as: 'project', required: false },
{ model: db.users, as: 'user', required: false },
]; static async bulkImport(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const project_membershipsData = data.map((item, index) => ({
id: item.id || undefined,
access_level: item.access_level
||
null
,
is_active: item.is_active
||
false
,
invited_at: item.invited_at
||
null
,
accepted_at: item.accepted_at
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const project_memberships = await db.project_memberships.bulkCreate(project_membershipsData, { transaction });
// For each item created, replace relation files
return project_memberships;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const project_memberships = await db.project_memberships.findByPk(id, {transaction});
const updatePayload = {};
if (data.access_level !== undefined) updatePayload.access_level = data.access_level;
if (data.is_active !== undefined) updatePayload.is_active = data.is_active;
if (data.invited_at !== undefined) updatePayload.invited_at = data.invited_at;
if (data.accepted_at !== undefined) updatePayload.accepted_at = data.accepted_at;
updatePayload.updatedById = currentUser.id;
await project_memberships.update(updatePayload, {transaction});
if (data.project !== undefined) {
await project_memberships.setProject(
data.project,
{ transaction }
);
}
if (data.user !== undefined) {
await project_memberships.setUser(
data.user,
{ transaction }
);
}
return project_memberships;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const project_memberships = await db.project_memberships.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of project_memberships) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of project_memberships) {
await record.destroy({transaction});
}
});
return project_memberships;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const project_memberships = await db.project_memberships.findByPk(id, options);
await project_memberships.update({
deletedBy: currentUser.id
}, {
transaction,
});
await project_memberships.destroy({
transaction
});
return project_memberships;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const project_memberships = await db.project_memberships.findOne({
where,
transaction,
});
if (!project_memberships) {
return project_memberships;
}
const output = project_memberships.get({plain: true});
output.project = await project_memberships.getProject({
transaction
});
output.user = await project_memberships.getUser({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get RELATION_FILTERS() {
return [
{ {
filterKey: 'project',
model: db.projects, model: db.projects,
as: 'project', as: 'project',
searchField: 'name',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
{ {
filterKey: 'user',
model: db.users, model: db.users,
as: 'user', as: 'user',
searchField: 'firstName',
where: filter.user ? {
[Op.or]: [
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
{
firstName: {
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static getFieldMapping(data) { if (filter) {
return { if (filter.id) {
id: data.id || undefined, where = {
access_level: data.access_level || null, ...where,
is_active: data.is_active || false, ['id']: Utils.uuid(filter.id),
invited_at: data.invited_at || null, };
accepted_at: data.accepted_at || null, }
};
}
}
module.exports = Project_membershipsDBApi;
if (filter.invited_atRange) {
const [start, end] = filter.invited_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
invited_at: {
...where.invited_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
invited_at: {
...where.invited_at,
[Op.lte]: end,
},
};
}
}
if (filter.accepted_atRange) {
const [start, end] = filter.accepted_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
accepted_at: {
...where.accepted_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
accepted_at: {
...where.accepted_at,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.access_level) {
where = {
...where,
access_level: filter.access_level,
};
}
if (filter.is_active) {
where = {
...where,
is_active: filter.is_active,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.project_memberships.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'project_memberships',
'access_level',
query,
),
],
};
}
const records = await db.project_memberships.findAll({
attributes: [ 'id', 'access_level' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['access_level', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.access_level,
}));
}
};

View File

@ -1,277 +0,0 @@
const GenericDBApi = require('./base.api');
const db = require('../models');
const Utils = require('../utils');
const {
applyRuntimeEnvironment,
applyRuntimeProjectFilter,
} = require('./runtime-context');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
/**
 * DB API for per-project, per-environment transition settings.
 * Extends GenericDBApi with upsert semantics keyed on
 * (projectId, environment) and runtime-context aware lookups.
 */
class Project_transition_settingsDBApi extends GenericDBApi {
  static get MODEL() {
    return db.project_transition_settings;
  }

  static get TABLE_NAME() {
    return 'project_transition_settings';
  }

  static get SEARCHABLE_FIELDS() {
    return ['source_key', 'transition_type', 'easing', 'overlay_color'];
  }

  static get RANGE_FIELDS() {
    return ['duration_ms'];
  }

  static get ENUM_FIELDS() {
    return ['environment'];
  }

  static get CSV_FIELDS() {
    return [
      'id',
      'environment',
      'source_key',
      'transition_type',
      'duration_ms',
      'easing',
      'overlay_color',
      'createdAt',
    ];
  }

  static get AUTOCOMPLETE_FIELD() {
    return 'transition_type';
  }

  static get ASSOCIATIONS() {
    return [{ field: 'project', setter: 'setProject', isArray: false }];
  }

  /**
   * Map incoming request data to column values, applying defaults.
   * Note: environment and projectId are NOT included here because they are
   * set explicitly in upsertForProject and should never be changed via data.
   */
  static getFieldMapping(data) {
    return {
      id: data.id || undefined,
      source_key: data.source_key || null,
      transition_type: data.transition_type || 'fade',
      // Explicit undefined check so a 0 duration is preserved.
      duration_ms: data.duration_ms !== undefined ? data.duration_ms : 700,
      easing: data.easing || 'ease-in-out',
      overlay_color: data.overlay_color || '#000000',
    };
  }

  /**
   * Find settings by project ID and environment.
   * This is the primary method for fetching transition settings.
   *
   * @param {string} projectId - Project ID
   * @param {string} environment - Environment (dev, stage, production)
   * @param {object} options - Query options
   * @returns {object|null} Settings record or null
   */
  static async findByProjectAndEnvironment(
    projectId,
    environment,
    options = {},
  ) {
    const transaction = options.transaction;
    const record = await this.MODEL.findOne({
      where: {
        projectId,
        environment,
      },
      transaction,
      include: [
        {
          model: db.projects,
          as: 'project',
        },
      ],
    });

    if (!record) return null;
    return record.get({ plain: true });
  }

  /**
   * Create or update settings for a project/environment combination.
   * Uses upsert semantics - creates if not exists, updates if exists.
   *
   * @param {string} projectId - Project ID
   * @param {string} environment - Environment (dev, stage, production)
   * @param {object} data - Settings data
   * @param {object} options - Query options
   * @returns {object} Created or updated record
   */
  static async upsertForProject(projectId, environment, data, options = {}) {
    const transaction = options.transaction;
    const currentUser = options.currentUser;

    // Check if record exists
    const existing = await this.MODEL.findOne({
      where: { projectId, environment },
      transaction,
    });

    if (existing) {
      // Update existing record
      await existing.update(
        {
          ...this.getFieldMapping(data),
          updatedById: currentUser?.id || null,
        },
        { transaction },
      );
      return existing.get({ plain: true });
    }

    // Create new record
    const newRecord = await this.MODEL.create(
      {
        ...this.getFieldMapping(data),
        projectId,
        environment,
        createdById: currentUser?.id || null,
        updatedById: currentUser?.id || null,
      },
      { transaction },
    );
    return newRecord.get({ plain: true });
  }

  /**
   * Find a single record, scoped by the runtime environment and runtime
   * project (when the request carries that context).
   */
  static async findBy(where, options = {}) {
    const transaction = options.transaction;
    const queryWhere = applyRuntimeEnvironment({ ...where }, options);
    const projectInclude = applyRuntimeProjectFilter(
      { model: db.projects, as: 'project' },
      options,
    );
    const record = await this.MODEL.findOne({
      where: queryWhere,
      transaction,
      include: [projectInclude],
    });
    if (!record) return null;
    return record.get({ plain: true });
  }

  /**
   * Paged listing with project/text/range/enum filters, scoped by runtime
   * environment and project context.
   *
   * @param {object} filter - Query filters (see GenericDBApi conventions).
   * @param {object} options - { transaction, countOnly, runtime context }
   * @returns {{rows: Array, count: number}}
   */
  static async findAll(filter = {}, options = {}) {
    filter = filter || {};
    const limit = filter.limit || 0;
    const currentPage = +filter.page || 0;
    const offset = currentPage * limit;

    let where = {};

    // Project terms may be UUIDs or partial names, '|'-separated.
    const terms = filter.project ? filter.project.split('|') : [];
    const validUuids = Utils.filterValidUuids(terms);

    let include = [
      {
        model: db.projects,
        as: 'project',
        where: filter.project
          ? {
              [Op.or]: [
                ...(validUuids.length > 0
                  ? [{ id: { [Op.in]: validUuids } }]
                  : []),
                {
                  name: {
                    [Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })),
                  },
                },
              ],
            }
          : {},
      },
    ];
    include[0] = applyRuntimeProjectFilter(include[0], options);

    if (filter.id) {
      // Short-circuit on malformed UUIDs instead of letting the DB error out.
      if (!Utils.isValidUuid(filter.id)) {
        return { rows: [], count: 0 };
      }
      where.id = filter.id;
    }

    // Bug fix: collect every text condition before assigning where[Op.and].
    // Previously each searchable field overwrote the prior assignment, so
    // filtering on two text fields silently dropped all but the last one.
    const textConditions = [];
    for (const field of this.SEARCHABLE_FIELDS) {
      if (filter[field]) {
        textConditions.push(Utils.ilike(this.TABLE_NAME, field, filter[field]));
      }
    }
    if (textConditions.length > 0) {
      where[Op.and] = textConditions;
    }

    // Range filters arrive as `<field>Range: [start, end]`; either bound
    // may be absent ('' / null / undefined means open-ended).
    for (const field of this.RANGE_FIELDS) {
      const rangeKey = `${field}Range`;
      if (filter[rangeKey]) {
        const [start, end] = filter[rangeKey];
        if (start !== undefined && start !== null && start !== '') {
          where[field] = { ...where[field], [Op.gte]: start };
        }
        if (end !== undefined && end !== null && end !== '') {
          where[field] = { ...where[field], [Op.lte]: end };
        }
      }
    }

    for (const field of this.ENUM_FIELDS) {
      if (filter[field] !== undefined) {
        where[field] = filter[field];
      }
    }

    if (filter.active !== undefined) {
      where.active = filter.active === true || filter.active === 'true';
    }

    if (filter.createdAtRange) {
      const [start, end] = filter.createdAtRange;
      if (start !== undefined && start !== null && start !== '') {
        where.createdAt = { ...where.createdAt, [Op.gte]: start };
      }
      if (end !== undefined && end !== null && end !== '') {
        where.createdAt = { ...where.createdAt, [Op.lte]: end };
      }
    }

    // Constrain to the caller's runtime environment (dev/stage/production).
    where = applyRuntimeEnvironment(where, options);

    const queryOptions = {
      where,
      include,
      // distinct avoids inflated counts from the joined include.
      distinct: true,
      order:
        filter.field && filter.sort
          ? [[filter.field, filter.sort]]
          : [['createdAt', 'desc']],
      transaction: options.transaction,
    };

    // countOnly callers skip pagination so the count covers the whole set.
    if (!options.countOnly) {
      queryOptions.limit = limit ? Number(limit) : undefined;
      queryOptions.offset = offset ? Number(offset) : undefined;
    }

    try {
      const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
      return {
        rows: options.countOnly ? [] : rows,
        count,
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }
}
module.exports = Project_transition_settingsDBApi;

View File

@ -1,230 +1,699 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils'); const Utils = require('../utils');
const { getRuntimeProjectSlug } = require('./runtime-context'); const {
getRuntimeEnvironment,
getRuntimeProjectSlug,
} = require('./runtime-context');
const Sequelize = db.Sequelize; const Sequelize = db.Sequelize;
const Op = Sequelize.Op; const Op = Sequelize.Op;
class ProjectsDBApi extends GenericDBApi { module.exports = class ProjectsDBApi {
static get MODEL() {
return db.projects;
}
static get TABLE_NAME() {
return 'projects'; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get SEARCHABLE_FIELDS() { const projects = await db.projects.create(
return [ {
'name', id: data.id || undefined,
'slug',
'description', name: data.name
'logo_url', ||
'favicon_url', null
'og_image_url', ,
];
} slug: data.slug
||
null
,
description: data.description
||
null
,
phase: data.phase
||
null
,
logo_url: data.logo_url
||
null
,
favicon_url: data.favicon_url
||
null
,
og_image_url: data.og_image_url
||
null
,
theme_config_json: data.theme_config_json
||
null
,
custom_css_json: data.custom_css_json
||
null
,
cdn_base_url: data.cdn_base_url
||
null
,
entry_page_slug: data.entry_page_slug
||
null
,
is_deleted: data.is_deleted
||
false
,
deleted_at_time: data.deleted_at_time
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get RANGE_FIELDS() {
return [];
}
static get ENUM_FIELDS() {
return [];
}
static get CSV_FIELDS() {
return ['id', 'name', 'slug', 'description', 'logo_url', 'createdAt'];
}
static get AUTOCOMPLETE_FIELD() { return projects;
return 'name'; }
}
static get ASSOCIATIONS() { static async bulkImport(data, options) {
return []; const currentUser = (options && options.currentUser) || { id: null };
} const transaction = (options && options.transaction) || undefined;
static getFieldMapping(data) { // Prepare data - wrapping individual data transformations in a map() method
// Use undefined for missing fields so they're skipped during update const projectsData = data.map((item, index) => ({
// Only include fields that are explicitly provided in data id: item.id || undefined,
// Note: transition_settings moved to project_transition_settings table
return { name: item.name
id: data.id || undefined, ||
name: 'name' in data ? data.name || null : undefined, null
slug: 'slug' in data ? data.slug || null : undefined, ,
description: 'description' in data ? data.description || null : undefined,
logo_url: 'logo_url' in data ? data.logo_url || null : undefined, slug: item.slug
favicon_url: 'favicon_url' in data ? data.favicon_url || null : undefined, ||
og_image_url: null
'og_image_url' in data ? data.og_image_url || null : undefined, ,
design_width: 'design_width' in data ? data.design_width : undefined,
design_height: 'design_height' in data ? data.design_height : undefined, description: item.description
}; ||
} null
,
phase: item.phase
||
null
,
logo_url: item.logo_url
||
null
,
favicon_url: item.favicon_url
||
null
,
og_image_url: item.og_image_url
||
null
,
theme_config_json: item.theme_config_json
||
null
,
custom_css_json: item.custom_css_json
||
null
,
cdn_base_url: item.cdn_base_url
||
null
,
entry_page_slug: item.entry_page_slug
||
null
,
is_deleted: item.is_deleted
||
false
,
deleted_at_time: item.deleted_at_time
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
static get DEFAULT_INCLUDES() { // Bulk create items
return []; const projects = await db.projects.bulkCreate(projectsData, { transaction });
}
static get ALL_INCLUDES() { // For each item created, replace relation files
return [
{ association: 'project_memberships_project' },
{ association: 'assets_project' },
{ association: 'presigned_url_requests_project' },
{ association: 'tour_pages_project' },
{ association: 'project_audio_tracks_project' },
{ association: 'publish_events_project' },
{ association: 'pwa_caches_project' },
{ association: 'access_logs_project' },
];
}
static async findBy(where, options = {}) { return projects;
const transaction = options.transaction;
const runtimeProjectSlug = getRuntimeProjectSlug(options);
const queryWhere = { ...where };
// Runtime access: filter by project slug
// Skip if finding by ID (unambiguous lookup)
if (runtimeProjectSlug && !where.id) {
queryWhere.slug = runtimeProjectSlug;
} }
const include = static async update(id, data, options) {
options.include !== undefined ? options.include : this.DEFAULT_INCLUDES; const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const record = await this.MODEL.findOne({ const projects = await db.projects.findByPk(id, {transaction});
where: queryWhere,
transaction,
include,
});
if (!record) return null;
return record.get({ plain: true });
}
/**
* Create a new project and auto-snapshot global element defaults
*/
static async create(data, options = {}) {
const transaction = options.transaction;
// Create the project using parent's create const updatePayload = {};
const project = await super.create(data, options);
if (data.name !== undefined) updatePayload.name = data.name;
if (data.slug !== undefined) updatePayload.slug = data.slug;
if (data.description !== undefined) updatePayload.description = data.description;
if (data.phase !== undefined) updatePayload.phase = data.phase;
if (data.logo_url !== undefined) updatePayload.logo_url = data.logo_url;
if (data.favicon_url !== undefined) updatePayload.favicon_url = data.favicon_url;
if (data.og_image_url !== undefined) updatePayload.og_image_url = data.og_image_url;
if (data.theme_config_json !== undefined) updatePayload.theme_config_json = data.theme_config_json;
if (data.custom_css_json !== undefined) updatePayload.custom_css_json = data.custom_css_json;
if (data.cdn_base_url !== undefined) updatePayload.cdn_base_url = data.cdn_base_url;
if (data.entry_page_slug !== undefined) updatePayload.entry_page_slug = data.entry_page_slug;
if (data.is_deleted !== undefined) updatePayload.is_deleted = data.is_deleted;
if (data.deleted_at_time !== undefined) updatePayload.deleted_at_time = data.deleted_at_time;
updatePayload.updatedById = currentUser.id;
// Auto-snapshot global element defaults to the new project await projects.update(updatePayload, {transaction});
// Errors propagate to service layer → transaction rollback → proper error to client
const Project_element_defaultsDBApi = require('./project_element_defaults');
await Project_element_defaultsDBApi.snapshotGlobalDefaults(project.id, {
...options,
transaction,
});
return project;
}
static async findAll(filter = {}, options = {}) {
filter = filter || {};
const limit = filter.limit || 0;
const currentPage = +filter.page || 0;
const offset = currentPage * limit;
let where = {};
let include = [];
if (filter.id) { return projects;
if (!Utils.isValidUuid(filter.id)) {
return { rows: [], count: 0 };
}
where.id = filter.id;
} }
for (const field of this.SEARCHABLE_FIELDS) { static async deleteByIds(ids, options) {
if (filter[field]) { const currentUser = (options && options.currentUser) || { id: null };
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]); const transaction = (options && options.transaction) || undefined;
}
}
for (const field of this.RANGE_FIELDS) { const projects = await db.projects.findAll({
const rangeKey = `${field}Range`; where: {
if (filter[rangeKey]) { id: {
const [start, end] = filter[rangeKey]; [Op.in]: ids,
if (start !== undefined && start !== null && start !== '') { },
where[field] = { ...where[field], [Op.gte]: start }; },
} transaction,
if (end !== undefined && end !== null && end !== '') {
where[field] = { ...where[field], [Op.lte]: end };
}
}
}
for (const field of this.ENUM_FIELDS) {
if (filter[field] !== undefined) {
where[field] = filter[field];
}
}
if (filter.active !== undefined) {
where.active = filter.active === true || filter.active === 'true';
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where.createdAt = { ...where.createdAt, [Op.gte]: start };
}
if (end !== undefined && end !== null && end !== '') {
where.createdAt = { ...where.createdAt, [Op.lte]: end };
}
}
// Runtime access: filter by project slug
const runtimeProjectSlug = getRuntimeProjectSlug(options);
if (runtimeProjectSlug) {
where.slug = runtimeProjectSlug;
}
try {
if (options.countOnly) {
const count = await this.MODEL.count({
where,
include,
distinct: true,
transaction: options.transaction,
}); });
return { await db.sequelize.transaction(async (transaction) => {
rows: [], for (const record of projects) {
count, await record.update(
}; {deletedBy: currentUser.id},
} {transaction}
);
}
for (const record of projects) {
await record.destroy({transaction});
}
});
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options.transaction,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
};
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions); return projects;
return {
rows,
count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
} }
}
}
module.exports = ProjectsDBApi; static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const projects = await db.projects.findByPk(id, options);
await projects.update({
deletedBy: currentUser.id
}, {
transaction,
});
await projects.destroy({
transaction
});
return projects;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const runtimeEnvironment = getRuntimeEnvironment(options);
const runtimeProjectSlug = getRuntimeProjectSlug(options);
const queryWhere = { ...where };
if (runtimeEnvironment) {
queryWhere.phase = runtimeEnvironment === 'production'
? 'production'
: { [Op.in]: ['stage', 'production'] };
}
if (runtimeProjectSlug) {
queryWhere.slug = runtimeProjectSlug;
}
const projects = await db.projects.findOne(
{ where: queryWhere, transaction },
);
if (!projects) {
return projects;
}
const output = projects.get({plain: true});
output.project_memberships_project = await projects.getProject_memberships_project({
transaction
});
output.assets_project = await projects.getAssets_project({
transaction
});
output.presigned_url_requests_project = await projects.getPresigned_url_requests_project({
transaction
});
output.tour_pages_project = await projects.getTour_pages_project({
transaction
});
output.transitions_project = await projects.getTransitions_project({
transaction
});
output.project_audio_tracks_project = await projects.getProject_audio_tracks_project({
transaction
});
output.publish_events_project = await projects.getPublish_events_project({
transaction
});
output.pwa_caches_project = await projects.getPwa_caches_project({
transaction
});
output.access_logs_project = await projects.getAccess_logs_project({
transaction
});
return output;
}
/**
 * Paged listing of projects with optional per-column filters.
 *
 * @param {Object} filter  - column filters plus paging/sorting keys:
 *                           limit, page, field, sort, *_Range tuples.
 * @param {Object} options - may carry transaction, countOnly, runtimeContext.
 * @returns {Promise<{rows: Array, count: number}>}
 *
 * NOTE(review): each ilike filter is merged as `{ ...where, [Op.and]: ... }`,
 * so a later filter overwrites an earlier one under the same Op.and symbol
 * key — only the last text filter applied actually constrains the query.
 * Confirm whether combining multiple text filters is expected to work.
 */
static async findAll(
  filter,
  options
) {
  filter = filter || {};
  const limit = filter.limit || 0;
  let offset = 0;
  let where = {};
  // +filter.page is NaN when page is absent; NaN * limit is NaN, which is
  // falsy and therefore dropped when queryOptions.offset is computed below.
  const currentPage = +filter.page;
  offset = currentPage * limit;
  let include = [
  ];
  if (filter) {
    if (filter.id) {
      where = {
        ...where,
        ['id']: Utils.uuid(filter.id),
      };
    }
    // Case-insensitive substring matches on text columns.
    if (filter.name) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'name',
          filter.name,
        ),
      };
    }
    if (filter.slug) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'slug',
          filter.slug,
        ),
      };
    }
    if (filter.description) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'description',
          filter.description,
        ),
      };
    }
    if (filter.logo_url) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'logo_url',
          filter.logo_url,
        ),
      };
    }
    if (filter.favicon_url) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'favicon_url',
          filter.favicon_url,
        ),
      };
    }
    if (filter.og_image_url) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'og_image_url',
          filter.og_image_url,
        ),
      };
    }
    if (filter.theme_config_json) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'theme_config_json',
          filter.theme_config_json,
        ),
      };
    }
    if (filter.custom_css_json) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'custom_css_json',
          filter.custom_css_json,
        ),
      };
    }
    if (filter.cdn_base_url) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'cdn_base_url',
          filter.cdn_base_url,
        ),
      };
    }
    if (filter.entry_page_slug) {
      where = {
        ...where,
        [Op.and]: Utils.ilike(
          'projects',
          'entry_page_slug',
          filter.entry_page_slug,
        ),
      };
    }
    // Inclusive [start, end] range; either bound may be omitted.
    if (filter.deleted_at_timeRange) {
      const [start, end] = filter.deleted_at_timeRange;
      if (start !== undefined && start !== null && start !== '') {
        where = {
          ...where,
          deleted_at_time: {
            ...where.deleted_at_time,
            [Op.gte]: start,
          },
        };
      }
      if (end !== undefined && end !== null && end !== '') {
        where = {
          ...where,
          deleted_at_time: {
            ...where.deleted_at_time,
            [Op.lte]: end,
          },
        };
      }
    }
    // Accepts boolean true or the string 'true'; anything else means false.
    if (filter.active !== undefined) {
      where = {
        ...where,
        active: filter.active === true || filter.active === 'true'
      };
    }
    if (filter.phase) {
      where = {
        ...where,
        phase: filter.phase,
      };
    }
    if (filter.is_deleted) {
      where = {
        ...where,
        is_deleted: filter.is_deleted,
      };
    }
    if (filter.createdAtRange) {
      const [start, end] = filter.createdAtRange;
      if (start !== undefined && start !== null && start !== '') {
        where = {
          ...where,
          ['createdAt']: {
            ...where.createdAt,
            [Op.gte]: start,
          },
        };
      }
      if (end !== undefined && end !== null && end !== '') {
        where = {
          ...where,
          ['createdAt']: {
            ...where.createdAt,
            [Op.lte]: end,
          },
        };
      }
    }
  }
  // Runtime scoping applied after (and overriding) explicit filters.
  // NOTE(review): here the phase is pinned to the exact runtime environment,
  // while findBy above widens non-production to ['stage','production'] —
  // confirm the asymmetry is intentional.
  const runtimeEnvironment = getRuntimeEnvironment(options);
  const runtimeProjectSlug = getRuntimeProjectSlug(options);
  if (runtimeEnvironment) {
    where = {
      ...where,
      phase: runtimeEnvironment,
    };
  }
  if (runtimeProjectSlug) {
    where = {
      ...where,
      slug: runtimeProjectSlug,
    };
  }
  const queryOptions = {
    where,
    include,
    distinct: true,
    order: filter.field && filter.sort
      ? [[filter.field, filter.sort]]
      : [['createdAt', 'desc']],
    transaction: options?.transaction,
    // Logs every generated SQL statement to stdout.
    logging: console.log
  };
  // countOnly callers get just the total; skip limit/offset entirely.
  if (!options?.countOnly) {
    queryOptions.limit = limit ? Number(limit) : undefined;
    queryOptions.offset = offset ? Number(offset) : undefined;
  }
  try {
    const { rows, count } = await db.projects.findAndCountAll(queryOptions);
    return {
      rows: options?.countOnly ? [] : rows,
      count: count
    };
  } catch (error) {
    console.error('Error executing query:', error);
    throw error;
  }
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'projects',
'name',
query,
),
],
};
}
const records = await db.projects.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -1,105 +1,703 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class Publish_eventsDBApi extends GenericDBApi {
static get MODEL() {
return db.publish_events;
}
static get TABLE_NAME() {
return 'publish_events';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['title', 'description', 'error_message']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class Publish_eventsDBApi {
return [
'started_at',
'finished_at',
'pages_copied',
'transitions_copied',
'audios_copied',
];
}
static get ENUM_FIELDS() {
return ['from_environment', 'to_environment', 'status']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get UUID_FIELDS() { const publish_events = await db.publish_events.create(
return ['projectId']; {
} id: data.id || undefined,
title: data.title
||
null
,
description: data.description
||
null
,
from_environment: data.from_environment
||
null
,
to_environment: data.to_environment
||
null
,
started_at: data.started_at
||
null
,
finished_at: data.finished_at
||
null
,
status: data.status
||
null
,
error_message: data.error_message
||
null
,
pages_copied: data.pages_copied
||
null
,
transitions_copied: data.transitions_copied
||
null
,
audios_copied: data.audios_copied
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get CSV_FIELDS() {
return [ await publish_events.setProject( data.project || null, {
'id', transaction,
'title', });
'description',
'from_environment', await publish_events.setUser( data.user || null, {
'to_environment', transaction,
'status', });
'pages_copied',
'createdAt',
];
}
static get AUTOCOMPLETE_FIELD() {
return 'status';
}
static get ASSOCIATIONS() {
return [
{ field: 'project', setter: 'setProject', isArray: false },
{ field: 'user', setter: 'setUser', isArray: false },
];
}
static get FIND_BY_INCLUDES() { return publish_events;
return [{ association: 'project' }, { association: 'user' }]; }
}
static get FIND_ALL_INCLUDES() { static async bulkImport(data, options) {
return [ const currentUser = (options && options.currentUser) || { id: null };
{ model: db.projects, as: 'project', required: false }, const transaction = (options && options.transaction) || undefined;
{ model: db.users, as: 'user', required: false },
]; // Prepare data - wrapping individual data transformations in a map() method
} const publish_eventsData = data.map((item, index) => ({
id: item.id || undefined,
title: item.title
||
null
,
description: item.description
||
null
,
from_environment: item.from_environment
||
null
,
to_environment: item.to_environment
||
null
,
started_at: item.started_at
||
null
,
finished_at: item.finished_at
||
null
,
status: item.status
||
null
,
error_message: item.error_message
||
null
,
pages_copied: item.pages_copied
||
null
,
transitions_copied: item.transitions_copied
||
null
,
audios_copied: item.audios_copied
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const publish_events = await db.publish_events.bulkCreate(publish_eventsData, { transaction });
// For each item created, replace relation files
return publish_events;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const publish_events = await db.publish_events.findByPk(id, {transaction});
const updatePayload = {};
if (data.title !== undefined) updatePayload.title = data.title;
if (data.description !== undefined) updatePayload.description = data.description;
if (data.from_environment !== undefined) updatePayload.from_environment = data.from_environment;
if (data.to_environment !== undefined) updatePayload.to_environment = data.to_environment;
if (data.started_at !== undefined) updatePayload.started_at = data.started_at;
if (data.finished_at !== undefined) updatePayload.finished_at = data.finished_at;
if (data.status !== undefined) updatePayload.status = data.status;
if (data.error_message !== undefined) updatePayload.error_message = data.error_message;
if (data.pages_copied !== undefined) updatePayload.pages_copied = data.pages_copied;
if (data.transitions_copied !== undefined) updatePayload.transitions_copied = data.transitions_copied;
if (data.audios_copied !== undefined) updatePayload.audios_copied = data.audios_copied;
updatePayload.updatedById = currentUser.id;
await publish_events.update(updatePayload, {transaction});
if (data.project !== undefined) {
await publish_events.setProject(
data.project,
{ transaction }
);
}
if (data.user !== undefined) {
await publish_events.setUser(
data.user,
{ transaction }
);
}
return publish_events;
}
/**
 * Soft-delete a batch of publish_events rows: stamps deletedBy on each,
 * then destroys them (paranoid soft-delete). Returns the fetched records.
 *
 * NOTE(review): the rows are fetched under the caller-supplied transaction,
 * but the update/destroy writes run inside a fresh managed transaction whose
 * parameter shadows the outer one — confirm this split is intentional.
 */
static async deleteByIds(ids, options) {
  const currentUser = (options && options.currentUser) || { id: null };
  const transaction = (options && options.transaction) || undefined;
  const publish_events = await db.publish_events.findAll({
    where: {
      id: {
        [Op.in]: ids,
      },
    },
    transaction,
  });
  await db.sequelize.transaction(async (transaction) => {
    // First pass records who deleted each row...
    for (const record of publish_events) {
      await record.update(
        {deletedBy: currentUser.id},
        {transaction}
      );
    }
    // ...second pass performs the (soft) destroy.
    for (const record of publish_events) {
      await record.destroy({transaction});
    }
  });
  return publish_events;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const publish_events = await db.publish_events.findByPk(id, options);
await publish_events.update({
deletedBy: currentUser.id
}, {
transaction,
});
await publish_events.destroy({
transaction
});
return publish_events;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const publish_events = await db.publish_events.findOne({
where,
transaction,
});
if (!publish_events) {
return publish_events;
}
const output = publish_events.get({plain: true});
output.project = await publish_events.getProject({
transaction
});
output.user = await publish_events.getUser({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get RELATION_FILTERS() {
return [
{ {
filterKey: 'project',
model: db.projects, model: db.projects,
as: 'project', as: 'project',
searchField: 'name',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
{ {
filterKey: 'user',
model: db.users, model: db.users,
as: 'user', as: 'user',
searchField: 'firstName',
where: filter.user ? {
[Op.or]: [
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
{
firstName: {
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static getFieldMapping(data) { if (filter) {
return { if (filter.id) {
id: data.id || undefined, where = {
title: data.title || null, ...where,
description: data.description || null, ['id']: Utils.uuid(filter.id),
from_environment: data.from_environment || null, };
to_environment: data.to_environment || null, }
started_at: data.started_at || null,
finished_at: data.finished_at || null,
status: data.status || null,
error_message: data.error_message || null,
pages_copied: data.pages_copied || null,
transitions_copied: data.transitions_copied || null,
audios_copied: data.audios_copied || null,
};
}
}
module.exports = Publish_eventsDBApi;
if (filter.title) {
where = {
...where,
[Op.and]: Utils.ilike(
'publish_events',
'title',
filter.title,
),
};
}
if (filter.description) {
where = {
...where,
[Op.and]: Utils.ilike(
'publish_events',
'description',
filter.description,
),
};
}
if (filter.error_message) {
where = {
...where,
[Op.and]: Utils.ilike(
'publish_events',
'error_message',
filter.error_message,
),
};
}
if (filter.started_atRange) {
const [start, end] = filter.started_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
started_at: {
...where.started_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
started_at: {
...where.started_at,
[Op.lte]: end,
},
};
}
}
if (filter.finished_atRange) {
const [start, end] = filter.finished_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
finished_at: {
...where.finished_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
finished_at: {
...where.finished_at,
[Op.lte]: end,
},
};
}
}
if (filter.pages_copiedRange) {
const [start, end] = filter.pages_copiedRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
pages_copied: {
...where.pages_copied,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
pages_copied: {
...where.pages_copied,
[Op.lte]: end,
},
};
}
}
if (filter.transitions_copiedRange) {
const [start, end] = filter.transitions_copiedRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
transitions_copied: {
...where.transitions_copied,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
transitions_copied: {
...where.transitions_copied,
[Op.lte]: end,
},
};
}
}
if (filter.audios_copiedRange) {
const [start, end] = filter.audios_copiedRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
audios_copied: {
...where.audios_copied,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
audios_copied: {
...where.audios_copied,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.from_environment) {
where = {
...where,
from_environment: filter.from_environment,
};
}
if (filter.to_environment) {
where = {
...where,
to_environment: filter.to_environment,
};
}
if (filter.status) {
where = {
...where,
status: filter.status,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.publish_events.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'publish_events',
'status',
query,
),
],
};
}
const records = await db.publish_events.findAll({
attributes: [ 'id', 'status' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['status', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.status,
}));
}
};

View File

@ -1,76 +1,499 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class Pwa_cachesDBApi extends GenericDBApi {
static get MODEL() {
return db.pwa_caches;
}
static get TABLE_NAME() {
return 'pwa_caches';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['cache_version', 'manifest_json', 'asset_list_json']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class Pwa_cachesDBApi {
return ['generated_at'];
}
static get ENUM_FIELDS() {
return ['environment', 'is_active']; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const pwa_caches = await db.pwa_caches.create(
return [ {
'id', id: data.id || undefined,
'environment',
'cache_version', environment: data.environment
'is_active', ||
'generated_at', null
'createdAt', ,
];
} cache_version: data.cache_version
||
null
,
manifest_json: data.manifest_json
||
null
,
asset_list_json: data.asset_list_json
||
null
,
generated_at: data.generated_at
||
null
,
is_active: data.is_active
||
false
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'cache_version'; await pwa_caches.setProject( data.project || null, {
} transaction,
});
static get ASSOCIATIONS() {
return [{ field: 'project', setter: 'setProject', isArray: false }];
}
static get FIND_BY_INCLUDES() {
return [{ association: 'project' }];
}
static get FIND_ALL_INCLUDES() { return pwa_caches;
return [{ model: db.projects, as: 'project', required: false }]; }
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const pwa_cachesData = data.map((item, index) => ({
id: item.id || undefined,
environment: item.environment
||
null
,
cache_version: item.cache_version
||
null
,
manifest_json: item.manifest_json
||
null
,
asset_list_json: item.asset_list_json
||
null
,
generated_at: item.generated_at
||
null
,
is_active: item.is_active
||
false
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const pwa_caches = await db.pwa_caches.bulkCreate(pwa_cachesData, { transaction });
// For each item created, replace relation files
return pwa_caches;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const pwa_caches = await db.pwa_caches.findByPk(id, {transaction});
const updatePayload = {};
if (data.environment !== undefined) updatePayload.environment = data.environment;
if (data.cache_version !== undefined) updatePayload.cache_version = data.cache_version;
if (data.manifest_json !== undefined) updatePayload.manifest_json = data.manifest_json;
if (data.asset_list_json !== undefined) updatePayload.asset_list_json = data.asset_list_json;
if (data.generated_at !== undefined) updatePayload.generated_at = data.generated_at;
if (data.is_active !== undefined) updatePayload.is_active = data.is_active;
updatePayload.updatedById = currentUser.id;
await pwa_caches.update(updatePayload, {transaction});
if (data.project !== undefined) {
await pwa_caches.setProject(
data.project,
{ transaction }
);
}
return pwa_caches;
}
/**
 * Soft-delete a batch of pwa_caches rows: stamps deletedBy on each,
 * then destroys them (paranoid soft-delete). Returns the fetched records.
 *
 * NOTE(review): the rows are fetched under the caller-supplied transaction,
 * but the update/destroy writes run inside a fresh managed transaction whose
 * parameter shadows the outer one — confirm this split is intentional.
 */
static async deleteByIds(ids, options) {
  const currentUser = (options && options.currentUser) || { id: null };
  const transaction = (options && options.transaction) || undefined;
  const pwa_caches = await db.pwa_caches.findAll({
    where: {
      id: {
        [Op.in]: ids,
      },
    },
    transaction,
  });
  await db.sequelize.transaction(async (transaction) => {
    // First pass records who deleted each row...
    for (const record of pwa_caches) {
      await record.update(
        {deletedBy: currentUser.id},
        {transaction}
      );
    }
    // ...second pass performs the (soft) destroy.
    for (const record of pwa_caches) {
      await record.destroy({transaction});
    }
  });
  return pwa_caches;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const pwa_caches = await db.pwa_caches.findByPk(id, options);
await pwa_caches.update({
deletedBy: currentUser.id
}, {
transaction,
});
await pwa_caches.destroy({
transaction
});
return pwa_caches;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const pwa_caches = await db.pwa_caches.findOne({
where,
transaction,
});
if (!pwa_caches) {
return pwa_caches;
}
const output = pwa_caches.get({plain: true});
output.project = await pwa_caches.getProject({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get RELATION_FILTERS() {
return [
{ {
filterKey: 'project',
model: db.projects, model: db.projects,
as: 'project', as: 'project',
searchField: 'name',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
}, },
]; ];
}
static getFieldMapping(data) { if (filter) {
return { if (filter.id) {
id: data.id || undefined, where = {
environment: data.environment || null, ...where,
cache_version: data.cache_version || null, ['id']: Utils.uuid(filter.id),
manifest_json: data.manifest_json || null, };
asset_list_json: data.asset_list_json || null, }
generated_at: data.generated_at || null,
is_active: data.is_active || false,
};
}
}
module.exports = Pwa_cachesDBApi;
if (filter.cache_version) {
where = {
...where,
[Op.and]: Utils.ilike(
'pwa_caches',
'cache_version',
filter.cache_version,
),
};
}
if (filter.manifest_json) {
where = {
...where,
[Op.and]: Utils.ilike(
'pwa_caches',
'manifest_json',
filter.manifest_json,
),
};
}
if (filter.asset_list_json) {
where = {
...where,
[Op.and]: Utils.ilike(
'pwa_caches',
'asset_list_json',
filter.asset_list_json,
),
};
}
if (filter.generated_atRange) {
const [start, end] = filter.generated_atRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
generated_at: {
...where.generated_at,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
generated_at: {
...where.generated_at,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.environment) {
where = {
...where,
environment: filter.environment,
};
}
if (filter.is_active) {
where = {
...where,
is_active: filter.is_active,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.pwa_caches.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'pwa_caches',
'cache_version',
query,
),
],
};
}
const records = await db.pwa_caches.findAll({
attributes: [ 'id', 'cache_version' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['cache_version', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.cache_version,
}));
}
};

View File

@ -1,71 +1,405 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils');
class RolesDBApi extends GenericDBApi {
static get MODEL() {
return db.roles;
}
static get TABLE_NAME() {
return 'roles';
}
static get SEARCHABLE_FIELDS() { const Sequelize = db.Sequelize;
return ['name', 'role_customization']; const Op = Sequelize.Op;
}
static get RANGE_FIELDS() { module.exports = class RolesDBApi {
return [];
}
static get ENUM_FIELDS() {
return []; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
static get CSV_FIELDS() { const roles = await db.roles.create(
return ['id', 'name', 'role_customization', 'createdAt']; {
} id: data.id || undefined,
name: data.name
||
null
,
role_customization: data.role_customization
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
static get AUTOCOMPLETE_FIELD() {
return 'name';
}
static get ASSOCIATIONS() {
return [{ field: 'permissions', setter: 'setPermissions', isArray: true }]; await roles.setPermissions(data.permissions || [], {
} transaction,
});
return roles;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const rolesData = data.map((item, index) => ({
id: item.id || undefined,
name: item.name
||
null
,
role_customization: item.role_customization
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const roles = await db.roles.bulkCreate(rolesData, { transaction });
// For each item created, replace relation files
return roles;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.findByPk(id, {transaction});
const updatePayload = {};
if (data.name !== undefined) updatePayload.name = data.name;
if (data.role_customization !== undefined) updatePayload.role_customization = data.role_customization;
updatePayload.updatedById = currentUser.id;
await roles.update(updatePayload, {transaction});
if (data.permissions !== undefined) {
await roles.setPermissions(data.permissions, { transaction });
}
return roles;
}
/**
 * Soft-delete a batch of roles rows: stamps deletedBy on each, then
 * destroys them (paranoid soft-delete). Returns the fetched records.
 *
 * NOTE(review): the rows are fetched under the caller-supplied transaction,
 * but the update/destroy writes run inside a fresh managed transaction whose
 * parameter shadows the outer one — confirm this split is intentional.
 */
static async deleteByIds(ids, options) {
  const currentUser = (options && options.currentUser) || { id: null };
  const transaction = (options && options.transaction) || undefined;
  const roles = await db.roles.findAll({
    where: {
      id: {
        [Op.in]: ids,
      },
    },
    transaction,
  });
  await db.sequelize.transaction(async (transaction) => {
    // First pass records who deleted each row...
    for (const record of roles) {
      await record.update(
        {deletedBy: currentUser.id},
        {transaction}
      );
    }
    // ...second pass performs the (soft) destroy.
    for (const record of roles) {
      await record.destroy({transaction});
    }
  });
  return roles;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.findByPk(id, options);
await roles.update({
deletedBy: currentUser.id
}, {
transaction,
});
await roles.destroy({
transaction
});
return roles;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.findOne({
where,
transaction,
});
if (!roles) {
return roles;
}
const output = roles.get({plain: true});
output.users_app_role = await roles.getUsers_app_role({
transaction
});
output.permissions = await roles.getPermissions({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
static get FIND_BY_INCLUDES() {
return [{ association: 'users_app_role' }, { association: 'permissions' }];
}
static get FIND_ALL_INCLUDES() {
return [
{ {
model: db.permissions, model: db.permissions,
as: 'permissions', as: 'permissions',
required: false, required: false,
}, },
]; ];
}
static get RELATION_FILTERS() { if (filter) {
return [ if (filter.id) {
{ where = {
filterKey: 'permissions', ...where,
model: db.permissions, ['id']: Utils.uuid(filter.id),
as: 'permissions_filter', };
searchField: 'name', }
},
];
}
static getFieldMapping(data) {
return { if (filter.name) {
id: data.id || undefined, where = {
name: data.name || null, ...where,
role_customization: data.role_customization || null, [Op.and]: Utils.ilike(
}; 'roles',
} 'name',
} filter.name,
),
};
}
if (filter.role_customization) {
where = {
...where,
[Op.and]: Utils.ilike(
'roles',
'role_customization',
filter.role_customization,
),
};
}
module.exports = RolesDBApi;
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.permissions) {
const searchTerms = filter.permissions.split('|');
include = [
{
model: db.permissions,
as: 'permissions_filter',
required: searchTerms.length > 0,
where: searchTerms.length > 0 ? {
[Op.or]: [
{ id: { [Op.in]: searchTerms.map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: searchTerms.map(term => ({ [Op.iLike]: `%${term}%` }))
}
}
]
} : undefined
},
...include,
]
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.roles.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'roles',
'name',
query,
),
],
};
}
const records = await db.roles.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -1,32 +1,25 @@
/**
* Runtime Context Helpers
* For route-based environment access via X-Runtime-Environment header
*/
function getRuntimeContext(options = {}) { function getRuntimeContext(options = {}) {
return (options || {}).runtimeContext || null; return (options || {}).runtimeContext || null;
} }
function getRuntimeEnvironment(options = {}) { function getRuntimeEnvironment(options = {}) {
const runtimeContext = getRuntimeContext(options); const runtimeContext = getRuntimeContext(options);
if (!runtimeContext) return null;
// Read from header (route-based mode) if (!runtimeContext) return null;
// SECURITY: Only allow 'production' and 'stage' from header if (runtimeContext.mode === 'stage') return 'stage';
// to prevent unauthorized access to dev data if (runtimeContext.mode === 'production') return 'production';
if (runtimeContext.headerEnvironment === 'production') return 'production';
if (runtimeContext.headerEnvironment === 'stage') return 'stage';
return null; return null;
} }
function getRuntimeProjectSlug(options = {}) { function getRuntimeProjectSlug(options = {}) {
const runtimeContext = getRuntimeContext(options); const runtimeContext = getRuntimeContext(options);
return runtimeContext?.headerProjectSlug || null; return runtimeContext?.projectSlug || null;
} }
function applyRuntimeEnvironment(where = {}, options = {}) { function applyRuntimeEnvironment(where = {}, options = {}) {
const environment = getRuntimeEnvironment(options); const environment = getRuntimeEnvironment(options);
if (!environment) return where; if (!environment) return where;
return { return {
@ -37,6 +30,7 @@ function applyRuntimeEnvironment(where = {}, options = {}) {
function applyRuntimeProjectFilter(projectInclude = {}, options = {}) { function applyRuntimeProjectFilter(projectInclude = {}, options = {}) {
const projectSlug = getRuntimeProjectSlug(options); const projectSlug = getRuntimeProjectSlug(options);
if (!projectSlug) return projectInclude; if (!projectSlug) return projectInclude;
return { return {

View File

@ -1,4 +1,4 @@
const GenericDBApi = require('./base.api');
const db = require('../models'); const db = require('../models');
const Utils = require('../utils'); const Utils = require('../utils');
const { const {
@ -6,263 +6,640 @@ const {
applyRuntimeProjectFilter, applyRuntimeProjectFilter,
} = require('./runtime-context'); } = require('./runtime-context');
const Sequelize = db.Sequelize; const Sequelize = db.Sequelize;
const Op = Sequelize.Op; const Op = Sequelize.Op;
class Tour_pagesDBApi extends GenericDBApi { module.exports = class Tour_pagesDBApi {
static get MODEL() {
return db.tour_pages;
}
static get TABLE_NAME() {
return 'tour_pages'; static async create(data, options) {
} const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const projectId = data.project || data.projectId || null;
static get SEARCHABLE_FIELDS() { const tour_pages = await db.tour_pages.create(
return [ {
'source_key', id: data.id || undefined,
'name',
'slug', environment: data.environment
'background_image_url', ||
'background_video_url', null
'background_audio_url', ,
'ui_schema_json',
]; source_key: data.source_key
} ||
null
static get RANGE_FIELDS() { ,
return ['sort_order'];
} name: data.name
||
static get ENUM_FIELDS() { null
return ['environment', 'background_loop', 'requires_auth']; ,
}
slug: data.slug
static get UUID_FIELDS() { ||
return ['projectId']; null
} ,
static get CSV_FIELDS() { sort_order: data.sort_order
return [ ||
'id', null
'environment', ,
'source_key',
'name', background_image_url: data.background_image_url
'slug', ||
'sort_order', null
'createdAt', ,
];
} background_video_url: data.background_video_url
||
static get AUTOCOMPLETE_FIELD() { null
return 'name'; ,
}
background_audio_url: data.background_audio_url
static get ASSOCIATIONS() { ||
return [{ field: 'project', setter: 'setProject', isArray: false }]; null
} ,
static getFieldMapping(data) { background_loop: data.background_loop
return { ||
id: data.id || undefined, false
environment: data.environment || null,
source_key: data.source_key || null, ,
name: data.name || null,
slug: data.slug || null, requires_auth: data.requires_auth
sort_order: data.sort_order || null, ||
background_image_url: data.background_image_url || null, false
background_video_url: data.background_video_url || null,
background_audio_url: data.background_audio_url || null, ,
background_loop: data.background_loop || false,
background_video_autoplay: ui_schema_json: data.ui_schema_json
data.background_video_autoplay !== undefined ||
? data.background_video_autoplay null
: true, ,
background_video_loop: projectId,
data.background_video_loop !== undefined
? data.background_video_loop importHash: data.importHash || null,
: true, createdById: currentUser.id,
background_video_muted: updatedById: currentUser.id,
data.background_video_muted !== undefined },
? data.background_video_muted { transaction },
: true,
background_video_start_time:
data.background_video_start_time !== undefined
? data.background_video_start_time
: null,
background_video_end_time:
data.background_video_end_time !== undefined
? data.background_video_end_time
: null,
design_width: data.design_width !== undefined ? data.design_width : null,
design_height:
data.design_height !== undefined ? data.design_height : null,
requires_auth: data.requires_auth || false,
ui_schema_json: data.ui_schema_json || null,
};
}
static async create(data, options = {}) {
const currentUser = options.currentUser || { id: null };
const transaction = options.transaction;
const projectId = data.project || data.projectId || null;
const record = await this.MODEL.create(
{
...this.getFieldMapping(data),
projectId,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
); );
await record.setProject(projectId, { transaction });
await tour_pages.setProject(projectId, {
transaction,
});
return record;
}
static async findBy(where, options = {}) {
const transaction = options.transaction;
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
const projectInclude = applyRuntimeProjectFilter(
{
model: db.projects,
as: 'project',
},
options,
);
const record = await this.MODEL.findOne({ return tour_pages;
where: queryWhere, }
transaction,
include: [projectInclude],
});
if (!record) return null; static async bulkImport(data, options) {
return record.get({ plain: true }); const currentUser = (options && options.currentUser) || { id: null };
} const transaction = (options && options.transaction) || undefined;
static async findAll(filter = {}, options = {}) { // Prepare data - wrapping individual data transformations in a map() method
filter = filter || {}; const tour_pagesData = data.map((item, index) => ({
const limit = filter.limit || 0; id: item.id || undefined,
const currentPage = +filter.page || 0;
const offset = currentPage * limit; environment: item.environment
||
null
,
source_key: item.source_key
||
null
,
name: item.name
||
null
,
slug: item.slug
||
null
,
sort_order: item.sort_order
||
null
,
background_image_url: item.background_image_url
||
null
,
background_video_url: item.background_video_url
||
null
,
background_audio_url: item.background_audio_url
||
null
,
background_loop: item.background_loop
||
false
,
requires_auth: item.requires_auth
||
false
,
ui_schema_json: item.ui_schema_json
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
let where = {}; // Bulk create items
const tour_pages = await db.tour_pages.bulkCreate(tour_pagesData, { transaction });
const terms = filter.project ? filter.project.split('|') : []; // For each item created, replace relation files
const validUuids = Utils.filterValidUuids(terms);
return tour_pages;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const tour_pages = await db.tour_pages.findByPk(id, {transaction});
const updatePayload = {};
if (data.environment !== undefined) updatePayload.environment = data.environment;
if (data.source_key !== undefined) updatePayload.source_key = data.source_key;
if (data.name !== undefined) updatePayload.name = data.name;
if (data.slug !== undefined) updatePayload.slug = data.slug;
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
if (data.background_image_url !== undefined) updatePayload.background_image_url = data.background_image_url;
if (data.background_video_url !== undefined) updatePayload.background_video_url = data.background_video_url;
if (data.background_audio_url !== undefined) updatePayload.background_audio_url = data.background_audio_url;
if (data.background_loop !== undefined) updatePayload.background_loop = data.background_loop;
if (data.requires_auth !== undefined) updatePayload.requires_auth = data.requires_auth;
if (data.ui_schema_json !== undefined) updatePayload.ui_schema_json = data.ui_schema_json;
updatePayload.updatedById = currentUser.id;
await tour_pages.update(updatePayload, {transaction});
if (data.project !== undefined) {
await tour_pages.setProject(
data.project,
{ transaction }
);
}
return tour_pages;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const tour_pages = await db.tour_pages.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of tour_pages) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of tour_pages) {
await record.destroy({transaction});
}
});
return tour_pages;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const tour_pages = await db.tour_pages.findByPk(id, options);
await tour_pages.update({
deletedBy: currentUser.id
}, {
transaction,
});
await tour_pages.destroy({
transaction
});
return tour_pages;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
const projectInclude = applyRuntimeProjectFilter(
{
model: db.projects,
as: 'project',
},
options,
);
const tour_pages = await db.tour_pages.findOne(
{ where: queryWhere, include: [projectInclude], transaction },
);
if (!tour_pages) {
return tour_pages;
}
const output = tour_pages.get({plain: true});
output.page_elements_page = await tour_pages.getPage_elements_page({
transaction
});
output.page_links_from_page = await tour_pages.getPage_links_from_page({
transaction
});
output.page_links_to_page = await tour_pages.getPage_links_to_page({
transaction
});
output.project = await tour_pages.getProject({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
filter = filter || {};
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [ let include = [
{ {
model: db.projects, model: db.projects,
as: 'project', as: 'project',
where: filter.project
? { where: filter.project ? {
[Op.or]: [ [Op.or]: [
...(validUuids.length > 0 { id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
? [{ id: { [Op.in]: validUuids } }] {
: []), name: {
{ [Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
name: { }
[Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })), },
}, ]
}, } : {},
],
}
: {},
}, },
]; ];
include[0] = applyRuntimeProjectFilter(include[0], options);
include[0] = applyRuntimeProjectFilter(include[0], options); if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.id) {
if (!Utils.isValidUuid(filter.id)) { if (filter.source_key) {
return { rows: [], count: 0 }; where = {
} ...where,
where.id = filter.id; [Op.and]: Utils.ilike(
} 'tour_pages',
'source_key',
filter.source_key,
),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike(
'tour_pages',
'name',
filter.name,
),
};
}
if (filter.slug) {
where = {
...where,
[Op.and]: Utils.ilike(
'tour_pages',
'slug',
filter.slug,
),
};
}
if (filter.background_image_url) {
where = {
...where,
[Op.and]: Utils.ilike(
'tour_pages',
'background_image_url',
filter.background_image_url,
),
};
}
if (filter.background_video_url) {
where = {
...where,
[Op.and]: Utils.ilike(
'tour_pages',
'background_video_url',
filter.background_video_url,
),
};
}
if (filter.background_audio_url) {
where = {
...where,
[Op.and]: Utils.ilike(
'tour_pages',
'background_audio_url',
filter.background_audio_url,
),
};
}
if (filter.ui_schema_json) {
where = {
...where,
[Op.and]: Utils.ilike(
'tour_pages',
'ui_schema_json',
filter.ui_schema_json,
),
};
}
for (const field of this.SEARCHABLE_FIELDS) {
if (filter[field]) {
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
}
}
for (const field of this.RANGE_FIELDS) {
const rangeKey = `${field}Range`; if (filter.sort_orderRange) {
if (filter[rangeKey]) { const [start, end] = filter.sort_orderRange;
const [start, end] = filter[rangeKey];
if (start !== undefined && start !== null && start !== '') { if (start !== undefined && start !== null && start !== '') {
where[field] = { ...where[field], [Op.gte]: start }; where = {
...where,
sort_order: {
...where.sort_order,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
sort_order: {
...where.sort_order,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.environment) {
where = {
...where,
environment: filter.environment,
};
}
if (filter.background_loop) {
where = {
...where,
background_loop: filter.background_loop,
};
}
if (filter.requires_auth) {
where = {
...where,
requires_auth: filter.requires_auth,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
} }
if (end !== undefined && end !== null && end !== '') {
where[field] = { ...where[field], [Op.lte]: end }; where = applyRuntimeEnvironment(where, options);
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
} }
}
}
for (const field of this.ENUM_FIELDS) { try {
if (filter[field] !== undefined) { const { rows, count } = await db.tour_pages.findAndCountAll(queryOptions);
where[field] = filter[field];
}
}
// Validate and filter by UUID fields (e.g., projectId) return {
for (const field of this.UUID_FIELDS) { rows: options?.countOnly ? [] : rows,
if (filter[field] !== undefined) { count: count
if (!Utils.isValidUuid(filter[field])) { };
return { rows: [], count: 0 }; } catch (error) {
console.error('Error executing query:', error);
throw error;
} }
where[field] = filter[field];
}
} }
if (filter.active !== undefined) { static async findAllAutocomplete(query, limit, offset, ) {
where.active = filter.active === true || filter.active === 'true'; let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'tour_pages',
'name',
query,
),
],
};
}
const records = await db.tour_pages.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
} }
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange; };
if (start !== undefined && start !== null && start !== '') {
where.createdAt = { ...where.createdAt, [Op.gte]: start };
}
if (end !== undefined && end !== null && end !== '') {
where.createdAt = { ...where.createdAt, [Op.lte]: end };
}
}
where = applyRuntimeEnvironment(where, options);
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options.transaction,
};
if (!options.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
return {
rows: options.countOnly ? [] : rows,
count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
}
module.exports = Tour_pagesDBApi;

View File

@ -0,0 +1,565 @@
const db = require('../models');
const Utils = require('../utils');
const {
applyRuntimeEnvironment,
applyRuntimeProjectFilter,
} = require('./runtime-context');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class TransitionsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const transitions = await db.transitions.create(
{
id: data.id || undefined,
environment: data.environment
||
null
,
source_key: data.source_key
||
null
,
name: data.name
||
null
,
slug: data.slug
||
null
,
video_url: data.video_url
||
null
,
audio_url: data.audio_url
||
null
,
supports_reverse: data.supports_reverse
||
false
,
duration_sec: data.duration_sec
||
null
,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await transitions.setProject( data.project || null, {
transaction,
});
return transitions;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const transitionsData = data.map((item, index) => ({
id: item.id || undefined,
environment: item.environment
||
null
,
source_key: item.source_key
||
null
,
name: item.name
||
null
,
slug: item.slug
||
null
,
video_url: item.video_url
||
null
,
audio_url: item.audio_url
||
null
,
supports_reverse: item.supports_reverse
||
false
,
duration_sec: item.duration_sec
||
null
,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const transitions = await db.transitions.bulkCreate(transitionsData, { transaction });
// For each item created, replace relation files
return transitions;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const transitions = await db.transitions.findByPk(id, {transaction});
const updatePayload = {};
if (data.environment !== undefined) updatePayload.environment = data.environment;
if (data.source_key !== undefined) updatePayload.source_key = data.source_key;
if (data.name !== undefined) updatePayload.name = data.name;
if (data.slug !== undefined) updatePayload.slug = data.slug;
if (data.video_url !== undefined) updatePayload.video_url = data.video_url;
if (data.audio_url !== undefined) updatePayload.audio_url = data.audio_url;
if (data.supports_reverse !== undefined) updatePayload.supports_reverse = data.supports_reverse;
if (data.duration_sec !== undefined) updatePayload.duration_sec = data.duration_sec;
updatePayload.updatedById = currentUser.id;
await transitions.update(updatePayload, {transaction});
if (data.project !== undefined) {
await transitions.setProject(
data.project,
{ transaction }
);
}
return transitions;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const transitions = await db.transitions.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of transitions) {
await record.update(
{deletedBy: currentUser.id},
{transaction}
);
}
for (const record of transitions) {
await record.destroy({transaction});
}
});
return transitions;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || {id: null};
const transaction = (options && options.transaction) || undefined;
const transitions = await db.transitions.findByPk(id, options);
await transitions.update({
deletedBy: currentUser.id
}, {
transaction,
});
await transitions.destroy({
transaction
});
return transitions;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
const projectInclude = applyRuntimeProjectFilter(
{
model: db.projects,
as: 'project',
},
options,
);
const transitions = await db.transitions.findOne(
{ where: queryWhere, include: [projectInclude], transaction },
);
if (!transitions) {
return transitions;
}
const output = transitions.get({plain: true});
output.page_links_transition = await transitions.getPage_links_transition({
transaction
});
output.project = await transitions.getProject({
transaction
});
return output;
}
static async findAll(
filter,
options
) {
filter = filter || {};
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
offset = currentPage * limit;
let include = [
{
model: db.projects,
as: 'project',
where: filter.project ? {
[Op.or]: [
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
{
name: {
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
}
},
]
} : {},
},
];
include[0] = applyRuntimeProjectFilter(include[0], options);
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.source_key) {
where = {
...where,
[Op.and]: Utils.ilike(
'transitions',
'source_key',
filter.source_key,
),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike(
'transitions',
'name',
filter.name,
),
};
}
if (filter.slug) {
where = {
...where,
[Op.and]: Utils.ilike(
'transitions',
'slug',
filter.slug,
),
};
}
if (filter.video_url) {
where = {
...where,
[Op.and]: Utils.ilike(
'transitions',
'video_url',
filter.video_url,
),
};
}
if (filter.audio_url) {
where = {
...where,
[Op.and]: Utils.ilike(
'transitions',
'audio_url',
filter.audio_url,
),
};
}
if (filter.duration_secRange) {
const [start, end] = filter.duration_secRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
duration_sec: {
...where.duration_sec,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
duration_sec: {
...where.duration_sec,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true'
};
}
if (filter.environment) {
where = {
...where,
environment: filter.environment,
};
}
if (filter.supports_reverse) {
where = {
...where,
supports_reverse: filter.supports_reverse,
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
where = applyRuntimeEnvironment(where, options);
const queryOptions = {
where,
include,
distinct: true,
order: filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.transitions.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, ) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike(
'transitions',
'name',
query,
),
],
};
}
const records = await db.transitions.findAll({
attributes: [ 'id', 'name' ],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -0,0 +1,233 @@
const db = require('../models');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Ui_elementsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const ui_elements = await db.ui_elements.create(
{
id: data.id || undefined,
element_type: data.element_type ?? null,
name: data.name ?? null,
settings_json: data.settings_json ?? null,
sort_order: data.sort_order ?? 0,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
return ui_elements;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const uiElementsData = data.map((item, index) => ({
id: item.id || undefined,
element_type: item.element_type ?? null,
name: item.name ?? null,
settings_json: item.settings_json ?? null,
sort_order: item.sort_order ?? 0,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
const ui_elements = await db.ui_elements.bulkCreate(uiElementsData, { transaction });
return ui_elements;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const ui_elements = await db.ui_elements.findByPk(id, { transaction });
const updatePayload = {};
if (data.element_type !== undefined) updatePayload.element_type = data.element_type;
if (data.name !== undefined) updatePayload.name = data.name;
if (data.settings_json !== undefined) updatePayload.settings_json = data.settings_json;
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
updatePayload.updatedById = currentUser.id;
await ui_elements.update(updatePayload, { transaction });
return ui_elements;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const ui_elements = await db.ui_elements.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (innerTransaction) => {
for (const record of ui_elements) {
await record.update({ deletedBy: currentUser.id }, { transaction: innerTransaction });
}
for (const record of ui_elements) {
await record.destroy({ transaction: innerTransaction });
}
});
return ui_elements;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const ui_elements = await db.ui_elements.findByPk(id, options);
await ui_elements.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await ui_elements.destroy({
transaction,
});
return ui_elements;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const ui_elements = await db.ui_elements.findOne({ where, transaction });
if (!ui_elements) {
return ui_elements;
}
return ui_elements.get({ plain: true });
}
static async findAll(filter, options) {
filter = filter || {};
const limit = Number(filter.limit) || 0;
const currentPage = Number(filter.page) || 0;
const offset = limit ? currentPage * limit : undefined;
let where = {};
if (filter.id) {
where = {
...where,
id: Utils.uuid(filter.id),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike('ui_elements', 'name', filter.name),
};
}
if (filter.element_type) {
where = {
...where,
element_type: filter.element_type,
};
}
if (filter.sort_orderRange) {
const [start, end] = filter.sort_orderRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
sort_order: {
...where.sort_order,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
sort_order: {
...where.sort_order,
[Op.lte]: end,
},
};
}
}
let { orderBy = null } = options || {};
if (!orderBy) {
const sort = filter.sort || 'desc';
const field = filter.field || 'createdAt';
orderBy = [[field, sort]];
}
const { rows, count } = await db.ui_elements.findAndCountAll({
where,
limit: limit || undefined,
offset,
order: orderBy,
});
return {
rows,
count,
};
}
static async findAllAutocomplete(query, limit) {
let where = {};
if (query) {
where = {
[Op.or]: [
{
id: {
[Op.eq]: Utils.uuid(query),
},
},
{
name: {
[Op.iLike]: `%${query}%`,
},
},
],
};
}
const records = await db.ui_elements.findAll({
attributes: ['id', 'name'],
where,
limit: Number(limit) || undefined,
order: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +1,5 @@
module.exports = { module.exports = {
production: { production: {
dialect: 'postgres', dialect: 'postgres',
@ -6,10 +8,8 @@ module.exports = {
database: process.env.DB_NAME, database: process.env.DB_NAME,
host: process.env.DB_HOST, host: process.env.DB_HOST,
port: process.env.DB_PORT, port: process.env.DB_PORT,
logging: false, logging: console.log,
seederStorage: 'sequelize', seederStorage: 'sequelize',
migrationStorage: 'sequelize',
migrationStorageTableName: 'SequelizeMeta',
}, },
development: { development: {
username: 'postgres', username: 'postgres',
@ -19,19 +19,15 @@ module.exports = {
host: process.env.DB_HOST || 'localhost', host: process.env.DB_HOST || 'localhost',
logging: console.log, logging: console.log,
seederStorage: 'sequelize', seederStorage: 'sequelize',
migrationStorage: 'sequelize',
migrationStorageTableName: 'SequelizeMeta',
},
dev_stage: {
dialect: 'postgres',
username: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
logging: console.log,
seederStorage: 'sequelize',
migrationStorage: 'sequelize',
migrationStorageTableName: 'SequelizeMeta',
}, },
dev_stage: {
dialect: 'postgres',
username: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_NAME,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
logging: console.log,
seederStorage: 'sequelize',
}
}; };

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,124 @@
module.exports = {
async up(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const rows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.files') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
const tableName = rows[0].regclass_name;
if (tableName) {
await transaction.commit();
return;
}
await queryInterface.createTable(
'files',
{
id: {
type: Sequelize.DataTypes.UUID,
defaultValue: Sequelize.DataTypes.UUIDV4,
primaryKey: true,
},
belongsTo: {
type: Sequelize.DataTypes.STRING(255),
allowNull: true,
},
belongsToId: {
type: Sequelize.DataTypes.UUID,
allowNull: true,
},
belongsToColumn: {
type: Sequelize.DataTypes.STRING(255),
allowNull: true,
},
name: {
type: Sequelize.DataTypes.STRING(2083),
allowNull: false,
},
sizeInBytes: {
type: Sequelize.DataTypes.INTEGER,
allowNull: true,
},
privateUrl: {
type: Sequelize.DataTypes.STRING(2083),
allowNull: true,
},
publicUrl: {
type: Sequelize.DataTypes.STRING(2083),
allowNull: false,
},
createdAt: {
type: Sequelize.DataTypes.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DataTypes.DATE,
allowNull: false,
},
deletedAt: {
type: Sequelize.DataTypes.DATE,
allowNull: true,
},
createdById: {
type: Sequelize.DataTypes.UUID,
allowNull: true,
references: {
key: 'id',
model: 'users',
},
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
},
updatedById: {
type: Sequelize.DataTypes.UUID,
allowNull: true,
references: {
key: 'id',
model: 'users',
},
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
},
},
{ transaction },
);
await transaction.commit();
} catch (err) {
await transaction.rollback();
throw err;
}
},
async down(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const rows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.files') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
const tableName = rows[0].regclass_name;
if (!tableName) {
await transaction.commit();
return;
}
await queryInterface.dropTable('files', { transaction });
await transaction.commit();
} catch (err) {
await transaction.rollback();
throw err;
}
},
};

View File

@ -0,0 +1,95 @@
/**
 * Migration: create the `usersCustom_permissionsPermissions` join table
 * linking users to their custom (per-user) permissions.
 *
 * The composite primary key (users_custom_permissionsId, permissionId)
 * prevents duplicate user/permission pairs. A to_regclass() probe makes
 * the migration idempotent in both directions.
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Idempotency guard: skip creation when the table already exists.
      const rows = await queryInterface.sequelize.query(
        "SELECT to_regclass('public.\"usersCustom_permissionsPermissions\"') AS regclass_name;",
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      const tableName = rows[0].regclass_name;
      if (tableName) {
        await transaction.commit();
        return;
      }
      await queryInterface.createTable(
        'usersCustom_permissionsPermissions',
        {
          createdAt: {
            type: Sequelize.DataTypes.DATE,
            allowNull: false,
          },
          updatedAt: {
            type: Sequelize.DataTypes.DATE,
            allowNull: false,
          },
          // Half of the composite PK; rows vanish with the owning user.
          users_custom_permissionsId: {
            type: Sequelize.DataTypes.UUID,
            allowNull: false,
            primaryKey: true,
            references: {
              model: 'users',
              key: 'id',
            },
            onDelete: 'CASCADE',
            onUpdate: 'CASCADE',
          },
          // Other half of the composite PK; rows vanish with the permission.
          permissionId: {
            type: Sequelize.DataTypes.UUID,
            allowNull: false,
            primaryKey: true,
            references: {
              model: 'permissions',
              key: 'id',
            },
            onDelete: 'CASCADE',
            onUpdate: 'CASCADE',
          },
        },
        { transaction },
      );
      // The PK already indexes the leading column; this covers lookups
      // that start from the permission side.
      await queryInterface.addIndex(
        'usersCustom_permissionsPermissions',
        ['permissionId'],
        { transaction },
      );
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  },
  async down(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Only drop the table when it actually exists.
      const rows = await queryInterface.sequelize.query(
        "SELECT to_regclass('public.\"usersCustom_permissionsPermissions\"') AS regclass_name;",
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      const tableName = rows[0].regclass_name;
      if (!tableName) {
        await transaction.commit();
        return;
      }
      await queryInterface.dropTable('usersCustom_permissionsPermissions', { transaction });
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  },
};

View File

@ -0,0 +1,123 @@
module.exports = {
async up(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const tableRows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.page_elements') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!tableRows[0]?.regclass_name) {
await transaction.commit();
return;
}
const nullableRows = await queryInterface.sequelize.query(
`SELECT is_nullable
FROM information_schema.columns
WHERE table_schema = 'public'
AND table_name = 'page_elements'
AND column_name = 'pageId';`,
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!nullableRows.length || nullableRows[0].is_nullable === 'YES') {
await transaction.commit();
return;
}
await queryInterface.changeColumn(
'page_elements',
'pageId',
{
type: Sequelize.DataTypes.UUID,
allowNull: true,
references: {
model: 'tour_pages',
key: 'id',
},
},
{ transaction },
);
await transaction.commit();
} catch (err) {
await transaction.rollback();
throw err;
}
},
async down(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const tableRows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.page_elements') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!tableRows[0]?.regclass_name) {
await transaction.commit();
return;
}
const nullableRows = await queryInterface.sequelize.query(
`SELECT is_nullable
FROM information_schema.columns
WHERE table_schema = 'public'
AND table_name = 'page_elements'
AND column_name = 'pageId';`,
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!nullableRows.length || nullableRows[0].is_nullable === 'NO') {
await transaction.commit();
return;
}
const nullCountRows = await queryInterface.sequelize.query(
'SELECT COUNT(*)::int AS count FROM "page_elements" WHERE "pageId" IS NULL;',
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (Number(nullCountRows[0]?.count || 0) > 0) {
throw new Error('Cannot make page_elements.pageId NOT NULL because NULL values exist.');
}
await queryInterface.changeColumn(
'page_elements',
'pageId',
{
type: Sequelize.DataTypes.UUID,
allowNull: false,
references: {
model: 'tour_pages',
key: 'id',
},
},
{ transaction },
);
await transaction.commit();
} catch (err) {
await transaction.rollback();
throw err;
}
},
};

View File

@ -0,0 +1,105 @@
/**
 * Migration: relax the NOT NULL constraint on page_elements.pageId.
 *
 * Uses raw ALTER COLUMN ... DROP/SET NOT NULL so only the nullability is
 * changed; the existing FK to tour_pages is left untouched. All checks are
 * guards that make the migration idempotent and safe to re-run.
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Skip silently when the table has not been created yet.
      const tableRows = await queryInterface.sequelize.query(
        "SELECT to_regclass('public.page_elements') AS regclass_name;",
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      if (!tableRows[0]?.regclass_name) {
        await transaction.commit();
        return;
      }
      // Skip when the column is missing or already nullable.
      const nullableRows = await queryInterface.sequelize.query(
        `SELECT is_nullable
         FROM information_schema.columns
         WHERE table_schema = 'public'
         AND table_name = 'page_elements'
         AND column_name = 'pageId';`,
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      if (!nullableRows.length || nullableRows[0].is_nullable === 'YES') {
        await transaction.commit();
        return;
      }
      await queryInterface.sequelize.query(
        'ALTER TABLE "page_elements" ALTER COLUMN "pageId" DROP NOT NULL;',
        { transaction },
      );
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  },
  async down(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Nothing to do when the table is absent.
      const tableRows = await queryInterface.sequelize.query(
        "SELECT to_regclass('public.page_elements') AS regclass_name;",
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      if (!tableRows[0]?.regclass_name) {
        await transaction.commit();
        return;
      }
      // Skip when the column is missing or already NOT NULL.
      const nullableRows = await queryInterface.sequelize.query(
        `SELECT is_nullable
         FROM information_schema.columns
         WHERE table_schema = 'public'
         AND table_name = 'page_elements'
         AND column_name = 'pageId';`,
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      if (!nullableRows.length || nullableRows[0].is_nullable === 'NO') {
        await transaction.commit();
        return;
      }
      // Fail early with a clear message rather than letting the ALTER
      // abort on existing NULL rows.
      const nullCountRows = await queryInterface.sequelize.query(
        'SELECT COUNT(*)::int AS count FROM "page_elements" WHERE "pageId" IS NULL;',
        {
          transaction,
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      if (Number(nullCountRows[0]?.count || 0) > 0) {
        throw new Error('Cannot make page_elements.pageId NOT NULL because NULL values exist.');
      }
      await queryInterface.sequelize.query(
        'ALTER TABLE "page_elements" ALTER COLUMN "pageId" SET NOT NULL;',
        { transaction },
      );
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  },
};

View File

@ -0,0 +1,108 @@
module.exports = {
async up(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const tableRows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.ui_elements') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (tableRows[0]?.regclass_name) {
await transaction.commit();
return;
}
await queryInterface.createTable(
'ui_elements',
{
id: {
type: Sequelize.DataTypes.UUID,
defaultValue: Sequelize.DataTypes.UUIDV4,
primaryKey: true,
},
element_type: {
type: Sequelize.DataTypes.TEXT,
allowNull: false,
},
name: {
type: Sequelize.DataTypes.TEXT,
allowNull: true,
},
settings_json: {
type: Sequelize.DataTypes.TEXT,
allowNull: true,
},
sort_order: {
type: Sequelize.DataTypes.INTEGER,
allowNull: false,
defaultValue: 0,
},
createdById: {
type: Sequelize.DataTypes.UUID,
allowNull: true,
references: {
model: 'users',
key: 'id',
},
},
updatedById: {
type: Sequelize.DataTypes.UUID,
allowNull: true,
references: {
model: 'users',
key: 'id',
},
},
createdAt: { type: Sequelize.DataTypes.DATE },
updatedAt: { type: Sequelize.DataTypes.DATE },
deletedAt: { type: Sequelize.DataTypes.DATE },
importHash: {
type: Sequelize.DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{ transaction },
);
await queryInterface.addIndex('ui_elements', ['element_type'], { transaction });
await queryInterface.addIndex('ui_elements', ['sort_order'], { transaction });
await queryInterface.addIndex('ui_elements', ['deletedAt'], { transaction });
await transaction.commit();
} catch (error) {
await transaction.rollback();
throw error;
}
},
async down(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const tableRows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.ui_elements') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!tableRows[0]?.regclass_name) {
await transaction.commit();
return;
}
await queryInterface.dropTable('ui_elements', { transaction });
await transaction.commit();
} catch (error) {
await transaction.rollback();
throw error;
}
},
};

View File

@ -0,0 +1,85 @@
module.exports = {
async up(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const tableRows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.publish_events') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!tableRows[0]?.regclass_name) {
await transaction.commit();
return;
}
const tableDefinition = await queryInterface.describeTable('publish_events', { transaction });
if (!tableDefinition.title) {
await queryInterface.addColumn(
'publish_events',
'title',
{
type: Sequelize.DataTypes.STRING,
allowNull: true,
},
{ transaction },
);
}
if (!tableDefinition.description) {
await queryInterface.addColumn(
'publish_events',
'description',
{
type: Sequelize.DataTypes.TEXT,
allowNull: true,
},
{ transaction },
);
}
await transaction.commit();
} catch (error) {
await transaction.rollback();
throw error;
}
},
async down(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
const tableRows = await queryInterface.sequelize.query(
"SELECT to_regclass('public.publish_events') AS regclass_name;",
{
transaction,
type: Sequelize.QueryTypes.SELECT,
},
);
if (!tableRows[0]?.regclass_name) {
await transaction.commit();
return;
}
const tableDefinition = await queryInterface.describeTable('publish_events', { transaction });
if (tableDefinition.description) {
await queryInterface.removeColumn('publish_events', 'description', { transaction });
}
if (tableDefinition.title) {
await queryInterface.removeColumn('publish_events', 'title', { transaction });
}
await transaction.commit();
} catch (error) {
await transaction.rollback();
throw error;
}
},
};

View File

@ -1,274 +0,0 @@
'use strict';
/**
* Migration to add foreign key constraints to all model associations.
* This enforces referential integrity at the database level.
*/
module.exports = {
async up(queryInterface) {
const transaction = await queryInterface.sequelize.transaction();
try {
// Helper to add FK constraint safely (checks if exists first)
const addForeignKey = async (
tableName,
columnName,
references,
onDelete = 'CASCADE',
onUpdate = 'CASCADE',
) => {
const constraintName = `${tableName}_${columnName}_fkey`;
// Check if constraint already exists
const [results] = await queryInterface.sequelize.query(
`SELECT constraint_name FROM information_schema.table_constraints
WHERE table_name = '${tableName}' AND constraint_name = '${constraintName}'`,
{ transaction },
);
if (results.length === 0) {
await queryInterface.addConstraint(tableName, {
fields: [columnName],
type: 'foreign key',
name: constraintName,
references: {
table: references.table,
field: references.field,
},
onDelete,
onUpdate,
transaction,
});
console.log(`Added FK constraint: ${constraintName}`);
} else {
console.log(`FK constraint already exists: ${constraintName}`);
}
};
// asset_variants -> assets
await addForeignKey(
'asset_variants',
'assetId',
{ table: 'assets', field: 'id' },
'CASCADE',
'CASCADE',
);
// page_elements -> tour_pages
await addForeignKey(
'page_elements',
'pageId',
{ table: 'tour_pages', field: 'id' },
'CASCADE',
'CASCADE',
);
// page_links -> tour_pages (from_page)
await addForeignKey(
'page_links',
'from_pageId',
{ table: 'tour_pages', field: 'id' },
'CASCADE',
'CASCADE',
);
// page_links -> tour_pages (to_page)
await addForeignKey(
'page_links',
'to_pageId',
{ table: 'tour_pages', field: 'id' },
'SET NULL',
'CASCADE',
);
// page_links -> transitions
await addForeignKey(
'page_links',
'transitionId',
{ table: 'transitions', field: 'id' },
'SET NULL',
'CASCADE',
);
// assets -> projects
await addForeignKey(
'assets',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// tour_pages -> projects
await addForeignKey(
'tour_pages',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// transitions -> projects
await addForeignKey(
'transitions',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// project_memberships -> projects
await addForeignKey(
'project_memberships',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// project_memberships -> users
await addForeignKey(
'project_memberships',
'userId',
{ table: 'users', field: 'id' },
'CASCADE',
'CASCADE',
);
// presigned_url_requests -> projects
await addForeignKey(
'presigned_url_requests',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// presigned_url_requests -> users
await addForeignKey(
'presigned_url_requests',
'userId',
{ table: 'users', field: 'id' },
'CASCADE',
'CASCADE',
);
// project_audio_tracks -> projects
await addForeignKey(
'project_audio_tracks',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// publish_events -> projects
await addForeignKey(
'publish_events',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// publish_events -> users (SET NULL to preserve audit trail)
await addForeignKey(
'publish_events',
'userId',
{ table: 'users', field: 'id' },
'SET NULL',
'CASCADE',
);
// pwa_caches -> projects
await addForeignKey(
'pwa_caches',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// access_logs -> projects
await addForeignKey(
'access_logs',
'projectId',
{ table: 'projects', field: 'id' },
'CASCADE',
'CASCADE',
);
// access_logs -> users (SET NULL to preserve audit trail)
await addForeignKey(
'access_logs',
'userId',
{ table: 'users', field: 'id' },
'SET NULL',
'CASCADE',
);
// users -> roles (SET NULL so deleting role doesn't delete users)
await addForeignKey(
'users',
'app_roleId',
{ table: 'roles', field: 'id' },
'SET NULL',
'CASCADE',
);
await transaction.commit();
console.log('All FK constraints added successfully');
} catch (error) {
await transaction.rollback();
throw error;
}
},
async down(queryInterface) {
const transaction = await queryInterface.sequelize.transaction();
try {
const dropForeignKey = async (tableName, columnName) => {
const constraintName = `${tableName}_${columnName}_fkey`;
try {
await queryInterface.removeConstraint(tableName, constraintName, {
transaction,
});
console.log(`Removed FK constraint: ${constraintName}`);
} catch (error) {
console.log(
`FK constraint not found (may not exist): ${constraintName}`,
);
}
};
// Remove all FK constraints in reverse order
await dropForeignKey('users', 'app_roleId');
await dropForeignKey('access_logs', 'userId');
await dropForeignKey('access_logs', 'projectId');
await dropForeignKey('pwa_caches', 'projectId');
await dropForeignKey('publish_events', 'userId');
await dropForeignKey('publish_events', 'projectId');
await dropForeignKey('project_audio_tracks', 'projectId');
await dropForeignKey('presigned_url_requests', 'userId');
await dropForeignKey('presigned_url_requests', 'projectId');
await dropForeignKey('project_memberships', 'userId');
await dropForeignKey('project_memberships', 'projectId');
await dropForeignKey('transitions', 'projectId');
await dropForeignKey('tour_pages', 'projectId');
await dropForeignKey('assets', 'projectId');
await dropForeignKey('page_links', 'transitionId');
await dropForeignKey('page_links', 'to_pageId');
await dropForeignKey('page_links', 'from_pageId');
await dropForeignKey('page_elements', 'pageId');
await dropForeignKey('asset_variants', 'assetId');
await transaction.commit();
console.log('All FK constraints removed successfully');
} catch (error) {
await transaction.rollback();
throw error;
}
},
};

View File

@ -1,126 +0,0 @@
'use strict';
/**
* Migration to remove redundant deletion tracking columns.
*
* The `is_deleted` and `deleted_at_time` columns are redundant because:
* - Sequelize's `paranoid: true` mode already uses `deletedAt` for soft-delete
* - These columns were set but never queried for filtering
*
* IMPORTANT: This migration should only be run after verifying no external
* systems depend on these columns. Consider backing up data first.
*/
module.exports = {
async up(queryInterface) {
const transaction = await queryInterface.sequelize.transaction();
try {
// Helper to safely remove column if it exists
const removeColumnIfExists = async (tableName, columnName) => {
const [results] = await queryInterface.sequelize.query(
`SELECT column_name FROM information_schema.columns
WHERE table_name = '${tableName}' AND column_name = '${columnName}'`,
{ transaction },
);
if (results.length > 0) {
await queryInterface.removeColumn(tableName, columnName, {
transaction,
});
console.log(`Removed column: ${tableName}.${columnName}`);
} else {
console.log(
`Column does not exist (skipping): ${tableName}.${columnName}`,
);
}
};
// Remove is_deleted index from assets first (if exists)
try {
await queryInterface.removeIndex('assets', 'assets_is_deleted', {
transaction,
});
console.log('Removed index: assets_is_deleted');
} catch (error) {
console.log('Index assets_is_deleted not found (may not exist)');
}
// Remove redundant columns from assets table
await removeColumnIfExists('assets', 'is_deleted');
await removeColumnIfExists('assets', 'deleted_at_time');
// Remove redundant columns from projects table
await removeColumnIfExists('projects', 'is_deleted');
await removeColumnIfExists('projects', 'deleted_at_time');
await transaction.commit();
console.log('Redundant deletion columns removed successfully');
} catch (error) {
await transaction.rollback();
throw error;
}
},
async down(queryInterface, Sequelize) {
const transaction = await queryInterface.sequelize.transaction();
try {
// Re-add columns to assets table
await queryInterface.addColumn(
'assets',
'is_deleted',
{
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: false,
},
{ transaction },
);
await queryInterface.addColumn(
'assets',
'deleted_at_time',
{
type: Sequelize.DATE,
allowNull: true,
},
{ transaction },
);
// Re-add index
await queryInterface.addIndex('assets', ['is_deleted'], {
name: 'assets_is_deleted',
transaction,
});
// Re-add columns to projects table
await queryInterface.addColumn(
'projects',
'is_deleted',
{
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: false,
},
{ transaction },
);
await queryInterface.addColumn(
'projects',
'deleted_at_time',
{
type: Sequelize.DATE,
allowNull: true,
},
{ transaction },
);
await transaction.commit();
console.log('Redundant deletion columns restored successfully');
} catch (error) {
await transaction.rollback();
throw error;
}
},
};

View File

@ -1,79 +0,0 @@
'use strict';
/**
* Migration: Rename ui_elements table to element_type_defaults
*
* This migration renames the table for better clarity:
* - ui_elements contained GLOBAL platform-wide default settings
* - The new name element_type_defaults better describes this purpose
* - Adds index on deletedAt for soft delete queries
*/
module.exports = {
  async up(queryInterface, Sequelize) {
    // Check if old table exists; nothing to rename otherwise.
    const tableExists = await queryInterface.sequelize.query(
      `SELECT EXISTS (
        SELECT FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'ui_elements'
      );`,
      { type: Sequelize.QueryTypes.SELECT },
    );
    if (!tableExists[0]?.exists) {
      console.log('Table ui_elements does not exist, skipping rename');
      return;
    }
    // Check if new table already exists (migration may have been partially run)
    const newTableExists = await queryInterface.sequelize.query(
      `SELECT EXISTS (
        SELECT FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'element_type_defaults'
      );`,
      { type: Sequelize.QueryTypes.SELECT },
    );
    if (newTableExists[0]?.exists) {
      console.log(
        'Table element_type_defaults already exists, skipping rename',
      );
      return;
    }
    // Rename table. Postgres carries indexes and constraints along with the
    // rename, so no further DDL is needed here.
    await queryInterface.renameTable('ui_elements', 'element_type_defaults');
    // Update any sequences (PostgreSQL auto-creates these for SERIAL columns, but UUID doesn't need them)
    // No sequence updates needed since we use UUID primary keys
    console.log('Successfully renamed ui_elements to element_type_defaults');
  },
  async down(queryInterface, Sequelize) {
    // Check if new table exists; nothing to roll back otherwise.
    const tableExists = await queryInterface.sequelize.query(
      `SELECT EXISTS (
        SELECT FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'element_type_defaults'
      );`,
      { type: Sequelize.QueryTypes.SELECT },
    );
    if (!tableExists[0]?.exists) {
      console.log(
        'Table element_type_defaults does not exist, skipping rollback',
      );
      return;
    }
    // Rename table back
    await queryInterface.renameTable('element_type_defaults', 'ui_elements');
    console.log(
      'Successfully rolled back: renamed element_type_defaults to ui_elements',
    );
  },
};

View File

@ -1,178 +0,0 @@
'use strict';
/**
* Migration: Convert page_elements.element_type from ENUM to TEXT
*
* This migration:
* 1. Converts element_type column from ENUM to TEXT for flexibility
* 2. Maps nav_button to navigation_next or navigation_prev based on content_json.navType
* 3. Drops the old ENUM type
*
* Benefits of TEXT over ENUM:
* - Flexibility to add new element types without migrations
* - No ENUM sync issues between environments
* - Application-level validation ensures type safety
*/
module.exports = {
  // All steps run inside one transaction, so a failure at any point leaves
  // the original ENUM column untouched. Step order matters: nav_button can
  // only be remapped AFTER the column becomes TEXT, because 'navigation_next'
  // / 'navigation_prev' are not members of the old ENUM.
  async up(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Step 1: Create a temporary TEXT column
      await queryInterface.addColumn(
        'page_elements',
        'element_type_text',
        {
          type: Sequelize.TEXT,
          allowNull: true,
        },
        { transaction },
      );
      // Step 2: Copy ENUM values to TEXT column
      await queryInterface.sequelize.query(
        `UPDATE page_elements SET element_type_text = element_type::TEXT`,
        { transaction },
      );
      // Step 3: Drop the old ENUM column
      await queryInterface.removeColumn('page_elements', 'element_type', {
        transaction,
      });
      // Step 4: Rename TEXT column to element_type
      await queryInterface.renameColumn(
        'page_elements',
        'element_type_text',
        'element_type',
        { transaction },
      );
      // Step 5: Add NOT NULL constraint
      await queryInterface.changeColumn(
        'page_elements',
        'element_type',
        {
          type: Sequelize.TEXT,
          allowNull: false,
        },
        { transaction },
      );
      // Step 6: Now map nav_button to specific navigation types (column is TEXT now)
      // Forward navigation (default if navType not specified)
      await queryInterface.sequelize.query(
        `UPDATE page_elements
         SET element_type = 'navigation_next'
         WHERE element_type = 'nav_button'
         AND (
           content_json IS NULL
           OR content_json::jsonb->>'navType' = 'forward'
           OR content_json::jsonb->>'navType' IS NULL
         )`,
        { transaction },
      );
      // Back navigation
      await queryInterface.sequelize.query(
        `UPDATE page_elements
         SET element_type = 'navigation_prev'
         WHERE element_type = 'nav_button'
         AND content_json IS NOT NULL
         AND content_json::jsonb->>'navType' = 'back'`,
        { transaction },
      );
      // Step 7: Drop the old ENUM type if it exists (IF EXISTS keeps the
      // step idempotent across environments where the type was never created)
      await queryInterface.sequelize.query(
        `DROP TYPE IF EXISTS "enum_page_elements_element_type"`,
        { transaction },
      );
      await transaction.commit();
      console.log(
        'Successfully converted element_type from ENUM to TEXT and mapped nav_button types',
      );
    } catch (error) {
      await transaction.rollback();
      throw error;
    }
  },
  // Reverse conversion: collapse the navigation_* values back to nav_button
  // first, otherwise the TEXT->ENUM cast in Step 5 would fail on values the
  // recreated ENUM does not contain.
  async down(queryInterface, _Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Step 1: Map navigation types back to nav_button (before creating ENUM)
      await queryInterface.sequelize.query(
        `UPDATE page_elements
         SET element_type = 'nav_button'
         WHERE element_type IN ('navigation_next', 'navigation_prev')`,
        { transaction },
      );
      // Step 2: Drop any existing ENUM types that might conflict
      await queryInterface.sequelize.query(
        `DROP TYPE IF EXISTS "enum_page_elements_element_type" CASCADE`,
        { transaction },
      );
      await queryInterface.sequelize.query(
        `DROP TYPE IF EXISTS "enum_page_elements_element_type_enum" CASCADE`,
        { transaction },
      );
      // Step 3: Create the ENUM type with original values
      await queryInterface.sequelize.query(
        `CREATE TYPE "enum_page_elements_element_type" AS ENUM (
          'nav_button',
          'spot',
          'description',
          'tooltip',
          'gallery',
          'carousel',
          'logo',
          'video_player',
          'popup'
        )`,
        { transaction },
      );
      // Step 4: Add ENUM column directly via raw SQL to avoid Sequelize creating another type
      await queryInterface.sequelize.query(
        `ALTER TABLE page_elements ADD COLUMN element_type_enum "enum_page_elements_element_type"`,
        { transaction },
      );
      // Step 5: Copy TEXT values to ENUM column
      await queryInterface.sequelize.query(
        `UPDATE page_elements SET element_type_enum = element_type::"enum_page_elements_element_type"`,
        { transaction },
      );
      // Step 6: Drop TEXT column
      await queryInterface.removeColumn('page_elements', 'element_type', {
        transaction,
      });
      // Step 7: Rename ENUM column
      await queryInterface.renameColumn(
        'page_elements',
        'element_type_enum',
        'element_type',
        { transaction },
      );
      // Step 8: Add NOT NULL constraint
      await queryInterface.sequelize.query(
        `ALTER TABLE page_elements ALTER COLUMN element_type SET NOT NULL`,
        { transaction },
      );
      await transaction.commit();
      console.log('Successfully reverted element_type from TEXT to ENUM');
    } catch (error) {
      await transaction.rollback();
      throw error;
    }
  },
};

View File

@ -1,187 +0,0 @@
'use strict';
/**
* Migration: Create project_element_defaults table
*
* This table stores project-specific element default settings that override
* the global element_type_defaults. Key design decisions:
*
* - element_type is TEXT (not ENUM) for flexibility
* - source_element_id is optional FK for audit trail (SET NULL on global delete)
* - snapshot_version tracks generations for "check for updates" feature
* - NO environment field - applies across all environments for consistent branding
* - Unique constraint on (projectId, element_type) ensures one override per type per project
*/
module.exports = {
async up(queryInterface, Sequelize) {
// Check if table already exists
const tableExists = await queryInterface.sequelize.query(
`SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'project_element_defaults'
);`,
{ type: Sequelize.QueryTypes.SELECT },
);
if (tableExists[0]?.exists) {
console.log(
'Table project_element_defaults already exists, skipping creation',
);
return;
}
await queryInterface.createTable('project_element_defaults', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
element_type: {
type: Sequelize.TEXT,
allowNull: false,
},
name: {
type: Sequelize.TEXT,
allowNull: true,
},
sort_order: {
type: Sequelize.INTEGER,
allowNull: false,
defaultValue: 0,
},
settings_json: {
type: Sequelize.TEXT,
allowNull: true,
},
source_element_id: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: 'element_type_defaults',
key: 'id',
},
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
},
snapshot_version: {
type: Sequelize.INTEGER,
allowNull: false,
defaultValue: 1,
},
projectId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: 'projects',
key: 'id',
},
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
},
createdById: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: 'users',
key: 'id',
},
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
},
updatedById: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: 'users',
key: 'id',
},
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
},
importHash: {
type: Sequelize.STRING(255),
allowNull: true,
unique: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
deletedAt: {
type: Sequelize.DATE,
allowNull: true,
},
});
// Add indexes
await queryInterface.addIndex('project_element_defaults', ['projectId'], {
name: 'project_element_defaults_projectId',
});
await queryInterface.addIndex(
'project_element_defaults',
['projectId', 'element_type'],
{
name: 'project_element_defaults_projectId_element_type',
unique: true,
where: { deletedAt: null },
},
);
await queryInterface.addIndex(
'project_element_defaults',
['element_type'],
{
name: 'project_element_defaults_element_type',
},
);
await queryInterface.addIndex(
'project_element_defaults',
['source_element_id'],
{
name: 'project_element_defaults_source_element_id',
},
);
await queryInterface.addIndex('project_element_defaults', ['deletedAt'], {
name: 'project_element_defaults_deletedAt',
});
console.log('Successfully created project_element_defaults table');
},
async down(queryInterface, _Sequelize) {
// Drop indexes first
await queryInterface.removeIndex(
'project_element_defaults',
'project_element_defaults_projectId',
);
await queryInterface.removeIndex(
'project_element_defaults',
'project_element_defaults_projectId_element_type',
);
await queryInterface.removeIndex(
'project_element_defaults',
'project_element_defaults_element_type',
);
await queryInterface.removeIndex(
'project_element_defaults',
'project_element_defaults_source_element_id',
);
await queryInterface.removeIndex(
'project_element_defaults',
'project_element_defaults_deletedAt',
);
// Drop table
await queryInterface.dropTable('project_element_defaults');
console.log('Successfully dropped project_element_defaults table');
},
};

View File

@ -1,276 +0,0 @@
'use strict';
/**
* Migration: Backfill project_element_defaults for existing projects
*
* For each existing project that doesn't have project_element_defaults,
* create a snapshot of the current global element_type_defaults.
*/
// Default element types to ensure they exist before backfilling
const DEFAULT_ELEMENT_TYPES = [
{
element_type: 'navigation_next',
name: 'Navigation Forward Button',
sort_order: 1,
settings_json: JSON.stringify({
label: 'Navigation: Forward',
navLabel: 'Forward',
navType: 'forward',
navDisabled: false,
transitionReverseMode: 'auto_reverse',
transitionDurationSec: 0.7,
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'navigation_prev',
name: 'Navigation Back Button',
sort_order: 2,
settings_json: JSON.stringify({
label: 'Navigation: Back',
navLabel: 'Back',
navType: 'back',
navDisabled: false,
transitionReverseMode: 'auto_reverse',
transitionDurationSec: 0.7,
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'tooltip',
name: 'Tooltip',
sort_order: 3,
settings_json: JSON.stringify({
label: 'Tooltip',
tooltipTitle: 'Tooltip title',
tooltipText: 'Tooltip text',
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'description',
name: 'Description',
sort_order: 4,
settings_json: JSON.stringify({
label: 'Description',
descriptionTitle: 'TITLE',
descriptionText: '',
descriptionTitleFontSize: '48px',
descriptionTextFontSize: '36px',
descriptionTitleFontFamily: 'inherit',
descriptionTextFontFamily: 'inherit',
descriptionTitleColor: '#000000',
descriptionTextColor: '#4B5563',
descriptionBackgroundColor: 'transparent',
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'gallery',
name: 'Gallery',
sort_order: 5,
settings_json: JSON.stringify({
label: 'Gallery',
galleryCards: [{ imageUrl: '', title: 'Card 1', description: '' }],
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'carousel',
name: 'Carousel',
sort_order: 6,
settings_json: JSON.stringify({
label: 'Carousel',
carouselSlides: [{ imageUrl: '', caption: 'Slide 1' }],
carouselPrevIconUrl: '',
carouselNextIconUrl: '',
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'video_player',
name: 'Video Player',
sort_order: 7,
settings_json: JSON.stringify({
label: 'Video Player',
mediaUrl: '',
mediaAutoplay: true,
mediaLoop: true,
mediaMuted: true,
appearDelaySec: 0,
appearDurationSec: null,
}),
},
{
element_type: 'audio_player',
name: 'Audio Player',
sort_order: 8,
settings_json: JSON.stringify({
label: 'Audio Player',
mediaUrl: '',
mediaAutoplay: true,
mediaLoop: true,
mediaMuted: false,
appearDelaySec: 0,
appearDurationSec: null,
}),
},
];
module.exports = {
async up(queryInterface, Sequelize) {
// First, ensure element_type_defaults has all default rows
// This is needed because the API's lazy initialization won't have run yet during migration
const [existingTypes] = await queryInterface.sequelize.query(
`SELECT element_type FROM element_type_defaults WHERE "deletedAt" IS NULL`,
{ type: Sequelize.QueryTypes.SELECT },
);
const existingTypeSet = new Set(
Array.isArray(existingTypes)
? existingTypes.map((t) => t.element_type)
: existingTypes
? [existingTypes.element_type]
: [],
);
// Insert missing element types
for (const defaultType of DEFAULT_ELEMENT_TYPES) {
if (!existingTypeSet.has(defaultType.element_type)) {
await queryInterface.sequelize.query(
`INSERT INTO element_type_defaults (id, element_type, name, sort_order, settings_json, "createdAt", "updatedAt")
VALUES (gen_random_uuid(), :element_type, :name, :sort_order, :settings_json, NOW(), NOW())`,
{
replacements: {
element_type: defaultType.element_type,
name: defaultType.name,
sort_order: defaultType.sort_order,
settings_json: defaultType.settings_json,
},
},
);
console.log(
`Created missing element_type_default: ${defaultType.element_type}`,
);
}
}
// Get all existing projects
const [projects] = await queryInterface.sequelize.query(
`SELECT id FROM projects WHERE "deletedAt" IS NULL`,
{ type: Sequelize.QueryTypes.SELECT },
);
if (!projects || projects.length === 0) {
console.log('No projects found, skipping backfill');
return;
}
// Get all global element type defaults (now guaranteed to have all types)
const [globalDefaults] = await queryInterface.sequelize.query(
`SELECT id, element_type, name, sort_order, settings_json
FROM element_type_defaults
WHERE "deletedAt" IS NULL`,
{ type: Sequelize.QueryTypes.SELECT },
);
if (!globalDefaults || globalDefaults.length === 0) {
console.log('No global element type defaults found, skipping backfill');
return;
}
const projectIds = Array.isArray(projects)
? projects.map((p) => p.id)
: [projects.id];
const globalDefaultRows = Array.isArray(globalDefaults)
? globalDefaults
: [globalDefaults];
// For each project, add any missing element type defaults
for (const projectId of projectIds) {
// Get existing element types for this project
const [existingDefaults] = await queryInterface.sequelize.query(
`SELECT element_type FROM project_element_defaults
WHERE "projectId" = :projectId AND "deletedAt" IS NULL`,
{
replacements: { projectId },
type: Sequelize.QueryTypes.SELECT,
},
);
const existingProjectTypes = new Set(
Array.isArray(existingDefaults)
? existingDefaults.map((d) => d.element_type)
: existingDefaults
? [existingDefaults.element_type]
: [],
);
// Create project element defaults for missing types
let addedCount = 0;
for (const globalDefault of globalDefaultRows) {
if (existingProjectTypes.has(globalDefault.element_type)) {
continue; // Already has this type
}
await queryInterface.sequelize.query(
`INSERT INTO project_element_defaults
(id, element_type, name, sort_order, settings_json, source_element_id, snapshot_version, "projectId", "createdAt", "updatedAt")
VALUES (
gen_random_uuid(),
:element_type,
:name,
:sort_order,
:settings_json,
:source_element_id,
1,
:projectId,
NOW(),
NOW()
)`,
{
replacements: {
element_type: globalDefault.element_type,
name: globalDefault.name,
sort_order: globalDefault.sort_order,
settings_json: globalDefault.settings_json,
source_element_id: globalDefault.id,
projectId,
},
type: Sequelize.QueryTypes.INSERT,
},
);
addedCount++;
}
if (addedCount > 0) {
console.log(
`Backfilled ${addedCount} element defaults for project ${projectId}`,
);
} else {
console.log(`Project ${projectId} already has all element defaults`);
}
}
console.log(
'Successfully backfilled project_element_defaults for existing projects',
);
},
async down(queryInterface, _Sequelize) {
// Delete all project_element_defaults with snapshot_version = 1
// (only the ones we created during backfill)
await queryInterface.sequelize.query(
`DELETE FROM project_element_defaults WHERE snapshot_version = 1`,
);
console.log('Successfully removed backfilled project_element_defaults');
},
};

View File

@ -1,52 +0,0 @@
'use strict';
/**
* Migration: Fix project_audio_tracks.environment NULL constraint
*
* Unlike tour_pages and transitions, project_audio_tracks.environment allows NULL.
* This migration fixes it to match other models - NOT NULL with default 'dev'.
*/
module.exports = {
async up(queryInterface, Sequelize) {
// Check if column exists
const [columns] = await queryInterface.sequelize.query(
`SELECT column_name FROM information_schema.columns
WHERE table_name = 'project_audio_tracks' AND column_name = 'environment'`,
{ type: Sequelize.QueryTypes.SELECT },
);
if (!columns) {
console.log(
'Column project_audio_tracks.environment does not exist, skipping',
);
return;
}
// Set NULL values to 'dev'
await queryInterface.sequelize.query(
`UPDATE project_audio_tracks SET environment = 'dev' WHERE environment IS NULL`,
);
// Alter column to NOT NULL with default
await queryInterface.changeColumn('project_audio_tracks', 'environment', {
type: Sequelize.ENUM('dev', 'stage', 'production'),
allowNull: false,
defaultValue: 'dev',
});
console.log(
'Successfully fixed project_audio_tracks.environment to NOT NULL with default dev',
);
},
async down(queryInterface, Sequelize) {
// Revert to allow NULL
await queryInterface.changeColumn('project_audio_tracks', 'environment', {
type: Sequelize.ENUM('dev', 'stage', 'production'),
allowNull: true,
defaultValue: 'dev',
});
console.log('Reverted project_audio_tracks.environment to allow NULL');
},
};

View File

@ -1,208 +0,0 @@
'use strict';
/**
 * Migration: Copy existing dev content to stage environment
 *
 * This migration initializes the stage environment for existing projects
 * by copying all dev content to stage. This establishes the new workflow
 * where constructor edits dev, then explicitly saves to stage.
 *
 * Copy strategy: every copied stage row records the id of its dev source
 * (cast to text) in source_key; the element/link copies below join on
 * source_key to re-parent child rows onto the freshly generated stage ids.
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    // Get all projects. With QueryTypes.SELECT, query() resolves to the rows
    // array directly (no [rows, metadata] tuple).
    const projects = await queryInterface.sequelize.query(
      `SELECT id FROM projects WHERE "deletedAt" IS NULL`,
      { type: Sequelize.QueryTypes.SELECT },
    );
    if (!projects || projects.length === 0) {
      console.log('No projects found, skipping dev to stage copy');
      return;
    }
    for (const project of projects) {
      const projectId = project.id;
      // Idempotency guard: check if stage content already exists.
      // NOTE: projectId is interpolated directly into the SQL strings below;
      // it is a UUID read from our own projects table, not external input.
      const [stageCheck] = await queryInterface.sequelize.query(
        `SELECT COUNT(*)::int as count FROM tour_pages
         WHERE "projectId" = '${projectId}' AND environment = 'stage' AND "deletedAt" IS NULL`,
        { type: Sequelize.QueryTypes.SELECT },
      );
      if (stageCheck?.count > 0) {
        console.log(`Project ${projectId} already has stage content, skipping`);
        continue;
      }
      // Get dev pages count first; nothing to copy for projects with no dev pages.
      const [devPageCount] = await queryInterface.sequelize.query(
        `SELECT COUNT(*)::int as count FROM tour_pages
         WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL`,
        { type: Sequelize.QueryTypes.SELECT },
      );
      if (!devPageCount || devPageCount.count === 0) {
        console.log(`Project ${projectId} has no dev content, skipping`);
        continue;
      }
      // Copy pages with direct INSERT...SELECT; each new stage row gets a fresh
      // uuid and stores the dev page id (id::text) in source_key.
      await queryInterface.sequelize.query(`
        INSERT INTO tour_pages
        (id, slug, name, sort_order, background_image_url, background_video_url, background_audio_url, background_loop, requires_auth, ui_schema_json, "projectId", environment, source_key, "createdAt", "updatedAt", "createdById", "updatedById")
        SELECT
          gen_random_uuid(),
          slug,
          name,
          sort_order,
          background_image_url,
          background_video_url,
          background_audio_url,
          background_loop,
          requires_auth,
          ui_schema_json,
          "projectId",
          'stage',
          id::text,
          NOW(),
          NOW(),
          "createdById",
          "updatedById"
        FROM tour_pages
        WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL
      `);
      // Copy transitions (same source_key pattern as pages).
      await queryInterface.sequelize.query(`
        INSERT INTO transitions
        (id, name, slug, video_url, audio_url, supports_reverse, duration_sec, "projectId", environment, source_key, "createdAt", "updatedAt", "createdById", "updatedById")
        SELECT
          gen_random_uuid(),
          name,
          slug,
          video_url,
          audio_url,
          supports_reverse,
          duration_sec,
          "projectId",
          'stage',
          id::text,
          NOW(),
          NOW(),
          "createdById",
          "updatedById"
        FROM transitions
        WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL
      `);
      // Copy audio tracks ("loop" is quoted because it is a reserved word).
      await queryInterface.sequelize.query(`
        INSERT INTO project_audio_tracks
        (id, name, slug, url, "loop", volume, sort_order, is_enabled, "projectId", environment, source_key, "createdAt", "updatedAt", "createdById", "updatedById")
        SELECT
          gen_random_uuid(),
          name,
          slug,
          url,
          "loop",
          volume,
          sort_order,
          is_enabled,
          "projectId",
          'stage',
          id::text,
          NOW(),
          NOW(),
          "createdById",
          "updatedById"
        FROM project_audio_tracks
        WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL
      `);
      // Copy page elements: join the dev page through source_key to its stage
      // copy so each element is re-parented onto the new stage page id.
      await queryInterface.sequelize.query(`
        INSERT INTO page_elements
        (id, element_type, name, sort_order, is_visible, x_percent, y_percent, width_percent, height_percent, rotation_deg, style_json, content_json, "pageId", "createdAt", "updatedAt", "createdById", "updatedById")
        SELECT
          gen_random_uuid(),
          pe.element_type,
          pe.name,
          pe.sort_order,
          pe.is_visible,
          pe.x_percent,
          pe.y_percent,
          pe.width_percent,
          pe.height_percent,
          pe.rotation_deg,
          pe.style_json,
          pe.content_json,
          stage_page.id,
          NOW(),
          NOW(),
          pe."createdById",
          pe."updatedById"
        FROM page_elements pe
        INNER JOIN tour_pages dev_page ON pe."pageId" = dev_page.id
        INNER JOIN tour_pages stage_page ON stage_page.source_key = dev_page.id::text AND stage_page.environment = 'stage'
        WHERE dev_page."projectId" = '${projectId}'
          AND dev_page.environment = 'dev'
          AND dev_page."deletedAt" IS NULL
          AND pe."deletedAt" IS NULL
      `);
      // Copy page links, remapping from/to pages and the transition via
      // source_key. The to-page and transition joins are LEFT JOINs, so a link
      // whose target or transition has no stage copy is still inserted with a
      // NULL "to_pageId" / "transitionId".
      await queryInterface.sequelize.query(`
        INSERT INTO page_links
        (id, trigger_selector, external_url, "from_pageId", "to_pageId", "transitionId", "createdAt", "updatedAt", "createdById", "updatedById")
        SELECT
          gen_random_uuid(),
          pl.trigger_selector,
          pl.external_url,
          stage_from.id,
          stage_to.id,
          stage_transition.id,
          NOW(),
          NOW(),
          pl."createdById",
          pl."updatedById"
        FROM page_links pl
        INNER JOIN tour_pages dev_from ON pl."from_pageId" = dev_from.id
        INNER JOIN tour_pages stage_from ON stage_from.source_key = dev_from.id::text AND stage_from.environment = 'stage'
        LEFT JOIN tour_pages dev_to ON pl."to_pageId" = dev_to.id
        LEFT JOIN tour_pages stage_to ON stage_to.source_key = dev_to.id::text AND stage_to.environment = 'stage'
        LEFT JOIN transitions dev_transition ON pl."transitionId" = dev_transition.id
        LEFT JOIN transitions stage_transition ON stage_transition.source_key = dev_transition.id::text AND stage_transition.environment = 'stage'
        WHERE dev_from."projectId" = '${projectId}'
          AND dev_from.environment = 'dev'
          AND dev_from."deletedAt" IS NULL
          AND pl."deletedAt" IS NULL
      `);
      console.log(`Copied dev content to stage for project ${projectId}`);
    }
    console.log('Successfully copied dev content to stage for all projects');
  },
  async down(queryInterface, _Sequelize) {
    // Delete all stage content that has a source_key (meaning it was created
    // by this migration). Dependent rows (links, elements) are removed before
    // the pages they point at.
    await queryInterface.sequelize.query(
      `DELETE FROM page_links WHERE "from_pageId" IN (SELECT id FROM tour_pages WHERE environment = 'stage' AND source_key IS NOT NULL)`,
    );
    await queryInterface.sequelize.query(
      `DELETE FROM page_elements WHERE "pageId" IN (SELECT id FROM tour_pages WHERE environment = 'stage' AND source_key IS NOT NULL)`,
    );
    await queryInterface.sequelize.query(
      `DELETE FROM tour_pages WHERE environment = 'stage' AND source_key IS NOT NULL`,
    );
    await queryInterface.sequelize.query(
      `DELETE FROM transitions WHERE environment = 'stage' AND source_key IS NOT NULL`,
    );
    await queryInterface.sequelize.query(
      `DELETE FROM project_audio_tracks WHERE environment = 'stage' AND source_key IS NOT NULL`,
    );
    console.log('Removed stage content created by migration');
  },
};

View File

@ -1,58 +0,0 @@
'use strict';
/**
* Migration: Enforce environment NOT NULL on all environment-aware tables
*
* This migration ensures that:
* 1. All NULL environment values are set to 'dev'
* 2. environment column is NOT NULL with default 'dev'
*
* This prevents data leaks where pages without environment could appear in production.
*/
module.exports = {
async up(queryInterface, _Sequelize) {
// Fix any NULL environments in tour_pages
await queryInterface.sequelize.query(
`UPDATE tour_pages SET environment = 'dev' WHERE environment IS NULL`,
);
// Fix any NULL environments in transitions
await queryInterface.sequelize.query(
`UPDATE transitions SET environment = 'dev' WHERE environment IS NULL`,
);
// Add NOT NULL constraint with default to tour_pages.environment
await queryInterface.sequelize.query(`
ALTER TABLE tour_pages
ALTER COLUMN environment SET NOT NULL,
ALTER COLUMN environment SET DEFAULT 'dev'
`);
// Add NOT NULL constraint with default to transitions.environment
await queryInterface.sequelize.query(`
ALTER TABLE transitions
ALTER COLUMN environment SET NOT NULL,
ALTER COLUMN environment SET DEFAULT 'dev'
`);
console.log('Successfully enforced NOT NULL on environment columns');
},
async down(queryInterface, _Sequelize) {
// Remove NOT NULL constraint from tour_pages.environment
await queryInterface.sequelize.query(`
ALTER TABLE tour_pages
ALTER COLUMN environment DROP NOT NULL,
ALTER COLUMN environment DROP DEFAULT
`);
// Remove NOT NULL constraint from transitions.environment
await queryInterface.sequelize.query(`
ALTER TABLE transitions
ALTER COLUMN environment DROP NOT NULL,
ALTER COLUMN environment DROP DEFAULT
`);
console.log('Removed NOT NULL constraint from environment columns');
},
};

View File

@ -1,31 +0,0 @@
'use strict';
/**
* Remove project.phase column - it's redundant.
* Runtime access is controlled by tour_pages.environment, not project.phase.
*/
module.exports = {
async up(queryInterface, _Sequelize) {
// Drop the phase column
await queryInterface.removeColumn('projects', 'phase');
// Drop the ENUM type
await queryInterface.sequelize.query(
`DROP TYPE IF EXISTS "enum_projects_phase";`,
);
},
async down(queryInterface, Sequelize) {
// Recreate the ENUM type
await queryInterface.sequelize.query(`
CREATE TYPE "enum_projects_phase" AS ENUM ('dev', 'stage', 'production');
`);
// Recreate the column with default 'dev'
await queryInterface.addColumn('projects', 'phase', {
type: Sequelize.ENUM('dev', 'stage', 'production'),
allowNull: false,
defaultValue: 'dev',
});
},
};

View File

@ -1,20 +0,0 @@
'use strict';
/**
* Migration: Remove entry_page_slug from projects table
*
* The entry page is now determined by the first page by sort_order,
* making entry_page_slug redundant.
*/
module.exports = {
async up(queryInterface, _Sequelize) {
await queryInterface.removeColumn('projects', 'entry_page_slug');
},
async down(queryInterface, Sequelize) {
await queryInterface.addColumn('projects', 'entry_page_slug', {
type: Sequelize.TEXT,
allowNull: true,
});
},
};

View File

@ -1,171 +0,0 @@
'use strict';
/**
 * Migration: Convert targetPageId to targetPageSlug in ui_schema_json
 *
 * This migration converts navigation elements from using page UUIDs (targetPageId)
 * to using page slugs (targetPageSlug). This fixes the ID remapping issue when
 * pages are copied between environments (dev -> stage -> production).
 *
 * Slugs are unique within project+environment and identical across environments,
 * eliminating the need for ID remapping during publish.
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Get all tour pages with their ui_schema_json.
      // No `type` option here, so query() resolves to a [results, metadata]
      // tuple and the array destructuring correctly binds the rows.
      const [tourPages] = await queryInterface.sequelize.query(
        `SELECT id, "projectId", environment, slug, ui_schema_json FROM tour_pages WHERE ui_schema_json IS NOT NULL`,
        { transaction },
      );
      // Build a lookup map: pageId -> { projectId, environment, slug }
      const pageInfoById = new Map();
      tourPages.forEach((page) => {
        pageInfoById.set(page.id, {
          projectId: page.projectId,
          environment: page.environment,
          slug: page.slug,
        });
      });
      // Process each page and convert targetPageId to targetPageSlug
      for (const page of tourPages) {
        try {
          // ui_schema_json may arrive as a string or as an already-parsed value.
          const uiSchema =
            typeof page.ui_schema_json === 'string'
              ? JSON.parse(page.ui_schema_json)
              : page.ui_schema_json;
          if (!uiSchema || !Array.isArray(uiSchema.elements)) {
            continue;
          }
          let hasChanges = false;
          uiSchema.elements.forEach((element) => {
            // Convert targetPageId to targetPageSlug
            if (
              element.targetPageId &&
              typeof element.targetPageId === 'string'
            ) {
              const targetPageInfo = pageInfoById.get(element.targetPageId);
              if (targetPageInfo && targetPageInfo.slug) {
                // Only convert if target page is in the same project and
                // environment; references to other projects/environments (and
                // ids that no longer resolve) are intentionally left as-is.
                if (
                  targetPageInfo.projectId === page.projectId &&
                  targetPageInfo.environment === page.environment
                ) {
                  element.targetPageSlug = targetPageInfo.slug;
                  delete element.targetPageId;
                  hasChanges = true;
                }
              }
            }
          });
          if (hasChanges) {
            // Persist the rewritten schema for this page inside the transaction.
            await queryInterface.sequelize.query(
              `UPDATE tour_pages SET ui_schema_json = :json WHERE id = :id`,
              {
                replacements: {
                  json: JSON.stringify(uiSchema),
                  id: page.id,
                },
                type: Sequelize.QueryTypes.UPDATE,
                transaction,
              },
            );
          }
        } catch (parseError) {
          // Skip pages with invalid JSON rather than failing the whole migration.
          console.warn(`Skipping page ${page.id}: ${parseError.message}`);
        }
      }
      await transaction.commit();
      console.log(
        'Migration complete: Converted targetPageId to targetPageSlug',
      );
    } catch (error) {
      await transaction.rollback();
      throw error;
    }
  },
  async down(queryInterface, Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Get all tour pages (tuple result again — no `type` option, see up()).
      const [tourPages] = await queryInterface.sequelize.query(
        `SELECT id, "projectId", environment, slug, ui_schema_json FROM tour_pages WHERE ui_schema_json IS NOT NULL`,
        { transaction },
      );
      // Build lookup: (projectId, environment, slug) -> pageId
      const pageIdByKey = new Map();
      tourPages.forEach((page) => {
        const key = `${page.projectId}:${page.environment}:${page.slug}`;
        pageIdByKey.set(key, page.id);
      });
      // Process each page and convert targetPageSlug back to targetPageId
      for (const page of tourPages) {
        try {
          const uiSchema =
            typeof page.ui_schema_json === 'string'
              ? JSON.parse(page.ui_schema_json)
              : page.ui_schema_json;
          if (!uiSchema || !Array.isArray(uiSchema.elements)) {
            continue;
          }
          let hasChanges = false;
          uiSchema.elements.forEach((element) => {
            if (
              element.targetPageSlug &&
              typeof element.targetPageSlug === 'string'
            ) {
              // Slugs only resolve within the page's own project + environment.
              const key = `${page.projectId}:${page.environment}:${element.targetPageSlug}`;
              const targetPageId = pageIdByKey.get(key);
              if (targetPageId) {
                element.targetPageId = targetPageId;
                delete element.targetPageSlug;
                hasChanges = true;
              }
            }
          });
          if (hasChanges) {
            await queryInterface.sequelize.query(
              `UPDATE tour_pages SET ui_schema_json = :json WHERE id = :id`,
              {
                replacements: {
                  json: JSON.stringify(uiSchema),
                  id: page.id,
                },
                type: Sequelize.QueryTypes.UPDATE,
                transaction,
              },
            );
          }
        } catch (parseError) {
          // Skip pages with invalid JSON rather than failing the rollback.
          console.warn(`Skipping page ${page.id}: ${parseError.message}`);
        }
      }
      await transaction.commit();
      console.log(
        'Rollback complete: Converted targetPageSlug back to targetPageId',
      );
    } catch (error) {
      await transaction.rollback();
      throw error;
    }
  },
};

View File

@ -1,100 +0,0 @@
'use strict';
/**
 * Migration: Drop page_elements table
 *
 * This table was designed for storing individual page elements but was never used.
 * All element data is stored in tour_pages.ui_schema_json instead.
 */
module.exports = {
  async up(queryInterface, _Sequelize) {
    // Safety check: refuse to drop the table unless it is empty.
    // No `type` option => query() resolves to [results, metadata]; `results`
    // is the rows array, so results[0] is the single COUNT row.
    // NOTE(review): the count includes soft-deleted rows (no "deletedAt"
    // filter) — presumably intentional, since dropping destroys those too.
    const [results] = await queryInterface.sequelize.query(
      'SELECT COUNT(*) as count FROM page_elements',
    );
    // Postgres returns COUNT as a string; parse it before comparing.
    const count = parseInt(results[0].count, 10);
    if (count > 0) {
      throw new Error(
        `Cannot drop page_elements table: it contains ${count} records. Please migrate or delete them first.`,
      );
    }
    await queryInterface.dropTable('page_elements');
    console.log('Dropped page_elements table (was empty)');
  },
  async down(queryInterface, Sequelize) {
    // Recreate the page_elements table with its original shape.
    // This restores the schema only; dropped data is not recoverable.
    await queryInterface.createTable('page_elements', {
      id: {
        type: Sequelize.UUID,
        defaultValue: Sequelize.UUIDV4,
        primaryKey: true,
      },
      // Owning page; elements are removed with their page (CASCADE).
      pageId: {
        type: Sequelize.UUID,
        allowNull: false,
        references: {
          model: 'tour_pages',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'CASCADE',
      },
      element_type: {
        type: Sequelize.STRING,
        allowNull: false,
      },
      // Position columns; the "Percent" naming suggests percentage coordinates
      // — TODO confirm against the original model definition.
      xPercent: {
        type: Sequelize.DECIMAL(10, 6),
        allowNull: true,
      },
      yPercent: {
        type: Sequelize.DECIMAL(10, 6),
        allowNull: true,
      },
      content_json: {
        type: Sequelize.TEXT,
        allowNull: true,
      },
      createdAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      updatedAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      // Soft-delete timestamp.
      deletedAt: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      // Audit references; preserved as NULL when the user is deleted.
      createdById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      updatedById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      // Unique hash — presumably used to dedupe imported rows; TODO confirm.
      importHash: {
        type: Sequelize.STRING(255),
        allowNull: true,
        unique: true,
      },
    });
  },
};

View File

@ -1,108 +0,0 @@
'use strict';
/**
 * Migration: Drop page_links table
 *
 * This table was designed for storing navigation links between pages but was never used.
 * Navigation targets are stored in tour_pages.ui_schema_json as targetPageSlug instead.
 */
module.exports = {
  async up(queryInterface, _Sequelize) {
    // Safety check: refuse to drop the table unless it is empty.
    // No `type` option => query() resolves to [results, metadata]; `results`
    // is the rows array, so results[0] is the single COUNT row.
    // NOTE(review): the count includes soft-deleted rows (no "deletedAt"
    // filter) — presumably intentional, since dropping destroys those too.
    const [results] = await queryInterface.sequelize.query(
      'SELECT COUNT(*) as count FROM page_links',
    );
    // Postgres returns COUNT as a string; parse it before comparing.
    const count = parseInt(results[0].count, 10);
    if (count > 0) {
      throw new Error(
        `Cannot drop page_links table: it contains ${count} records. Please migrate or delete them first.`,
      );
    }
    await queryInterface.dropTable('page_links');
    console.log('Dropped page_links table (was empty)');
  },
  async down(queryInterface, Sequelize) {
    // Recreate the page_links table with its original shape.
    // This restores the schema only; dropped data is not recoverable.
    await queryInterface.createTable('page_links', {
      id: {
        type: Sequelize.UUID,
        defaultValue: Sequelize.UUIDV4,
        primaryKey: true,
      },
      // Source page; the link disappears with it (CASCADE).
      from_pageId: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'tour_pages',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'CASCADE',
      },
      // Destination page; nulled if the target page is deleted.
      to_pageId: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'tour_pages',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      // Optional transition played when following the link.
      transitionId: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'transitions',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      is_active: {
        type: Sequelize.BOOLEAN,
        defaultValue: true,
      },
      createdAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      updatedAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      // Soft-delete timestamp.
      deletedAt: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      // Audit references; preserved as NULL when the user is deleted.
      createdById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      updatedById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      // Unique hash — presumably used to dedupe imported rows; TODO confirm.
      importHash: {
        type: Sequelize.STRING(255),
        allowNull: true,
        unique: true,
      },
    });
  },
};

View File

@ -1,105 +0,0 @@
'use strict';
/**
 * Migration: Drop transitions table
 *
 * This table was designed for storing transition video metadata but was never used.
 * Transition video URLs are stored directly in tour_pages.ui_schema_json as transitionVideoUrl.
 */
module.exports = {
  async up(queryInterface, _Sequelize) {
    // Safety check: refuse to drop the table unless it is empty.
    // No `type` option => query() resolves to [results, metadata]; `results`
    // is the rows array, so results[0] is the single COUNT row.
    // NOTE(review): the count includes soft-deleted rows (no "deletedAt"
    // filter) — presumably intentional, since dropping destroys those too.
    const [results] = await queryInterface.sequelize.query(
      'SELECT COUNT(*) as count FROM transitions',
    );
    // Postgres returns COUNT as a string; parse it before comparing.
    const count = parseInt(results[0].count, 10);
    if (count > 0) {
      throw new Error(
        `Cannot drop transitions table: it contains ${count} records. Please migrate or delete them first.`,
      );
    }
    await queryInterface.dropTable('transitions');
    console.log('Dropped transitions table (was empty)');
  },
  async down(queryInterface, Sequelize) {
    // Recreate the transitions table with its original shape.
    // This restores the schema only; dropped data is not recoverable.
    await queryInterface.createTable('transitions', {
      id: {
        type: Sequelize.UUID,
        defaultValue: Sequelize.UUIDV4,
        primaryKey: true,
      },
      // Owning project; transitions are removed with it (CASCADE).
      projectId: {
        type: Sequelize.UUID,
        allowNull: false,
        references: {
          model: 'projects',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'CASCADE',
      },
      environment: {
        type: Sequelize.STRING,
        allowNull: false,
        defaultValue: 'dev',
      },
      name: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      video_url: {
        type: Sequelize.TEXT,
        allowNull: true,
      },
      duration_ms: {
        type: Sequelize.INTEGER,
        allowNull: true,
      },
      // Id of the row this one was copied from between environments, if any
      // — TODO confirm (elsewhere in the codebase source_key is compared as text).
      source_key: {
        type: Sequelize.UUID,
        allowNull: true,
      },
      createdAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      updatedAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      // Soft-delete timestamp.
      deletedAt: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      // Audit references; preserved as NULL when the user is deleted.
      createdById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      updatedById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      // Unique hash — presumably used to dedupe imported rows; TODO confirm.
      importHash: {
        type: Sequelize.STRING(255),
        allowNull: true,
        unique: true,
      },
    });
  },
};

View File

@ -1,150 +0,0 @@
'use strict';
const { v4: uuidv4 } = require('uuid');
/**
 * Add missing element type defaults (spot, logo, popup)
 * These were missing from the original DEFAULT_ROWS and need to be added to existing databases.
 * Also backfills project_element_defaults for existing projects.
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    const now = new Date();
    // Define the missing element types. settings_json is persisted as a JSON
    // string; the object literals below are the canonical default settings.
    const missingTypes = [
      {
        id: uuidv4(),
        element_type: 'spot',
        name: 'Hotspot',
        sort_order: 9,
        settings_json: JSON.stringify({
          label: 'Hotspot',
          iconUrl: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        }),
        createdAt: now,
        updatedAt: now,
      },
      {
        id: uuidv4(),
        element_type: 'logo',
        name: 'Logo',
        sort_order: 10,
        settings_json: JSON.stringify({
          label: 'Logo',
          iconUrl: '',
          backgroundImageUrl: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        }),
        createdAt: now,
        updatedAt: now,
      },
      {
        id: uuidv4(),
        element_type: 'popup',
        name: 'Popup',
        sort_order: 11,
        settings_json: JSON.stringify({
          label: 'Popup',
          iconUrl: '',
          popupTitle: '',
          popupContent: '',
          appearDelaySec: 0,
          appearDurationSec: null,
        }),
        createdAt: now,
        updatedAt: now,
      },
    ];
    // Insert missing global defaults (skip if they already exist).
    for (const elementType of missingTypes) {
      // QueryTypes.SELECT resolves to the rows array; destructuring binds the
      // first matching row (or undefined), which is all this check needs.
      const [existing] = await queryInterface.sequelize.query(
        `SELECT id FROM element_type_defaults WHERE element_type = :element_type AND "deletedAt" IS NULL`,
        {
          replacements: { element_type: elementType.element_type },
          type: Sequelize.QueryTypes.SELECT,
        },
      );
      if (!existing) {
        await queryInterface.bulkInsert('element_type_defaults', [elementType]);
        console.log(`Added global default for: ${elementType.element_type}`);
      } else {
        console.log(
          `Global default already exists for: ${elementType.element_type}`,
        );
      }
    }
    // Get all inserted/existing global defaults for the missing types
    const globalDefaults = await queryInterface.sequelize.query(
      `SELECT id, element_type, name, sort_order, settings_json
       FROM element_type_defaults
       WHERE element_type IN ('spot', 'logo', 'popup') AND "deletedAt" IS NULL`,
      { type: Sequelize.QueryTypes.SELECT },
    );
    // Get all projects
    const projects = await queryInterface.sequelize.query(
      `SELECT id FROM projects WHERE "deletedAt" IS NULL`,
      { type: Sequelize.QueryTypes.SELECT },
    );
    console.log(
      `Backfilling ${globalDefaults.length} element types to ${projects.length} projects...`,
    );
    // Backfill project_element_defaults for each project
    for (const project of projects) {
      for (const globalDefault of globalDefaults) {
        // Check if project already has this element type
        const [existing] = await queryInterface.sequelize.query(
          `SELECT id FROM project_element_defaults
           WHERE "projectId" = :projectId AND element_type = :element_type AND "deletedAt" IS NULL`,
          {
            replacements: {
              projectId: project.id,
              element_type: globalDefault.element_type,
            },
            type: Sequelize.QueryTypes.SELECT,
          },
        );
        if (!existing) {
          // source_element_id records which global default this snapshot was
          // copied from; snapshot_version is set to 1 for these copied rows.
          await queryInterface.bulkInsert('project_element_defaults', [
            {
              id: uuidv4(),
              projectId: project.id,
              element_type: globalDefault.element_type,
              name: globalDefault.name,
              sort_order: globalDefault.sort_order,
              settings_json: globalDefault.settings_json,
              source_element_id: globalDefault.id,
              snapshot_version: 1,
              createdAt: now,
              updatedAt: now,
            },
          ]);
        }
      }
    }
    console.log('Backfill complete.');
  },
  async down(queryInterface, _Sequelize) {
    // Remove the added element types from project_element_defaults.
    // NOTE(review): this deletes ALL spot/logo/popup rows, including any that
    // existed before this migration ran — confirm this is acceptable.
    await queryInterface.sequelize.query(
      `DELETE FROM project_element_defaults WHERE element_type IN ('spot', 'logo', 'popup')`,
    );
    // Remove from element_type_defaults (same caveat as above).
    await queryInterface.sequelize.query(
      `DELETE FROM element_type_defaults WHERE element_type IN ('spot', 'logo', 'popup')`,
    );
  },
};

View File

@ -1,295 +0,0 @@
'use strict';
const { v4: uuidv4 } = require('uuid');
/**
* Sync all 11 element type defaults with correct sort_order.
* This migration ensures all element types exist in element_type_defaults
* and backfills any missing project_element_defaults for existing projects.
*/
module.exports = {
async up(queryInterface, Sequelize) {
const now = new Date();
// Define all 11 element types with correct sort_order
const DEFAULT_ELEMENT_TYPES = [
{
element_type: 'navigation_next',
name: 'Navigation Forward Button',
sort_order: 1,
default_settings_json: {
label: 'Navigation: Forward',
navLabel: 'Forward',
navType: 'forward',
navDisabled: false,
transitionReverseMode: 'auto_reverse',
transitionDurationSec: 0.7,
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'navigation_prev',
name: 'Navigation Back Button',
sort_order: 2,
default_settings_json: {
label: 'Navigation: Back',
navLabel: 'Back',
navType: 'back',
navDisabled: false,
transitionReverseMode: 'auto_reverse',
transitionDurationSec: 0.7,
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'tooltip',
name: 'Tooltip',
sort_order: 3,
default_settings_json: {
label: 'Tooltip',
tooltipTitle: 'Tooltip title',
tooltipText: 'Tooltip text',
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'description',
name: 'Description',
sort_order: 4,
default_settings_json: {
label: 'Description',
descriptionTitle: 'TITLE',
descriptionText: '',
descriptionTitleFontSize: '48px',
descriptionTextFontSize: '36px',
descriptionTitleFontFamily: 'inherit',
descriptionTextFontFamily: 'inherit',
descriptionTitleColor: '#000000',
descriptionTextColor: '#4B5563',
descriptionBackgroundColor: 'transparent',
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'gallery',
name: 'Gallery',
sort_order: 5,
default_settings_json: {
label: 'Gallery',
galleryCards: [{ imageUrl: '', title: 'Card 1', description: '' }],
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'carousel',
name: 'Carousel',
sort_order: 6,
default_settings_json: {
label: 'Carousel',
carouselSlides: [{ imageUrl: '', caption: 'Slide 1' }],
carouselPrevIconUrl: '',
carouselNextIconUrl: '',
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'video_player',
name: 'Video Player',
sort_order: 7,
default_settings_json: {
label: 'Video Player',
mediaUrl: '',
mediaAutoplay: true,
mediaLoop: true,
mediaMuted: true,
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'audio_player',
name: 'Audio Player',
sort_order: 8,
default_settings_json: {
label: 'Audio Player',
mediaUrl: '',
mediaAutoplay: true,
mediaLoop: true,
mediaMuted: false,
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'spot',
name: 'Hotspot',
sort_order: 9,
default_settings_json: {
label: 'Hotspot',
iconUrl: '',
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'logo',
name: 'Logo',
sort_order: 10,
default_settings_json: {
label: 'Logo',
iconUrl: '',
backgroundImageUrl: '',
appearDelaySec: 0,
appearDurationSec: null,
},
},
{
element_type: 'popup',
name: 'Popup',
sort_order: 11,
default_settings_json: {
label: 'Popup',
iconUrl: '',
popupTitle: '',
popupContent: '',
appearDelaySec: 0,
appearDurationSec: null,
},
},
];
console.log('Syncing all 11 element type defaults...');
// Track inserted/updated global defaults for backfill
const globalDefaultIds = new Map();
// For each element type: insert if not exists, update sort_order if wrong
for (const elementType of DEFAULT_ELEMENT_TYPES) {
const [existing] = await queryInterface.sequelize.query(
`SELECT id, sort_order FROM element_type_defaults
WHERE element_type = :element_type AND "deletedAt" IS NULL`,
{
replacements: { element_type: elementType.element_type },
type: Sequelize.QueryTypes.SELECT,
},
);
if (!existing) {
// Insert new element type
const newId = uuidv4();
await queryInterface.bulkInsert('element_type_defaults', [
{
id: newId,
element_type: elementType.element_type,
name: elementType.name,
sort_order: elementType.sort_order,
settings_json: JSON.stringify(elementType.default_settings_json),
createdAt: now,
updatedAt: now,
},
]);
globalDefaultIds.set(elementType.element_type, newId);
console.log(
`Inserted: ${elementType.element_type} (sort_order: ${elementType.sort_order})`,
);
} else {
globalDefaultIds.set(elementType.element_type, existing.id);
// Update sort_order if different
if (existing.sort_order !== elementType.sort_order) {
await queryInterface.sequelize.query(
`UPDATE element_type_defaults
SET sort_order = :sort_order, "updatedAt" = :now
WHERE id = :id`,
{
replacements: {
sort_order: elementType.sort_order,
now,
id: existing.id,
},
},
);
console.log(
`Updated sort_order for ${elementType.element_type}: ${existing.sort_order} -> ${elementType.sort_order}`,
);
} else {
console.log(
`Already exists: ${elementType.element_type} (sort_order: ${elementType.sort_order})`,
);
}
}
}
// Get all projects
const projects = await queryInterface.sequelize.query(
`SELECT id FROM projects WHERE "deletedAt" IS NULL`,
{ type: Sequelize.QueryTypes.SELECT },
);
console.log(
`Backfilling missing project_element_defaults for ${projects.length} projects...`,
);
// Get all global defaults for backfill
const globalDefaults = await queryInterface.sequelize.query(
`SELECT id, element_type, name, sort_order, settings_json
FROM element_type_defaults
WHERE "deletedAt" IS NULL`,
{ type: Sequelize.QueryTypes.SELECT },
);
let backfillCount = 0;
// Backfill project_element_defaults for each project
for (const project of projects) {
for (const globalDefault of globalDefaults) {
// Check if project already has this element type
const [existing] = await queryInterface.sequelize.query(
`SELECT id FROM project_element_defaults
WHERE "projectId" = :projectId AND element_type = :element_type AND "deletedAt" IS NULL`,
{
replacements: {
projectId: project.id,
element_type: globalDefault.element_type,
},
type: Sequelize.QueryTypes.SELECT,
},
);
if (!existing) {
await queryInterface.bulkInsert('project_element_defaults', [
{
id: uuidv4(),
projectId: project.id,
element_type: globalDefault.element_type,
name: globalDefault.name,
sort_order: globalDefault.sort_order,
settings_json: globalDefault.settings_json,
source_element_id: globalDefault.id,
snapshot_version: 1,
createdAt: now,
updatedAt: now,
},
]);
backfillCount++;
}
}
}
console.log(`Backfilled ${backfillCount} project element defaults.`);
console.log('Sync complete.');
},
async down(_queryInterface, _Sequelize) {
  // Intentionally a no-op: the `up` step only inserts rows that are
  // missing (global element-type defaults plus per-project backfill),
  // so there is nothing that can be removed without risking deletion
  // of legitimate pre-existing data.
  console.log(
    'No down migration needed - this migration only adds missing data.',
  );
},
};

View File

@ -1,25 +0,0 @@
'use strict';
/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, _Sequelize) {
await queryInterface.removeColumn('projects', 'theme_config_json');
await queryInterface.removeColumn('projects', 'custom_css_json');
await queryInterface.removeColumn('projects', 'cdn_base_url');
},
async down(queryInterface, Sequelize) {
await queryInterface.addColumn('projects', 'theme_config_json', {
type: Sequelize.JSON,
allowNull: true,
});
await queryInterface.addColumn('projects', 'custom_css_json', {
type: Sequelize.JSON,
allowNull: true,
});
await queryInterface.addColumn('projects', 'cdn_base_url', {
type: Sequelize.TEXT,
allowNull: true,
});
},
};

View File

@ -1,67 +0,0 @@
'use strict';

/**
 * Remove duplicate element_type_defaults rows.
 * Keeps the oldest entry (by createdAt) for each element_type.
 * This fixes the unique constraint violation during project creation.
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    // Find element_types that appear more than once among live
    // (non-soft-deleted) rows.
    const duplicates = await queryInterface.sequelize.query(
      `SELECT element_type, COUNT(*) as count
       FROM element_type_defaults
       WHERE "deletedAt" IS NULL
       GROUP BY element_type
       HAVING COUNT(*) > 1`,
      { type: Sequelize.QueryTypes.SELECT },
    );

    // Nothing to do when the table is already clean.
    if (duplicates.length === 0) {
      console.log('No duplicate element_type_defaults found.');
      return;
    }

    console.log(
      `Found ${duplicates.length} element_types with duplicates:`,
      duplicates.map((d) => d.element_type).join(', '),
    );

    // For each duplicate element_type, keep oldest and delete others
    for (const dup of duplicates) {
      // Get all rows for this element_type, ordered by createdAt
      // ascending — index 0 is the row we keep.
      const rows = await queryInterface.sequelize.query(
        `SELECT id, "createdAt"
         FROM element_type_defaults
         WHERE element_type = :element_type AND "deletedAt" IS NULL
         ORDER BY "createdAt" ASC`,
        {
          replacements: { element_type: dup.element_type },
          type: Sequelize.QueryTypes.SELECT,
        },
      );

      // Keep the first (oldest), delete the rest.
      // NOTE(review): this is a hard DELETE, bypassing the paranoid
      // soft-delete convention used elsewhere — presumably intentional
      // so the unique index slot is freed immediately; confirm.
      const idsToDelete = rows.slice(1).map((r) => r.id);
      if (idsToDelete.length > 0) {
        await queryInterface.sequelize.query(
          `DELETE FROM element_type_defaults WHERE id IN (:ids)`,
          { replacements: { ids: idsToDelete } },
        );
        console.log(
          `Deleted ${idsToDelete.length} duplicate(s) for element_type: ${dup.element_type}`,
        );
      }
    }

    console.log('Duplicate removal complete.');
  },

  async down(_queryInterface, _Sequelize) {
    // Cannot restore deleted duplicates
    console.log(
      'Down migration not applicable - duplicates cannot be restored.',
    );
  },
};

View File

@ -1,78 +0,0 @@
'use strict';
/**
* Remove invalid element_type_defaults entries.
* Only valid element types defined in DEFAULT_ROWS should exist.
*/
module.exports = {
async up(queryInterface, Sequelize) {
// Valid element types as defined in element_type_defaults.js DEFAULT_ROWS
const validTypes = [
'navigation_next',
'navigation_prev',
'tooltip',
'description',
'gallery',
'carousel',
'video_player',
'audio_player',
'spot',
'logo',
'popup',
];
// Find invalid entries
const invalidEntries = await queryInterface.sequelize.query(
`SELECT id, element_type, name
FROM element_type_defaults
WHERE element_type NOT IN (:validTypes)
AND "deletedAt" IS NULL`,
{
replacements: { validTypes },
type: Sequelize.QueryTypes.SELECT,
},
);
if (invalidEntries.length === 0) {
console.log('No invalid element_type_defaults found.');
return;
}
console.log(
`Found ${invalidEntries.length} invalid element_type_defaults:`,
);
invalidEntries.forEach((entry) => {
console.log(` - ${entry.name} (${entry.element_type})`);
});
// Delete invalid entries
const idsToDelete = invalidEntries.map((e) => e.id);
await queryInterface.sequelize.query(
`DELETE FROM element_type_defaults WHERE id IN (:ids)`,
{ replacements: { ids: idsToDelete } },
);
// Also delete from project_element_defaults
const deletedProjectDefaults = await queryInterface.sequelize.query(
`DELETE FROM project_element_defaults
WHERE element_type NOT IN (:validTypes)
RETURNING id, element_type`,
{
replacements: { validTypes },
type: Sequelize.QueryTypes.SELECT,
},
);
console.log(`Deleted ${idsToDelete.length} invalid element_type_defaults.`);
console.log(
`Deleted ${deletedProjectDefaults.length} invalid project_element_defaults.`,
);
},
async down(_queryInterface, _Sequelize) {
// Cannot restore deleted invalid entries
console.log(
'Down migration not applicable - invalid entries cannot be restored.',
);
},
};

View File

@ -1,53 +0,0 @@
'use strict';
/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.addColumn('tour_pages', 'background_video_autoplay', {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: true,
});
await queryInterface.addColumn('tour_pages', 'background_video_loop', {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: true,
});
await queryInterface.addColumn('tour_pages', 'background_video_muted', {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: true,
});
await queryInterface.addColumn(
'tour_pages',
'background_video_start_time',
{
type: Sequelize.DECIMAL(10, 1),
allowNull: true,
defaultValue: null,
},
);
await queryInterface.addColumn('tour_pages', 'background_video_end_time', {
type: Sequelize.DECIMAL(10, 1),
allowNull: true,
defaultValue: null,
});
},
async down(queryInterface, _Sequelize) {
await queryInterface.removeColumn(
'tour_pages',
'background_video_autoplay',
);
await queryInterface.removeColumn('tour_pages', 'background_video_loop');
await queryInterface.removeColumn('tour_pages', 'background_video_muted');
await queryInterface.removeColumn(
'tour_pages',
'background_video_start_time',
);
await queryInterface.removeColumn(
'tour_pages',
'background_video_end_time',
);
},
};

View File

@ -1,29 +0,0 @@
'use strict';
/**
* Migration: Add design canvas dimensions to projects
*
* Adds design_width and design_height columns to support
* responsive canvas scaling with project-specific aspect ratios.
*
* @type {import('sequelize-cli').Migration}
*/
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.addColumn('projects', 'design_width', {
type: Sequelize.INTEGER,
allowNull: true,
defaultValue: 1920,
});
await queryInterface.addColumn('projects', 'design_height', {
type: Sequelize.INTEGER,
allowNull: true,
defaultValue: 1080,
});
},
async down(queryInterface, _Sequelize) {
await queryInterface.removeColumn('projects', 'design_width');
await queryInterface.removeColumn('projects', 'design_height');
},
};

View File

@ -1,30 +0,0 @@
'use strict';
/**
* Migration: Add design_width and design_height to tour_pages
*
* These fields store the canvas dimensions for presentations.
* They are copied from the project's design dimensions when pages are saved/published.
* This ensures presentations use the dimensions that were active at save time,
* not the current project dimensions (safe migration pattern).
*/
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.addColumn('tour_pages', 'design_width', {
type: Sequelize.INTEGER,
allowNull: true,
defaultValue: null,
});
await queryInterface.addColumn('tour_pages', 'design_height', {
type: Sequelize.INTEGER,
allowNull: true,
defaultValue: null,
});
},
async down(queryInterface) {
await queryInterface.removeColumn('tour_pages', 'design_width');
await queryInterface.removeColumn('tour_pages', 'design_height');
},
};

View File

@ -1,33 +0,0 @@
'use strict';

/**
 * Migration: Add 'reversed' variant type to asset_variants
 *
 * This enables storing pre-reversed videos for back navigation transitions.
 * Also adds storage_key column to track the S3/local storage path.
 */

/** @type {import('sequelize-cli').Migration} */
module.exports = {
  async up(queryInterface, Sequelize) {
    // Add 'reversed' to the enum_asset_variants_variant_type enum.
    // IF NOT EXISTS makes this idempotent on re-runs.
    // NOTE(review): on older PostgreSQL versions ALTER TYPE ... ADD VALUE
    // cannot run inside a transaction block — confirm the migration
    // runner does not wrap this migration in one.
    await queryInterface.sequelize.query(`
      ALTER TYPE "enum_asset_variants_variant_type"
      ADD VALUE IF NOT EXISTS 'reversed';
    `);

    // Add storage_key column if it doesn't exist (guarded via
    // describeTable so the migration is safe when the column was
    // already created by another code path).
    const tableInfo = await queryInterface.describeTable('asset_variants');
    if (!tableInfo.storage_key) {
      await queryInterface.addColumn('asset_variants', 'storage_key', {
        type: Sequelize.TEXT,
        allowNull: true,
      });
    }
  },

  async down() {
    // PostgreSQL doesn't support removing enum values.
    // storage_key column is safe to leave (no data loss).
  },
};

View File

@ -1,103 +0,0 @@
'use strict';

const { v4: uuidv4 } = require('uuid');

/**
 * Migration: Add hierarchical transition settings
 *
 * Creates global_transition_defaults table for platform-wide transition settings
 * and adds transition_settings JSONB column to projects table for project-level overrides.
 *
 * Cascade: Element -> Project -> Global (fallback)
 *
 * @type {import('sequelize-cli').Migration}
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    // Create global_transition_defaults table (single-row pattern: the
    // application reads/updates one seeded row rather than inserting more).
    await queryInterface.createTable('global_transition_defaults', {
      id: {
        type: Sequelize.UUID,
        defaultValue: Sequelize.UUIDV4,
        primaryKey: true,
      },
      // Platform-wide default transition.
      transition_type: {
        type: Sequelize.TEXT,
        allowNull: false,
        defaultValue: 'fade',
      },
      duration_ms: {
        type: Sequelize.INTEGER,
        allowNull: false,
        defaultValue: 700,
      },
      easing: {
        type: Sequelize.TEXT,
        allowNull: false,
        defaultValue: 'ease-in-out',
      },
      createdAt: {
        type: Sequelize.DATE,
        allowNull: false,
        defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'),
      },
      updatedAt: {
        type: Sequelize.DATE,
        allowNull: false,
        defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'),
      },
      // Soft-delete marker (paranoid pattern used across the schema).
      deletedAt: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      // Audit columns; SET NULL keeps the row if the user is deleted.
      createdById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
      updatedById: {
        type: Sequelize.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'id',
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
      },
    });

    // Seed the default row
    const now = new Date();
    await queryInterface.bulkInsert('global_transition_defaults', [
      {
        id: uuidv4(),
        transition_type: 'fade',
        duration_ms: 700,
        easing: 'ease-in-out',
        createdAt: now,
        updatedAt: now,
      },
    ]);

    // Add transition_settings JSONB column to projects. NULL means
    // "no project override; fall back to the global defaults".
    await queryInterface.addColumn('projects', 'transition_settings', {
      type: Sequelize.JSONB,
      allowNull: true,
      defaultValue: null,
    });
  },

  async down(queryInterface, _Sequelize) {
    // Remove transition_settings from projects
    await queryInterface.removeColumn('projects', 'transition_settings');
    // Drop global_transition_defaults table
    await queryInterface.dropTable('global_transition_defaults');
  },
};

View File

@ -1,76 +0,0 @@
'use strict';

/**
 * Migration: Simplify transitions and add overlay color
 *
 * 1. Add overlay_color column to global_transition_defaults
 * 2. Update global_transition_defaults: change slide-left/slide-right/zoom to 'fade'
 * 3. Update projects.transition_settings JSONB where transitionType is slide/zoom
 */
module.exports = {
  async up(queryInterface, Sequelize) {
    // All three steps share one transaction, so a failure part-way
    // leaves both schema and data untouched.
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // 1. Add overlay_color column to global_transition_defaults
      await queryInterface.addColumn(
        'global_transition_defaults',
        'overlay_color',
        {
          type: Sequelize.TEXT,
          allowNull: false,
          defaultValue: '#000000',
        },
        { transaction },
      );
      // 2. Update global_transition_defaults: change slide-left/slide-right/zoom to 'fade'
      await queryInterface.sequelize.query(
        `UPDATE global_transition_defaults
         SET transition_type = 'fade'
         WHERE transition_type IN ('slide-left', 'slide-right', 'zoom')`,
        { transaction },
      );
      // 3. Update projects.transition_settings JSONB where transitionType is slide/zoom.
      // jsonb_set rewrites only the transitionType key; COALESCE guards
      // against a JSON null payload so jsonb_set always has an object.
      await queryInterface.sequelize.query(
        `UPDATE projects
         SET transition_settings = jsonb_set(
           COALESCE(transition_settings, '{}'::jsonb),
           '{transitionType}',
           '"fade"'
         )
         WHERE transition_settings IS NOT NULL
         AND transition_settings->>'transitionType' IN ('slide-left', 'slide-right', 'zoom')`,
        { transaction },
      );
      await transaction.commit();
    } catch (error) {
      await transaction.rollback();
      throw error;
    }
  },

  async down(queryInterface, _Sequelize) {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Remove overlay_color column
      await queryInterface.removeColumn(
        'global_transition_defaults',
        'overlay_color',
        { transaction },
      );
      // Note: We cannot restore the original slide/zoom values as they are lost.
      // The data migration is one-way.
      await transaction.commit();
    } catch (error) {
      await transaction.rollback();
      throw error;
    }
  },
};

View File

@ -1,231 +0,0 @@
'use strict';
/**
* Migration: Create project_transition_settings table
*
* Creates environment-aware project transition settings following the
* project_audio_tracks pattern. This allows transition settings to be
* isolated per environment and participate in the publishing workflow.
*
* Data migration:
* - Existing projects.transition_settings values are copied to 'dev' environment records
* - The column is dropped after migration to avoid dual storage
*/
const { v4: uuidv4 } = require('uuid');
module.exports = {
async up(queryInterface, Sequelize) {
// Step 1: Create the project_transition_settings table
await queryInterface.createTable('project_transition_settings', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
projectId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: 'projects',
key: 'id',
},
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
},
environment: {
type: Sequelize.ENUM('dev', 'stage', 'production'),
allowNull: false,
},
source_key: {
type: Sequelize.TEXT,
allowNull: true,
},
transition_type: {
type: Sequelize.TEXT,
allowNull: false,
defaultValue: 'fade',
},
duration_ms: {
type: Sequelize.INTEGER,
allowNull: false,
defaultValue: 700,
},
easing: {
type: Sequelize.TEXT,
allowNull: false,
defaultValue: 'ease-in-out',
},
overlay_color: {
type: Sequelize.TEXT,
allowNull: false,
defaultValue: '#000000',
},
createdById: {
type: Sequelize.UUID,
references: {
model: 'users',
key: 'id',
},
allowNull: true,
},
updatedById: {
type: Sequelize.UUID,
references: {
model: 'users',
key: 'id',
},
allowNull: true,
},
createdAt: {
allowNull: false,
type: Sequelize.DATE,
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE,
},
deletedAt: {
type: Sequelize.DATE,
allowNull: true,
},
importHash: {
type: Sequelize.STRING(255),
allowNull: true,
unique: true,
},
});
// Add unique constraint on (projectId, environment)
// Use IF NOT EXISTS to avoid errors if index already exists
await queryInterface.sequelize.query(`
CREATE UNIQUE INDEX IF NOT EXISTS project_transition_settings_project_env_unique
ON project_transition_settings ("projectId", environment)
WHERE "deletedAt" IS NULL
`);
// Add index on deletedAt for soft delete queries
await queryInterface.sequelize.query(`
CREATE INDEX IF NOT EXISTS project_transition_settings_deleted_at
ON project_transition_settings ("deletedAt")
`);
// Step 2: Migrate existing project.transition_settings data to 'dev' records
const [projects] = await queryInterface.sequelize.query(`
SELECT id, transition_settings, "createdById", "updatedById"
FROM projects
WHERE transition_settings IS NOT NULL
AND transition_settings != 'null'
AND "deletedAt" IS NULL
`);
const now = new Date();
const records = [];
for (const project of projects) {
let settings = project.transition_settings;
// Parse JSONB if it's a string
if (typeof settings === 'string') {
try {
settings = JSON.parse(settings);
} catch (e) {
console.warn(
`Failed to parse transition_settings for project ${project.id}:`,
e,
);
continue;
}
}
// Skip if settings is null, empty object, or has no actual values
if (
!settings ||
typeof settings !== 'object' ||
Object.keys(settings).length === 0
) {
continue;
}
records.push({
id: uuidv4(),
projectId: project.id,
environment: 'dev',
source_key: null,
transition_type: settings.transitionType || 'fade',
duration_ms: settings.durationMs || 700,
easing: settings.easing || 'ease-in-out',
overlay_color: settings.overlayColor || '#000000',
createdById: project.createdById || null,
updatedById: project.updatedById || null,
createdAt: now,
updatedAt: now,
deletedAt: null,
importHash: null,
});
}
if (records.length > 0) {
await queryInterface.bulkInsert('project_transition_settings', records);
console.log(
`Migrated ${records.length} project transition settings to 'dev' environment`,
);
}
// Step 3: Drop the transition_settings column from projects table
await queryInterface.removeColumn('projects', 'transition_settings');
console.log('Dropped transition_settings column from projects table');
},
async down(queryInterface, Sequelize) {
// Step 1: Re-add the transition_settings column to projects
await queryInterface.addColumn('projects', 'transition_settings', {
type: Sequelize.JSONB,
allowNull: true,
defaultValue: null,
});
// Step 2: Migrate 'dev' records back to projects.transition_settings
const [settings] = await queryInterface.sequelize.query(`
SELECT "projectId", transition_type, duration_ms, easing, overlay_color
FROM project_transition_settings
WHERE environment = 'dev'
AND "deletedAt" IS NULL
`);
for (const setting of settings) {
const jsonValue = JSON.stringify({
transitionType: setting.transition_type,
durationMs: setting.duration_ms,
easing: setting.easing,
overlayColor: setting.overlay_color,
});
await queryInterface.sequelize.query(
`
UPDATE projects
SET transition_settings = :settings::jsonb
WHERE id = :projectId
`,
{
replacements: {
settings: jsonValue,
projectId: setting.projectId,
},
},
);
}
// Step 3: Drop indexes and table
await queryInterface.sequelize.query(`
DROP INDEX IF EXISTS project_transition_settings_project_env_unique;
DROP INDEX IF EXISTS project_transition_settings_deleted_at;
`);
await queryInterface.dropTable('project_transition_settings');
// Drop the ENUM type
await queryInterface.sequelize.query(
'DROP TYPE IF EXISTS "enum_project_transition_settings_environment";',
);
},
};

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const access_logs = sequelize.define( const access_logs = sequelize.define(
'access_logs', 'access_logs',
{ {
@ -8,44 +8,50 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
environment: { environment: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
values: ['admin', 'stage', 'production'], values: [
"admin",
"stage",
"production"
],
}, },
path: { path: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: { args: [0, 2048], msg: 'Path must be at most 2048 characters' },
},
}, },
ip_address: { ip_address: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 45],
msg: 'IP address must be at most 45 characters',
},
},
}, },
user_agent: { user_agent: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 1024],
msg: 'User agent must be at most 1024 characters',
},
},
}, },
accessed_at: { accessed_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
allowNull: false, allowNull: false,
defaultValue: DataTypes.NOW, defaultValue: DataTypes.NOW,
}, },
importHash: { importHash: {
@ -68,18 +74,38 @@ module.exports = function (sequelize, DataTypes) {
); );
access_logs.associate = (db) => { access_logs.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.access_logs.belongsTo(db.projects, { db.access_logs.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.access_logs.belongsTo(db.users, { db.access_logs.belongsTo(db.users, {
@ -87,11 +113,12 @@ module.exports = function (sequelize, DataTypes) {
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.access_logs.belongsTo(db.users, { db.access_logs.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -101,5 +128,8 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return access_logs; return access_logs;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const asset_variants = sequelize.define( const asset_variants = sequelize.define(
'asset_variants', 'asset_variants',
{ {
@ -8,65 +8,60 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
variant_type: { variant_type: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
values: [ values: [
'thumbnail',
'preview', "thumbnail",
'webp',
'mp4_low', "preview",
'mp4_high',
'original', "webp",
"mp4_low",
"mp4_high",
"original"
'reversed',
], ],
}, },
storage_key: { cdn_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: true,
}, },
cdn_url: { width_px: {
type: DataTypes.TEXT,
validate: {
len: {
args: [0, 2048],
msg: 'CDN URL must be at most 2048 characters',
},
isUrlOrEmpty(value) {
if (value && value.length > 0 && !/^https?:\/\/.+/.test(value)) {
throw new Error('CDN URL must be a valid URL');
}
},
},
},
width_px: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
validate: {
min: { args: [0], msg: 'Width must be a non-negative integer' },
},
}, },
height_px: { height_px: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
validate: {
min: { args: [0], msg: 'Height must be a non-negative integer' },
},
}, },
size_mb: { size_mb: {
type: DataTypes.DECIMAL, type: DataTypes.DECIMAL,
validate: {
min: { args: [0], msg: 'Size must be a non-negative number' },
},
}, },
importHash: { importHash: {
@ -83,20 +78,43 @@ module.exports = function (sequelize, DataTypes) {
); );
asset_variants.associate = (db) => { asset_variants.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.asset_variants.belongsTo(db.assets, { db.asset_variants.belongsTo(db.assets, {
as: 'asset', as: 'asset',
foreignKey: { foreignKey: {
name: 'assetId', name: 'assetId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.asset_variants.belongsTo(db.users, { db.asset_variants.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -106,5 +124,9 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return asset_variants; return asset_variants;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const assets = sequelize.define( const assets = sequelize.define(
'assets', 'assets',
{ {
@ -8,92 +8,115 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
name: { name: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 255],
msg: 'Asset name must be at most 255 characters',
},
},
}, },
asset_type: { asset_type: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
values: ['image', 'video', 'audio', 'file'],
},
type: {
type: DataTypes.ENUM,
allowNull: false,
defaultValue: 'general',
values: [ values: [
'icon',
'background_image', "image",
'audio',
'video', "video",
'transition',
'logo', "audio",
'favicon',
'document', "file"
'general',
], ],
}, },
cdn_url: { cdn_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
storage_key: { storage_key: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
mime_type: { mime_type: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
is: {
args: /^[a-z0-9]+\/[a-z0-9.+-]+$/i,
msg: 'Invalid MIME type format',
},
},
}, },
size_mb: { size_mb: {
type: DataTypes.DECIMAL, type: DataTypes.DECIMAL,
}, },
width_px: { width_px: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
}, },
height_px: { height_px: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
}, },
duration_sec: { duration_sec: {
type: DataTypes.DECIMAL, type: DataTypes.DECIMAL,
}, },
checksum: { checksum: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
is_public: { is_public: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
},
is_deleted: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
},
deleted_at_time: {
type: DataTypes.DATE,
}, },
importHash: { importHash: {
@ -109,38 +132,59 @@ module.exports = function (sequelize, DataTypes) {
indexes: [ indexes: [
{ fields: ['projectId'] }, { fields: ['projectId'] },
{ fields: ['asset_type'] }, { fields: ['asset_type'] },
{ fields: ['type'] },
{ fields: ['is_public'] }, { fields: ['is_public'] },
{ fields: ['is_deleted'] },
{ fields: ['deletedAt'] }, { fields: ['deletedAt'] },
], ],
}, },
); );
assets.associate = (db) => { assets.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.assets.hasMany(db.asset_variants, { db.assets.hasMany(db.asset_variants, {
as: 'asset_variants_asset', as: 'asset_variants_asset',
foreignKey: { foreignKey: {
name: 'assetId', name: 'assetId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
//end loop
//end loop
db.assets.belongsTo(db.projects, { db.assets.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.assets.belongsTo(db.users, { db.assets.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -150,5 +194,8 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return assets; return assets;
}; };

View File

@ -1,91 +0,0 @@
module.exports = function (sequelize, DataTypes) {
const element_type_defaults = sequelize.define(
'element_type_defaults',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
element_type: {
type: DataTypes.TEXT,
allowNull: false,
unique: true,
validate: {
notEmpty: { msg: 'Element type is required' },
len: {
args: [1, 100],
msg: 'Element type must be between 1 and 100 characters',
},
},
},
name: {
type: DataTypes.TEXT,
allowNull: false,
validate: {
notEmpty: { msg: 'Name is required' },
len: {
args: [1, 255],
msg: 'Name must be between 1 and 255 characters',
},
},
},
sort_order: {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 0,
},
is_active: {
type: DataTypes.VIRTUAL,
get() {
return true;
},
},
default_settings_json: {
type: DataTypes.TEXT,
field: 'settings_json',
allowNull: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
indexes: [
{ fields: ['element_type'] },
{ fields: ['sort_order'] },
{ fields: ['deletedAt'] },
],
},
);
element_type_defaults.associate = (db) => {
db.element_type_defaults.belongsTo(db.users, {
as: 'createdBy',
});
db.element_type_defaults.belongsTo(db.users, {
as: 'updatedBy',
});
// Add hasMany relationship to project_element_defaults
if (db.project_element_defaults) {
db.element_type_defaults.hasMany(db.project_element_defaults, {
as: 'project_defaults',
foreignKey: {
name: 'source_element_id',
},
constraints: true,
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
});
}
};
return element_type_defaults;
};

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const file = sequelize.define( const file = sequelize.define(
'file', 'file',
{ {

View File

@ -1,73 +0,0 @@
module.exports = function (sequelize, DataTypes) {
const global_transition_defaults = sequelize.define(
'global_transition_defaults',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
transition_type: {
type: DataTypes.TEXT,
allowNull: false,
defaultValue: 'fade',
validate: {
notEmpty: { msg: 'Transition type is required' },
isIn: {
args: [['fade', 'none']],
msg: 'Invalid transition type',
},
},
},
overlay_color: {
type: DataTypes.TEXT,
allowNull: false,
defaultValue: '#000000',
validate: {
notEmpty: { msg: 'Overlay color is required' },
},
},
duration_ms: {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 700,
validate: {
isInt: { msg: 'Duration must be an integer' },
min: {
args: [0],
msg: 'Duration must be at least 0ms',
},
},
},
easing: {
type: DataTypes.TEXT,
allowNull: false,
defaultValue: 'ease-in-out',
validate: {
notEmpty: { msg: 'Easing is required' },
isIn: {
args: [['ease-in-out', 'ease-in', 'ease-out', 'linear']],
msg: 'Invalid easing function',
},
},
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
global_transition_defaults.associate = (db) => {
db.global_transition_defaults.belongsTo(db.users, {
as: 'createdBy',
});
db.global_transition_defaults.belongsTo(db.users, {
as: 'updatedBy',
});
};
return global_transition_defaults;
};

View File

@ -5,7 +5,7 @@ const path = require('path');
const Sequelize = require('sequelize'); const Sequelize = require('sequelize');
const basename = path.basename(__filename); const basename = path.basename(__filename);
const env = process.env.NODE_ENV || 'development'; const env = process.env.NODE_ENV || 'development';
const config = require('../db.config')[env]; const config = require("../db.config")[env];
const db = {}; const db = {};
let sequelize; let sequelize;
@ -13,29 +13,20 @@ console.log(env);
if (config.use_env_variable) { if (config.use_env_variable) {
sequelize = new Sequelize(process.env[config.use_env_variable], config); sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else { } else {
sequelize = new Sequelize( sequelize = new Sequelize(config.database, config.username, config.password, config);
config.database,
config.username,
config.password,
config,
);
} }
fs.readdirSync(__dirname) fs
.filter((file) => { .readdirSync(__dirname)
return ( .filter(file => {
file.indexOf('.') !== 0 && file !== basename && file.slice(-3) === '.js' return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js');
);
}) })
.forEach((file) => { .forEach(file => {
const model = require(path.join(__dirname, file))( const model = require(path.join(__dirname, file))(sequelize, Sequelize.DataTypes)
sequelize,
Sequelize.DataTypes,
);
db[model.name] = model; db[model.name] = model;
}); });
Object.keys(db).forEach((modelName) => { Object.keys(db).forEach(modelName => {
if (db[modelName].associate) { if (db[modelName].associate) {
db[modelName].associate(db); db[modelName].associate(db);
} }

View File

@ -0,0 +1,190 @@
module.exports = function(sequelize, DataTypes) {
const page_elements = sequelize.define(
'page_elements',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
element_type: {
type: DataTypes.ENUM,
allowNull: false,
values: [
"nav_button",
"spot",
"description",
"tooltip",
"gallery",
"carousel",
"logo",
"video_player",
"popup"
],
},
name: {
type: DataTypes.TEXT,
},
sort_order: {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 0,
},
is_visible: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
},
x_percent: {
type: DataTypes.DECIMAL,
},
y_percent: {
type: DataTypes.DECIMAL,
},
width_percent: {
type: DataTypes.DECIMAL,
},
height_percent: {
type: DataTypes.DECIMAL,
},
rotation_deg: {
type: DataTypes.DECIMAL,
},
style_json: {
type: DataTypes.TEXT,
},
content_json: {
type: DataTypes.TEXT,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
indexes: [
{ fields: ['pageId'] },
{ fields: ['pageId', 'sort_order'] },
{ fields: ['is_visible'] },
{ fields: ['deletedAt'] },
],
},
);
page_elements.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.page_elements.belongsTo(db.tour_pages, {
as: 'page',
foreignKey: {
name: 'pageId',
},
constraints: false,
});
db.page_elements.belongsTo(db.users, {
as: 'createdBy',
});
db.page_elements.belongsTo(db.users, {
as: 'updatedBy',
});
};
return page_elements;
};

View File

@ -0,0 +1,141 @@
module.exports = function(sequelize, DataTypes) {
const page_links = sequelize.define(
'page_links',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
direction: {
type: DataTypes.ENUM,
allowNull: false,
defaultValue: 'forward',
values: [
"forward",
"back",
"external"
],
},
external_url: {
type: DataTypes.TEXT,
},
is_active: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
},
trigger_selector: {
type: DataTypes.TEXT,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
indexes: [
{ fields: ['from_pageId'] },
{ fields: ['to_pageId'] },
{ fields: ['transitionId'] },
{ fields: ['is_active'] },
{ fields: ['deletedAt'] },
],
},
);
page_links.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.page_links.belongsTo(db.tour_pages, {
as: 'from_page',
foreignKey: {
name: 'from_pageId',
},
constraints: false,
});
db.page_links.belongsTo(db.tour_pages, {
as: 'to_page',
foreignKey: {
name: 'to_pageId',
},
constraints: false,
});
db.page_links.belongsTo(db.transitions, {
as: 'transition',
foreignKey: {
name: 'transitionId',
},
constraints: false,
});
db.page_links.belongsTo(db.users, {
as: 'createdBy',
});
db.page_links.belongsTo(db.users, {
as: 'updatedBy',
});
};
return page_links;
};

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const permissions = sequelize.define( const permissions = sequelize.define(
'permissions', 'permissions',
{ {
@ -8,17 +8,11 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
name: { name: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false,
unique: true,
validate: {
notEmpty: { msg: 'Permission name is required' },
len: {
args: [1, 100],
msg: 'Permission name must be between 1 and 100 characters',
},
},
}, },
importHash: { importHash: {
@ -35,9 +29,34 @@ module.exports = function (sequelize, DataTypes) {
); );
permissions.associate = (db) => { permissions.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.permissions.belongsTo(db.users, { db.permissions.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
@ -48,5 +67,9 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return permissions; return permissions;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const presigned_url_requests = sequelize.define( const presigned_url_requests = sequelize.define(
'presigned_url_requests', 'presigned_url_requests',
{ {
@ -8,63 +8,77 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
purpose: { purpose: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
values: [
"upload",
"download"
],
values: ['upload', 'download'],
}, },
asset_type: { asset_type: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
values: [
"image",
"video",
"audio",
"file"
],
values: ['image', 'video', 'audio', 'file'],
}, },
requested_key: { requested_key: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 1024],
msg: 'Requested key must be at most 1024 characters',
},
},
}, },
mime_type: { mime_type: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 255],
msg: 'MIME type must be at most 255 characters',
},
isMimeTypeOrEmpty(value) {
if (
value &&
value.length > 0 &&
!/^[\w.-]+\/[\w.+-]+$/.test(value)
) {
throw new Error('MIME type must be in format type/subtype');
}
},
},
}, },
requested_size_mb: { requested_size_mb: {
type: DataTypes.DECIMAL, type: DataTypes.DECIMAL,
validate: {
min: {
args: [0],
msg: 'Requested size must be a non-negative number',
},
},
}, },
expires_at: { expires_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
status: { status: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
importHash: { importHash: {
@ -81,18 +95,38 @@ module.exports = function (sequelize, DataTypes) {
); );
presigned_url_requests.associate = (db) => { presigned_url_requests.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.presigned_url_requests.belongsTo(db.projects, { db.presigned_url_requests.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.presigned_url_requests.belongsTo(db.users, { db.presigned_url_requests.belongsTo(db.users, {
@ -100,11 +134,12 @@ module.exports = function (sequelize, DataTypes) {
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.presigned_url_requests.belongsTo(db.users, { db.presigned_url_requests.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -114,5 +149,9 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return presigned_url_requests; return presigned_url_requests;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const project_audio_tracks = sequelize.define( const project_audio_tracks = sequelize.define(
'project_audio_tracks', 'project_audio_tracks',
{ {
@ -8,58 +8,85 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
environment: { environment: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
values: [
"dev",
"stage",
"production"
],
values: ['dev', 'stage', 'production'],
}, },
source_key: { source_key: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
name: { name: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 255],
msg: 'Audio track name must be at most 255 characters',
},
},
}, },
slug: { slug: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
url: { url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
loop: { loop: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
volume: { volume: {
type: DataTypes.DECIMAL, type: DataTypes.DECIMAL,
validate: {
min: { args: [0], msg: 'Volume must be at least 0' },
max: { args: [1], msg: 'Volume must be at most 1' },
},
}, },
sort_order: { sort_order: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
}, },
is_enabled: { is_enabled: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
importHash: { importHash: {
@ -76,20 +103,43 @@ module.exports = function (sequelize, DataTypes) {
); );
project_audio_tracks.associate = (db) => { project_audio_tracks.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.project_audio_tracks.belongsTo(db.projects, { db.project_audio_tracks.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.project_audio_tracks.belongsTo(db.users, { db.project_audio_tracks.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -99,5 +149,9 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return project_audio_tracks; return project_audio_tracks;
}; };

View File

@ -1,101 +0,0 @@
module.exports = function (sequelize, DataTypes) {
  // Per-project snapshot of an element type's default settings, copied from
  // element_type_defaults so that later changes to the global row do not
  // silently alter existing projects.
  const project_element_defaults = sequelize.define(
    'project_element_defaults',
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      element_type: {
        // TEXT for flexibility - matches element_type_defaults
        // NOTE(review): page_elements stores this as an ENUM, not TEXT —
        // keep the value sets in sync (confirm against that model).
        type: DataTypes.TEXT,
        allowNull: false,
        validate: {
          notEmpty: { msg: 'Element type is required' },
          len: {
            args: [1, 100],
            msg: 'Element type must be between 1 and 100 characters',
          },
        },
      },
      name: {
        type: DataTypes.TEXT,
        validate: {
          len: { args: [0, 255], msg: 'Name must be at most 255 characters' },
        },
      },
      sort_order: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0,
      },
      settings_json: {
        // Serialized JSON settings blob; shape defined by the editor frontend.
        type: DataTypes.TEXT,
        allowNull: true,
      },
      source_element_id: {
        // Optional FK - tracks which global default this was snapshotted from
        // SET NULL on global delete to preserve project overrides
        type: DataTypes.UUID,
        allowNull: true,
      },
      snapshot_version: {
        // Increments when resetting from global - enables "check for updates" feature
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 1,
      },
      importHash: {
        type: DataTypes.STRING(255),
        allowNull: true,
        unique: true,
      },
    },
    {
      timestamps: true,
      paranoid: true,
      freezeTableName: true,
      // One override per element type per project (projectId+element_type unique).
      indexes: [
        { fields: ['projectId'] },
        { fields: ['projectId', 'element_type'], unique: true },
        { fields: ['element_type'] },
        { fields: ['source_element_id'] },
        { fields: ['deletedAt'] },
      ],
    },
  );
  project_element_defaults.associate = (db) => {
    // Hard ownership: deleting the project deletes its overrides.
    db.project_element_defaults.belongsTo(db.projects, {
      as: 'project',
      foreignKey: {
        name: 'projectId',
        allowNull: false,
      },
      constraints: true,
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    });
    // Soft provenance link: deleting the global default keeps the snapshot.
    db.project_element_defaults.belongsTo(db.element_type_defaults, {
      as: 'source_element',
      foreignKey: {
        name: 'source_element_id',
      },
      constraints: true,
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    });
    db.project_element_defaults.belongsTo(db.users, {
      as: 'createdBy',
    });
    db.project_element_defaults.belongsTo(db.users, {
      as: 'updatedBy',
    });
  };
  return project_element_defaults;
};

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const project_memberships = sequelize.define( const project_memberships = sequelize.define(
'project_memberships', 'project_memberships',
{ {
@ -8,27 +8,50 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
access_level: { access_level: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
defaultValue: 'viewer', defaultValue: 'viewer',
values: ['owner', 'editor', 'reviewer', 'viewer'], values: [
"owner",
"editor",
"reviewer",
"viewer"
],
}, },
is_active: { is_active: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
invited_at: { invited_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
accepted_at: { accepted_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
importHash: { importHash: {
@ -52,18 +75,38 @@ module.exports = function (sequelize, DataTypes) {
); );
project_memberships.associate = (db) => { project_memberships.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.project_memberships.belongsTo(db.projects, { db.project_memberships.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.project_memberships.belongsTo(db.users, { db.project_memberships.belongsTo(db.users, {
@ -71,11 +114,12 @@ module.exports = function (sequelize, DataTypes) {
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.project_memberships.belongsTo(db.users, { db.project_memberships.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -85,5 +129,8 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return project_memberships; return project_memberships;
}; };

View File

@ -1,103 +0,0 @@
module.exports = function (sequelize, DataTypes) {
  // Per-project, per-environment override of the global transition defaults
  // (see global_transition_defaults for the application-wide fallbacks).
  const project_transition_settings = sequelize.define(
    'project_transition_settings',
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      environment: {
        type: DataTypes.ENUM,
        values: ['dev', 'stage', 'production'],
        allowNull: false,
      },
      source_key: {
        // Storage key of the transition video asset, when transition_type is 'video'
        // — presumably; confirm against the publish pipeline.
        type: DataTypes.TEXT,
        allowNull: true,
      },
      transition_type: {
        // Unlike global_transition_defaults, 'video' is allowed here.
        type: DataTypes.TEXT,
        allowNull: false,
        defaultValue: 'fade',
        validate: {
          isIn: {
            args: [['fade', 'none', 'video']],
            msg: 'Transition type must be fade, none, or video',
          },
        },
      },
      duration_ms: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 700,
        validate: {
          min: { args: [0], msg: 'Duration must be at least 0ms' },
          max: { args: [10000], msg: 'Duration must be at most 10000ms' },
        },
      },
      easing: {
        type: DataTypes.TEXT,
        allowNull: false,
        defaultValue: 'ease-in-out',
        validate: {
          isIn: {
            args: [['ease-in-out', 'ease-in', 'ease-out', 'linear']],
            msg: 'Easing must be ease-in-out, ease-in, ease-out, or linear',
          },
        },
      },
      overlay_color: {
        type: DataTypes.TEXT,
        allowNull: false,
        defaultValue: '#000000',
      },
      importHash: {
        type: DataTypes.STRING(255),
        allowNull: true,
        unique: true,
      },
    },
    {
      timestamps: true,
      paranoid: true,
      freezeTableName: true,
      indexes: [
        {
          // One live row per project+environment; the partial (where) unique
          // index excludes soft-deleted rows. NOTE(review): partial indexes
          // require dialect support (e.g. Postgres) — confirm target DB.
          fields: ['projectId', 'environment'],
          unique: true,
          where: { deletedAt: null },
        },
      ],
    },
  );
  project_transition_settings.associate = (db) => {
    // Hard ownership: settings are removed with their project.
    db.project_transition_settings.belongsTo(db.projects, {
      as: 'project',
      foreignKey: {
        name: 'projectId',
      },
      constraints: true,
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    });
    db.project_transition_settings.belongsTo(db.users, {
      as: 'createdBy',
    });
    db.project_transition_settings.belongsTo(db.users, {
      as: 'updatedBy',
    });
  };
  return project_transition_settings;
};

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const projects = sequelize.define( const projects = sequelize.define(
'projects', 'projects',
{ {
@ -8,65 +8,110 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
name: { name: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
validate: {
notEmpty: { msg: 'Project name is required' },
len: {
args: [1, 255],
msg: 'Project name must be between 1 and 255 characters',
},
},
}, },
slug: { slug: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
unique: true, unique: true,
validate: {
notEmpty: { msg: 'Slug is required' },
is: {
args: /^[a-z0-9_-]+$/i,
msg: 'Slug can only contain letters, numbers, dashes, and underscores',
},
len: {
args: [1, 255],
msg: 'Slug must be between 1 and 255 characters',
},
},
}, },
description: { description: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
logo_url: { phase: {
type: DataTypes.ENUM,
allowNull: false,
defaultValue: 'dev',
values: [
"dev",
"stage",
"production"
],
},
logo_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
favicon_url: { favicon_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
og_image_url: { og_image_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
design_width: { theme_config_json: {
type: DataTypes.INTEGER, type: DataTypes.TEXT,
allowNull: true,
defaultValue: 1920,
}, },
design_height: { custom_css_json: {
type: DataTypes.INTEGER, type: DataTypes.TEXT,
allowNull: true,
defaultValue: 1080,
}, },
// Note: transition_settings moved to project_transition_settings table cdn_base_url: {
// for environment-aware storage (dev, stage, production) type: DataTypes.TEXT,
},
entry_page_slug: {
type: DataTypes.TEXT,
},
is_deleted: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
},
deleted_at_time: {
type: DataTypes.DATE,
},
importHash: { importHash: {
type: DataTypes.STRING(255), type: DataTypes.STRING(255),
@ -78,114 +123,115 @@ module.exports = function (sequelize, DataTypes) {
timestamps: true, timestamps: true,
paranoid: true, paranoid: true,
freezeTableName: true, freezeTableName: true,
indexes: [{ fields: ['slug'], unique: true }, { fields: ['deletedAt'] }], indexes: [
{ fields: ['slug'], unique: true },
{ fields: ['phase'] },
{ fields: ['deletedAt'] },
],
}, },
); );
projects.associate = (db) => { projects.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.projects.hasMany(db.project_memberships, { db.projects.hasMany(db.project_memberships, {
as: 'project_memberships_project', as: 'project_memberships_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.assets, { db.projects.hasMany(db.assets, {
as: 'assets_project', as: 'assets_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.presigned_url_requests, { db.projects.hasMany(db.presigned_url_requests, {
as: 'presigned_url_requests_project', as: 'presigned_url_requests_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.tour_pages, { db.projects.hasMany(db.tour_pages, {
as: 'tour_pages_project', as: 'tour_pages_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.transitions, {
as: 'transitions_project',
foreignKey: {
name: 'projectId',
},
constraints: false,
});
db.projects.hasMany(db.project_audio_tracks, { db.projects.hasMany(db.project_audio_tracks, {
as: 'project_audio_tracks_project', as: 'project_audio_tracks_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.publish_events, { db.projects.hasMany(db.publish_events, {
as: 'publish_events_project', as: 'publish_events_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.pwa_caches, { db.projects.hasMany(db.pwa_caches, {
as: 'pwa_caches_project', as: 'pwa_caches_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.access_logs, { db.projects.hasMany(db.access_logs, {
as: 'access_logs_project', as: 'access_logs_project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.projects.hasMany(db.project_element_defaults, {
as: 'project_element_defaults_project',
foreignKey: {
name: 'projectId',
},
constraints: true,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
});
db.projects.hasMany(db.project_transition_settings, {
as: 'project_transition_settings_project',
foreignKey: {
name: 'projectId',
},
constraints: true,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
});
//end loop //end loop
db.projects.belongsTo(db.users, { db.projects.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
@ -196,5 +242,8 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return projects; return projects;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const publish_events = sequelize.define( const publish_events = sequelize.define(
'publish_events', 'publish_events',
{ {
@ -8,87 +8,116 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
title: { title: {
type: DataTypes.STRING, type: DataTypes.STRING,
allowNull: true, allowNull: true,
validate: {
len: { args: [0, 255], msg: 'Title must be at most 255 characters' },
},
}, },
description: { description: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: true, allowNull: true,
validate: {
len: {
args: [0, 5000],
msg: 'Description must be at most 5000 characters',
},
},
}, },
from_environment: { from_environment: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
values: ['dev', 'stage', 'production'], values: [
"dev",
"stage",
"production"
],
}, },
to_environment: { to_environment: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
values: ['dev', 'stage', 'production'], values: [
"dev",
"stage",
"production"
],
}, },
started_at: { started_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
finished_at: { finished_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
status: { status: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
defaultValue: 'queued', defaultValue: 'queued',
values: ['queued', 'running', 'success', 'failed'], values: [
"queued",
"running",
"success",
"failed"
],
}, },
error_message: { error_message: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
pages_copied: { pages_copied: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
validate: {
min: {
args: [0],
msg: 'Pages copied must be a non-negative integer',
},
},
}, },
transitions_copied: { transitions_copied: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
validate: {
min: {
args: [0],
msg: 'Transitions copied must be a non-negative integer',
},
},
}, },
audios_copied: { audios_copied: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
validate: {
min: {
args: [0],
msg: 'Audios copied must be a non-negative integer',
},
},
}, },
importHash: { importHash: {
@ -111,18 +140,38 @@ module.exports = function (sequelize, DataTypes) {
); );
publish_events.associate = (db) => { publish_events.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.publish_events.belongsTo(db.projects, { db.publish_events.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.publish_events.belongsTo(db.users, { db.publish_events.belongsTo(db.users, {
@ -130,11 +179,12 @@ module.exports = function (sequelize, DataTypes) {
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.publish_events.belongsTo(db.users, { db.publish_events.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -144,5 +194,7 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return publish_events; return publish_events;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const pwa_caches = sequelize.define( const pwa_caches = sequelize.define(
'pwa_caches', 'pwa_caches',
{ {
@ -8,39 +8,58 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
environment: { environment: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
values: [
"stage",
"production"
],
values: ['dev', 'stage', 'production'],
}, },
cache_version: { cache_version: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
validate: {
len: {
args: [0, 255],
msg: 'Cache version must be at most 255 characters',
},
},
}, },
manifest_json: { manifest_json: {
type: DataTypes.JSON, type: DataTypes.TEXT,
}, },
asset_list_json: { asset_list_json: {
type: DataTypes.JSON, type: DataTypes.TEXT,
}, },
generated_at: { generated_at: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
is_active: { is_active: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
importHash: { importHash: {
@ -57,20 +76,43 @@ module.exports = function (sequelize, DataTypes) {
); );
pwa_caches.associate = (db) => { pwa_caches.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.pwa_caches.belongsTo(db.projects, { db.pwa_caches.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.pwa_caches.belongsTo(db.users, { db.pwa_caches.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -80,5 +122,9 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return pwa_caches; return pwa_caches;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const roles = sequelize.define( const roles = sequelize.define(
'roles', 'roles',
{ {
@ -8,20 +8,18 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
name: { name: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false,
validate: {
notEmpty: { msg: 'Role name is required' },
len: {
args: [1, 100],
msg: 'Role name must be between 1 and 100 characters',
},
},
}, },
role_customization: { role_customization: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
importHash: { importHash: {
@ -38,13 +36,13 @@ module.exports = function (sequelize, DataTypes) {
); );
roles.associate = (db) => { roles.associate = (db) => {
db.roles.belongsToMany(db.permissions, { db.roles.belongsToMany(db.permissions, {
as: 'permissions', as: 'permissions',
foreignKey: { foreignKey: {
name: 'roles_permissionsId', name: 'roles_permissionsId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
through: 'rolesPermissionsPermissions', through: 'rolesPermissionsPermissions',
}); });
@ -53,24 +51,45 @@ module.exports = function (sequelize, DataTypes) {
foreignKey: { foreignKey: {
name: 'roles_permissionsId', name: 'roles_permissionsId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
through: 'rolesPermissionsPermissions', through: 'rolesPermissionsPermissions',
}); });
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.roles.hasMany(db.users, { db.roles.hasMany(db.users, {
as: 'users_app_role', as: 'users_app_role',
foreignKey: { foreignKey: {
name: 'app_roleId', name: 'app_roleId',
}, },
constraints: true, constraints: false,
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
}); });
//end loop
//end loop
db.roles.belongsTo(db.users, { db.roles.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
@ -81,5 +100,9 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return roles; return roles;
}; };

View File

@ -1,4 +1,4 @@
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const tour_pages = sequelize.define( const tour_pages = sequelize.define(
'tour_pages', 'tour_pages',
{ {
@ -8,122 +8,97 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
environment: { environment: {
type: DataTypes.ENUM, type: DataTypes.ENUM,
allowNull: false, allowNull: false,
defaultValue: 'dev', defaultValue: 'dev',
values: ['dev', 'stage', 'production'], values: [
"dev",
"stage",
"production"
],
}, },
source_key: { source_key: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
name: { name: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
validate: {
notEmpty: { msg: 'Page name is required' },
len: {
args: [1, 255],
msg: 'Page name must be between 1 and 255 characters',
},
},
}, },
slug: { slug: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
validate: {
notEmpty: { msg: 'Slug is required' },
is: {
args: /^[a-z0-9_-]+$/i,
msg: 'Slug can only contain letters, numbers, dashes, and underscores',
},
len: {
args: [1, 255],
msg: 'Slug must be between 1 and 255 characters',
},
},
}, },
sort_order: { sort_order: {
type: DataTypes.INTEGER, type: DataTypes.INTEGER,
allowNull: false, allowNull: false,
defaultValue: 0, defaultValue: 0,
}, },
background_image_url: { background_image_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
background_video_url: { background_video_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
background_audio_url: { background_audio_url: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
background_loop: { background_loop: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
background_video_autoplay: { requires_auth: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
},
background_video_loop: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
},
background_video_muted: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
},
background_video_start_time: {
type: DataTypes.DECIMAL(10, 1),
allowNull: true,
defaultValue: null,
},
background_video_end_time: {
type: DataTypes.DECIMAL(10, 1),
allowNull: true,
defaultValue: null,
},
design_width: {
type: DataTypes.INTEGER,
allowNull: true,
defaultValue: null,
},
design_height: {
type: DataTypes.INTEGER,
allowNull: true,
defaultValue: null,
},
requires_auth: {
type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
ui_schema_json: { ui_schema_json: {
type: DataTypes.JSON, type: DataTypes.TEXT,
}, },
importHash: { importHash: {
@ -146,20 +121,67 @@ module.exports = function (sequelize, DataTypes) {
); );
tour_pages.associate = (db) => { tour_pages.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.tour_pages.hasMany(db.page_elements, {
as: 'page_elements_page',
foreignKey: {
name: 'pageId',
},
constraints: false,
});
db.tour_pages.hasMany(db.page_links, {
as: 'page_links_from_page',
foreignKey: {
name: 'from_pageId',
},
constraints: false,
});
db.tour_pages.hasMany(db.page_links, {
as: 'page_links_to_page',
foreignKey: {
name: 'to_pageId',
},
constraints: false,
});
//end loop
db.tour_pages.belongsTo(db.projects, { db.tour_pages.belongsTo(db.projects, {
as: 'project', as: 'project',
foreignKey: { foreignKey: {
name: 'projectId', name: 'projectId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.tour_pages.belongsTo(db.users, { db.tour_pages.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -169,5 +191,8 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
return tour_pages; return tour_pages;
}; };

View File

@ -0,0 +1,157 @@
module.exports = function(sequelize, DataTypes) {
const transitions = sequelize.define(
'transitions',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
environment: {
type: DataTypes.ENUM,
allowNull: false,
defaultValue: 'dev',
values: [
"dev",
"stage",
"production"
],
},
source_key: {
type: DataTypes.TEXT,
},
name: {
type: DataTypes.TEXT,
allowNull: false,
},
slug: {
type: DataTypes.TEXT,
allowNull: false,
},
video_url: {
type: DataTypes.TEXT,
},
audio_url: {
type: DataTypes.TEXT,
},
supports_reverse: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
},
duration_sec: {
type: DataTypes.DECIMAL,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
indexes: [
{ fields: ['projectId'] },
{ fields: ['projectId', 'environment', 'slug'], unique: true },
{ fields: ['deletedAt'] },
],
},
);
transitions.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.transitions.hasMany(db.page_links, {
as: 'page_links_transition',
foreignKey: {
name: 'transitionId',
},
constraints: false,
});
//end loop
db.transitions.belongsTo(db.projects, {
as: 'project',
foreignKey: {
name: 'projectId',
},
constraints: false,
});
db.transitions.belongsTo(db.users, {
as: 'createdBy',
});
db.transitions.belongsTo(db.users, {
as: 'updatedBy',
});
};
return transitions;
};

View File

@ -0,0 +1,50 @@
module.exports = function (sequelize, DataTypes) {
const ui_elements = sequelize.define(
'ui_elements',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
element_type: {
type: DataTypes.TEXT,
allowNull: false,
},
name: {
type: DataTypes.TEXT,
},
settings_json: {
type: DataTypes.TEXT,
},
sort_order: {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 0,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
indexes: [{ fields: ['element_type'] }, { fields: ['sort_order'] }, { fields: ['deletedAt'] }],
},
);
ui_elements.associate = (db) => {
db.ui_elements.belongsTo(db.users, {
as: 'createdBy',
});
db.ui_elements.belongsTo(db.users, {
as: 'updatedBy',
});
};
return ui_elements;
};

View File

@ -3,7 +3,7 @@ const providers = config.providers;
const crypto = require('crypto'); const crypto = require('crypto');
const bcrypt = require('bcrypt'); const bcrypt = require('bcrypt');
module.exports = function (sequelize, DataTypes) { module.exports = function(sequelize, DataTypes) {
const users = sequelize.define( const users = sequelize.define(
'users', 'users',
{ {
@ -13,67 +13,93 @@ module.exports = function (sequelize, DataTypes) {
primaryKey: true, primaryKey: true,
}, },
firstName: { firstName: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
lastName: { lastName: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
phoneNumber: { phoneNumber: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
email: { email: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
unique: true, unique: true,
validate: {
isEmail: { msg: 'Must be a valid email address' },
notEmpty: { msg: 'Email is required' },
},
}, },
disabled: { disabled: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
password: { password: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
}, },
emailVerified: { emailVerified: {
type: DataTypes.BOOLEAN, type: DataTypes.BOOLEAN,
allowNull: false, allowNull: false,
defaultValue: false, defaultValue: false,
}, },
emailVerificationToken: { emailVerificationToken: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
emailVerificationTokenExpiresAt: { emailVerificationTokenExpiresAt: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
passwordResetToken: { passwordResetToken: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
}, },
passwordResetTokenExpiresAt: { passwordResetTokenExpiresAt: {
type: DataTypes.DATE, type: DataTypes.DATE,
}, },
provider: { provider: {
type: DataTypes.TEXT, type: DataTypes.TEXT,
allowNull: false, allowNull: false,
defaultValue: providers.LOCAL, defaultValue: providers.LOCAL,
}, },
importHash: { importHash: {
@ -95,13 +121,13 @@ module.exports = function (sequelize, DataTypes) {
); );
users.associate = (db) => { users.associate = (db) => {
db.users.belongsToMany(db.permissions, { db.users.belongsToMany(db.permissions, {
as: 'custom_permissions', as: 'custom_permissions',
foreignKey: { foreignKey: {
name: 'users_custom_permissionsId', name: 'users_custom_permissionsId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
through: 'usersCustom_permissionsPermissions', through: 'usersCustom_permissionsPermissions',
}); });
@ -110,77 +136,88 @@ module.exports = function (sequelize, DataTypes) {
foreignKey: { foreignKey: {
name: 'users_custom_permissionsId', name: 'users_custom_permissionsId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
through: 'usersCustom_permissionsPermissions', through: 'usersCustom_permissionsPermissions',
}); });
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.users.hasMany(db.project_memberships, { db.users.hasMany(db.project_memberships, {
as: 'project_memberships_user', as: 'project_memberships_user',
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.users.hasMany(db.presigned_url_requests, { db.users.hasMany(db.presigned_url_requests, {
as: 'presigned_url_requests_user', as: 'presigned_url_requests_user',
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
}); });
db.users.hasMany(db.publish_events, { db.users.hasMany(db.publish_events, {
as: 'publish_events_user', as: 'publish_events_user',
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
}); });
db.users.hasMany(db.access_logs, { db.users.hasMany(db.access_logs, {
as: 'access_logs_user', as: 'access_logs_user',
foreignKey: { foreignKey: {
name: 'userId', name: 'userId',
}, },
constraints: true, constraints: false,
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
}); });
//end loop
//end loop
db.users.belongsTo(db.roles, { db.users.belongsTo(db.roles, {
as: 'app_role', as: 'app_role',
foreignKey: { foreignKey: {
name: 'app_roleId', name: 'app_roleId',
}, },
constraints: true, constraints: false,
onDelete: 'SET NULL',
onUpdate: 'CASCADE',
}); });
db.users.hasMany(db.file, { db.users.hasMany(db.file, {
as: 'avatar', as: 'avatar',
foreignKey: 'belongsToId', foreignKey: 'belongsToId',
constraints: true, constraints: false,
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
scope: { scope: {
belongsTo: db.users.getTableName(), belongsTo: db.users.getTableName(),
belongsToColumn: 'avatar', belongsToColumn: 'avatar',
}, },
}); });
db.users.belongsTo(db.users, { db.users.belongsTo(db.users, {
as: 'createdBy', as: 'createdBy',
}); });
@ -190,41 +227,47 @@ module.exports = function (sequelize, DataTypes) {
}); });
}; };
users.beforeCreate((users) => {
users = trimStringFields(users);
if ( users.beforeCreate((users) => {
users.provider !== providers.LOCAL && users = trimStringFields(users);
Object.values(providers).indexOf(users.provider) > -1
) {
users.emailVerified = true;
if (!users.password) { if (users.provider !== providers.LOCAL && Object.values(providers).indexOf(users.provider) > -1) {
const password = crypto.randomBytes(20).toString('hex'); users.emailVerified = true;
const hashedPassword = bcrypt.hashSync( if (!users.password) {
password, const password = crypto
config.bcrypt.saltRounds, .randomBytes(20)
.toString('hex');
const hashedPassword = bcrypt.hashSync(
password,
config.bcrypt.saltRounds,
); );
users.password = hashedPassword; users.password = hashedPassword
} }
} }
}); });
users.beforeUpdate((users) => { users.beforeUpdate((users) => {
trimStringFields(users); trimStringFields(users);
}); });
return users; return users;
}; };
/**
 * Normalize a user's string fields in place before persisting.
 *
 * Trims surrounding whitespace from `email`, `firstName` and `lastName`;
 * a falsy firstName/lastName (null, undefined, '') is normalized to null.
 * NOTE(review): `email` is dereferenced unconditionally — a missing email
 * throws a TypeError here before Sequelize validation runs; confirm callers
 * always supply it (the users model declares email allowNull: false).
 *
 * @param {object} users - user model instance (mutated in place)
 * @returns {object} the same instance, for chaining
 */
function trimStringFields(users) {
  users.email = users.email.trim();
  users.firstName = users.firstName ? users.firstName.trim() : null;
  users.lastName = users.lastName ? users.lastName.trim() : null;
  return users;
}

Some files were not shown because too many files have changed in this diff Show More