Compare commits
171 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b4a42c7e3e | ||
|
|
a25a67daba | ||
|
|
fd023934ad | ||
|
|
56c8041ad2 | ||
|
|
4d30442a5b | ||
|
|
027af5082b | ||
|
|
06a29dbf6a | ||
|
|
ba813d2602 | ||
|
|
f06a2b2c97 | ||
|
|
4634ad9207 | ||
|
|
2073bee244 | ||
|
|
6581fd70c2 | ||
|
|
e4dd94f478 | ||
|
|
e855db03d1 | ||
|
|
5ef21543b3 | ||
|
|
f5f9d3d6fc | ||
|
|
166dafc217 | ||
|
|
0d6a8b17cf | ||
|
|
e1f032b123 | ||
|
|
2bc7b64707 | ||
|
|
99f126396f | ||
|
|
917223ad43 | ||
|
|
6c904a9bb2 | ||
|
|
988139c063 | ||
|
|
8244a33c69 | ||
|
|
d890fbd82c | ||
|
|
56a319c125 | ||
|
|
71f8b060a8 | ||
|
|
7e54cc8858 | ||
|
|
fe82dbb318 | ||
|
|
b66bced6ab | ||
|
|
6fa18bb8c3 | ||
|
|
525bf966be | ||
|
|
8e315e5997 | ||
|
|
9f63911c78 | ||
|
|
7ea063390d | ||
|
|
b40dfe54b5 | ||
|
|
49ca57c63e | ||
|
|
46fd6a5317 | ||
|
|
c47adfa7fa | ||
|
|
ff36f318fc | ||
|
|
556a0c0700 | ||
|
|
a6de052952 | ||
|
|
6fecb68941 | ||
|
|
4d189346d9 | ||
|
|
1e2b72d2bd | ||
|
|
ccb0122152 | ||
|
|
4b7fed5914 | ||
|
|
4a61fd1a69 | ||
|
|
fc624c0700 | ||
|
|
0da1c02b60 | ||
|
|
6eb27fefd1 | ||
|
|
28b6f8fe71 | ||
|
|
804e082ed7 | ||
|
|
0987c87c1d | ||
|
|
5035788ffc | ||
|
|
87250b8bfe | ||
|
|
85d628fc54 | ||
|
|
72f25886ec | ||
|
|
3c8cf08051 | ||
|
|
4bb5da9ad6 | ||
|
|
b98b5106d9 | ||
|
|
540b7b6aa7 | ||
|
|
f445066706 | ||
|
|
d55453a42d | ||
|
|
ad9c788b21 | ||
|
|
0a36a87cd4 | ||
|
|
62b9b3ceb9 | ||
|
|
8287ed0aab | ||
|
|
ef137199ec | ||
|
|
f8c3bb4a07 | ||
|
|
42cc3456eb | ||
|
|
d98ab24f6e | ||
|
|
848f84efa8 | ||
|
|
4b573c1433 | ||
|
|
7d251319f2 | ||
|
|
8f1d3699a1 | ||
|
|
f2315e91cb | ||
|
|
4bb9a8bfd3 | ||
|
|
479d7d3cdd | ||
|
|
f6d0aeafd7 | ||
|
|
aac20d29a3 | ||
|
|
4bf9339a7f | ||
|
|
cef7c80d8f | ||
|
|
eb712e86f2 | ||
|
|
b5f8f30360 | ||
|
|
fd57a3bf10 | ||
|
|
fec8864e07 | ||
|
|
037d268e44 | ||
|
|
ce5a472b61 | ||
|
|
ce68bad8ba | ||
|
|
61e707f2ba | ||
|
|
c235909c54 | ||
|
|
ca3b3725f9 | ||
|
|
4e431eab9b | ||
|
|
8ef30576b1 | ||
|
|
73f524dab3 | ||
|
|
d2067f1770 | ||
|
|
d1de154b35 | ||
|
|
a454109b79 | ||
|
|
b2fd213fa3 | ||
|
|
b92132db09 | ||
|
|
7e532f8752 | ||
|
|
a9a2866b23 | ||
|
|
91c24165bf | ||
|
|
34770304c5 | ||
|
|
fcc3d9e868 | ||
|
|
a31af13c84 | ||
|
|
e6b4fe69c7 | ||
|
|
e92ab8143b | ||
|
|
b6cf7a702a | ||
|
|
3d06d927cf | ||
|
|
024c04e05a | ||
|
|
7b21006086 | ||
|
|
b66cf94fb4 | ||
|
|
25e6a1f5d2 | ||
|
|
eac21c84b3 | ||
|
|
b925094555 | ||
|
|
41713d1274 | ||
|
|
b8f2274572 | ||
|
|
baef1fca2f | ||
|
|
b9ca9bbc10 | ||
|
|
961241ecc7 | ||
|
|
fa41bd6ee1 | ||
|
|
54aec6d861 | ||
|
|
42303afe69 | ||
|
|
f72d7433fa | ||
|
|
96e771bbfb | ||
|
|
c25e7cdcc2 | ||
|
|
7f3b1795af | ||
|
|
85b4cdc807 | ||
|
|
22fe0cd50f | ||
|
|
fba1857e1b | ||
|
|
82031ac205 | ||
|
|
985bfd611f | ||
|
|
bedb334b3e | ||
|
|
4166e37a93 | ||
|
|
9159f467f5 | ||
|
|
f12ec7c8bb | ||
|
|
0728923dd1 | ||
|
|
4c41205225 | ||
|
|
e8f72cb390 | ||
|
|
0d1676f942 | ||
|
|
c18220d6ba | ||
|
|
df0594f59f | ||
|
|
d3644125db | ||
|
|
f839e6c562 | ||
|
|
b2f641a398 | ||
|
|
991ac75f32 | ||
|
|
07dccfbc37 | ||
|
|
5e2bccdca0 | ||
|
|
5d79b82de2 | ||
|
|
990839e9ca | ||
|
|
ce847e87d6 | ||
|
|
afabb0cce1 | ||
|
|
d21a76e602 | ||
|
|
013560f0c1 | ||
|
|
08ac54f0b5 | ||
|
|
d4821b6a5d | ||
|
|
ffb3a3819c | ||
|
|
e890ccf2ed | ||
|
|
70e3c28f6a | ||
|
|
5ec464e6ce | ||
|
|
1b5c13c8ae | ||
|
|
b4fe0dde81 | ||
|
|
06dd524cd0 | ||
|
|
2680417aae | ||
|
|
42684051c3 | ||
|
|
8a20fdbd9e | ||
|
|
a8942e7c5d | ||
|
|
d3b659f3bc |
5
.gitignore
vendored
5
.gitignore
vendored
@ -3,4 +3,7 @@
|
||||
node_modules/
|
||||
*/node_modules/
|
||||
**/node_modules/
|
||||
*/build/
|
||||
*/build/
|
||||
package-lock.json
|
||||
CLAUDE.md
|
||||
.claude/
|
||||
|
||||
@ -9,6 +9,7 @@ RUN yarn build
|
||||
|
||||
|
||||
FROM node:20.15.1-alpine
|
||||
# FFmpeg is bundled via npm package ffmpeg-static
|
||||
WORKDIR /app
|
||||
COPY backend/package.json backend/yarn.lock ./
|
||||
RUN yarn install --pure-lockfile
|
||||
|
||||
@ -11,16 +11,11 @@ WORKDIR /app/backend
|
||||
COPY backend/package.json backend/yarn.lock ./
|
||||
RUN yarn install --pure-lockfile
|
||||
|
||||
FROM node:20.15.1-alpine AS app-shell-deps
|
||||
RUN apk add --no-cache git
|
||||
WORKDIR /app/app-shell
|
||||
COPY app-shell/package.json app-shell/yarn.lock ./
|
||||
RUN yarn install --pure-lockfile
|
||||
|
||||
# Nginx setup and application build
|
||||
FROM node:20.15.1-alpine AS build
|
||||
RUN apk add --no-cache git nginx curl
|
||||
RUN apk add --no-cache lsof procps
|
||||
# FFmpeg is bundled via npm package ffmpeg-static
|
||||
RUN yarn global add concurrently
|
||||
|
||||
RUN apk add --no-cache \
|
||||
@ -43,11 +38,9 @@ ENV PATH /root/.yarn/bin:/root/.config/yarn/global/node_modules/.bin:$PATH
|
||||
WORKDIR /app
|
||||
COPY --from=frontend-deps /app/frontend /app/frontend
|
||||
COPY --from=backend-deps /app/backend /app/backend
|
||||
COPY --from=app-shell-deps /app/app-shell /app/app-shell
|
||||
|
||||
COPY frontend /app/frontend
|
||||
COPY backend /app/backend
|
||||
COPY app-shell /app/app-shell
|
||||
COPY docker /app/docker
|
||||
|
||||
# Copy all files from root to /app
|
||||
@ -68,8 +61,6 @@ EXPOSE 8080
|
||||
ENV NODE_ENV=dev_stage
|
||||
ENV FRONT_PORT=3001
|
||||
ENV BACKEND_PORT=3000
|
||||
ENV APP_SHELL_PORT=4000
|
||||
|
||||
|
||||
CMD ["sh", "-c", "\
|
||||
yarn --cwd /app/frontend dev & echo $! > /app/pids/frontend.pid && \
|
||||
@ -80,6 +71,5 @@ CMD ["sh", "-c", "\
|
||||
while ! nc -z localhost ${BACKEND_PORT}; do \
|
||||
sleep 2; \
|
||||
done && \
|
||||
echo 'Backend is up. Starting app_shell for Git check...' && \
|
||||
yarn --cwd /app/app-shell start && \
|
||||
wait $NGINX_PID"]
|
||||
echo 'Backend and frontend are up.' && \
|
||||
wait $NGINX_PID"]
|
||||
|
||||
481
README.md
481
README.md
@ -1,244 +1,281 @@
|
||||
|
||||
|
||||
# Tour Builder Platform
|
||||
|
||||
A web application for building and managing interactive virtual tours with drag-and-drop editing, video transitions, and PWA offline support.
|
||||
|
||||
## Features
|
||||
|
||||
- **Visual Tour Builder** - Drag-and-drop editor for creating interactive tour pages
|
||||
- **Video Transitions** - Smooth video-based transitions between pages with forward/reverse playback
|
||||
- **Multiple Element Types** - Navigation buttons, hotspots, galleries, tooltips, video/audio players
|
||||
- **Three-Tier Publishing** - Dev → Stage → Production workflow with environment isolation
|
||||
- **Asset Preloading** - Direct S3 download via presigned URLs for instant page navigation
|
||||
- **PWA Offline Mode** - Tours work offline with Cache API and IndexedDB storage
|
||||
- **Role-Based Access Control** - Granular permissions system
|
||||
- **Team Collaboration** - Project memberships with role-based access
|
||||
- **Asset Management** - Upload, optimize, and manage media assets with variants
|
||||
- **Multi-Language Support** - i18n ready
|
||||
|
||||
## Tech Stack
|
||||
|
||||
| Layer | Technology |
|
||||
|-------|------------|
|
||||
| Frontend | Next.js 15, React 19, TypeScript, Redux Toolkit, Tailwind CSS |
|
||||
| Backend | Node.js, Express, Sequelize ORM |
|
||||
| Database | PostgreSQL |
|
||||
| Authentication | JWT, Google OAuth, Microsoft OAuth |
|
||||
| File Storage | AWS S3 / Google Cloud Storage (direct presigned URL access) |
|
||||
| PWA | Serwist Service Worker, Cache API, IndexedDB (Dexie) |
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Node.js 18+
|
||||
- PostgreSQL 14+
|
||||
- Yarn (backend) / npm (frontend)
|
||||
|
||||
### Database Setup (First Time)
|
||||
|
||||
```bash
|
||||
# Create database user and database
|
||||
PGPASSWORD='postgres' psql -U postgres -c "CREATE USER app_39215 WITH PASSWORD 'your-password';"
|
||||
PGPASSWORD='postgres' psql -U postgres -c "CREATE DATABASE app_39215 OWNER app_39215;"
|
||||
```
|
||||
|
||||
### Start Backend (Terminal 1)
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
yarn install
|
||||
export $(cat .env | xargs) && NODE_ENV=production yarn start
|
||||
```
|
||||
|
||||
Backend runs on **http://localhost:8080**
|
||||
|
||||
### Start Frontend (Terminal 2)
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
Frontend runs on **http://localhost:3000**
|
||||
|
||||
### Default Login
|
||||
|
||||
After seeding, login with credentials configured in `backend/.env`:
|
||||
- Email: `ADMIN_EMAIL` (default: admin@flatlogic.com)
|
||||
- Password: `ADMIN_PASS` (default: 88dbeaf8)
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
├── backend/ # Node.js/Express API server
|
||||
│ ├── src/
|
||||
│ │ ├── routes/ # REST API endpoints
|
||||
│ │ ├── services/ # Business logic
|
||||
│ │ ├── db/
|
||||
│ │ │ ├── models/ # Sequelize models
|
||||
│ │ │ ├── api/ # Database access layer
|
||||
│ │ │ ├── migrations/ # Schema migrations
|
||||
│ │ │ └── seeders/ # Seed data
|
||||
│ │ ├── auth/ # Passport.js authentication
|
||||
│ │ └── middlewares/ # Express middlewares
|
||||
│ └── README.md # Backend documentation
|
||||
│
|
||||
├── frontend/ # Next.js React application
|
||||
│ ├── src/
|
||||
│ │ ├── pages/ # Next.js pages
|
||||
│ │ ├── components/ # React components
|
||||
│ │ ├── stores/ # Redux Toolkit slices
|
||||
│ │ ├── hooks/ # Custom React hooks
|
||||
│ │ ├── types/ # TypeScript definitions
|
||||
│ │ └── lib/ # Utility libraries
|
||||
│ └── README.md # Frontend documentation
|
||||
│
|
||||
└── docker/ # Docker Compose setup
|
||||
├── docker-compose.yml
|
||||
├── start-backend.sh
|
||||
├── wait-for-it.sh
|
||||
└── README.md # Docker documentation
|
||||
```
|
||||
|
||||
## Key Workflows
|
||||
|
||||
### Tour Creation
|
||||
|
||||
1. Create a new project in the dashboard
|
||||
2. Open the **Constructor** (`/constructor?projectId=...`)
|
||||
3. Add pages with background images/videos
|
||||
4. Place interactive elements (buttons, hotspots, etc.)
|
||||
5. Configure navigation targets and transitions on elements
|
||||
6. Preview in **Runtime** mode
|
||||
7. Publish: Dev → Stage → Production
|
||||
|
||||
### Publishing Flow
|
||||
|
||||
Three-tier environment model with separate content per environment:
|
||||
|
||||
```
|
||||
Dev Environment Stage Environment Production Environment
|
||||
│ │ │
|
||||
/constructor?projectId= /p/[projectSlug]/stage /p/[projectSlug]
|
||||
(editing mode) (preview) (public access)
|
||||
│ │ │
|
||||
└── Save to Stage ──────►└── Publish ─────────────►│
|
||||
```
|
||||
|
||||
| Action | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| Save to Stage | `POST /api/publish/save-to-stage` | Copy dev pages to stage |
|
||||
| Publish | `POST /api/publish` | Copy stage pages to production |
|
||||
|
||||
Pages have an `environment` field (`dev`, `stage`, `production`) that determines visibility.
|
||||
|
||||
### Element Types
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `navigation_next` | Forward navigation button |
|
||||
| `navigation_prev` | Back navigation button |
|
||||
| `spot` | Clickable hotspot area |
|
||||
| `description` | Text description overlay |
|
||||
| `tooltip` | Hover tooltip |
|
||||
| `gallery` | Image gallery |
|
||||
| `carousel` | Image carousel |
|
||||
| `logo` | Logo element |
|
||||
| `video_player` | Embedded video player |
|
||||
| `audio_player` | Audio player |
|
||||
| `popup` | Modal popup |
|
||||
|
||||
**Element Defaults:** Each element type has configurable default settings that follow a three-tier hierarchy:
|
||||
- **Global** (`element_type_defaults`) - Platform-wide defaults (auto-seeded)
|
||||
- **Project** (`project_element_defaults`) - Per-project overrides (auto-snapshotted on project creation)
|
||||
- **Instance** (`tour_pages.ui_schema_json`) - Page-specific element values
|
||||
|
||||
## API Overview
|
||||
|
||||
Base URL: `http://localhost:8080/api`
|
||||
|
||||
| Endpoint | Description |
|
||||
|----------|-------------|
|
||||
| `POST /auth/signin/local` | Login |
|
||||
| `POST /auth/signup` | Register |
|
||||
| `GET /auth/me` | Current user |
|
||||
| `GET /projects` | List projects |
|
||||
| `POST /publish/save-to-stage` | Copy dev → stage |
|
||||
| `POST /publish` | Copy stage → production |
|
||||
| `GET /tour_pages` | List tour pages |
|
||||
| `GET /assets` | List assets |
|
||||
| `POST /file/presign` | Get S3 presigned URLs for asset download (public) |
|
||||
|
||||
Full API documentation: `http://localhost:8080/api-docs` (Swagger)
|
||||
|
||||
## Docker Setup
|
||||
|
||||
```bash
|
||||
cd docker
|
||||
chmod +x start-backend.sh wait-for-it.sh
|
||||
|
||||
# Start with fresh database
|
||||
rm -rf data && docker-compose up
|
||||
|
||||
# Or keep existing data
|
||||
docker-compose up
|
||||
```
|
||||
|
||||
Access at `http://localhost:3000`
|
||||
|
||||
## Environment Variables
|
||||
|
||||
### Backend (`backend/.env`)
|
||||
|
||||
```env
|
||||
# Database
|
||||
DB_HOST=localhost
|
||||
DB_PORT=5432
|
||||
DB_NAME=app_39215
|
||||
DB_USER=app_39215
|
||||
DB_PASSWORD=your-password
|
||||
|
||||
## This project was generated by [Flatlogic Platform](https://flatlogic.com).
|
||||
# JWT
|
||||
SECRET_KEY=your-secret-key
|
||||
|
||||
|
||||
- Frontend: [React.js](https://flatlogic.com/templates?framework%5B%5D=react&sort=default)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Admin (for seeding)
|
||||
ADMIN_EMAIL=admin@example.com
|
||||
ADMIN_PASS=admin-password
|
||||
|
||||
# AWS S3 (optional)
|
||||
AWS_S3_BUCKET=your-bucket
|
||||
AWS_S3_REGION=us-east-1
|
||||
AWS_ACCESS_KEY_ID=your-key
|
||||
AWS_SECRET_ACCESS_KEY=your-secret
|
||||
|
||||
# OAuth (optional)
|
||||
GOOGLE_CLIENT_ID=...
|
||||
GOOGLE_CLIENT_SECRET=...
|
||||
MS_CLIENT_ID=...
|
||||
MS_CLIENT_SECRET=...
|
||||
|
||||
# Email - AWS SES (optional)
|
||||
EMAIL_USER=...
|
||||
EMAIL_PASS=...
|
||||
```
|
||||
|
||||
|
||||
- Backend: [NodeJS](https://flatlogic.com/templates?backend%5B%5D=nodejs&sort=default)
|
||||
|
||||
<details><summary>Backend Folder Structure</summary>
|
||||
|
||||
The generated application has the following backend folder structure:
|
||||
### Frontend (`frontend/.env.local`)
|
||||
|
||||
`src` folder which contains your working files that will be used later to create the build. The src folder contains folders as:
|
||||
```env
|
||||
NEXT_PUBLIC_BACK_API=http://localhost:8080/api
|
||||
```
|
||||
|
||||
- `auth` - config the library for authentication and authorization;
|
||||
## Common Commands
|
||||
|
||||
- `db` - contains such folders as:
|
||||
### Backend
|
||||
|
||||
- `api` - documentation that is automatically generated by jsdoc or other tools;
|
||||
```bash
|
||||
cd backend
|
||||
yarn start # Start server (migrate + seed + watch)
|
||||
yarn db:migrate # Run migrations
|
||||
yarn db:seed # Seed data
|
||||
yarn db:reset # Drop + create + migrate + seed
|
||||
yarn lint # ESLint
|
||||
```
|
||||
|
||||
- `migrations` - is a skeleton of the database or all the actions that users do with the database;
|
||||
### Frontend
|
||||
|
||||
- `models`- what will represent the database for the backend;
|
||||
```bash
|
||||
cd frontend
|
||||
npm run dev # Development server
|
||||
npm run build # Production build
|
||||
npm run lint # ESLint
|
||||
npm run format # Prettier
|
||||
```
|
||||
|
||||
- `seeders` - the entity that creates the data for the database.
|
||||
## Troubleshooting
|
||||
|
||||
- `routes` - this folder would contain all the routes that you have created using Express Router and what they do would be exported from a Controller file;
|
||||
### Connection Refused
|
||||
|
||||
- `services` - contains such folders as `emails` and `notifications`.
|
||||
</details>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
1. Ensure PostgreSQL is running
|
||||
2. Check that port 5432 (db), 8080 (backend), 3000 (frontend) are available
|
||||
3. Verify database credentials in `.env`
|
||||
|
||||
### Database Issues
|
||||
|
||||
|
||||
|
||||
|
||||
- Database: PostgreSQL
|
||||
|
||||
```bash
|
||||
# Reset database completely
|
||||
cd backend
|
||||
yarn db:reset
|
||||
```
|
||||
|
||||
- app-shel: Core application framework that provides essential infrastructure services
|
||||
for the entire application.
|
||||
-----------------------
|
||||
### We offer 2 ways how to start the project locally: by running Frontend and Backend or with Docker.
|
||||
-----------------------
|
||||
### Permission Denied
|
||||
|
||||
## To start the project:
|
||||
Ensure the database user has proper privileges:
|
||||
|
||||
### Backend:
|
||||
```sql
|
||||
GRANT ALL PRIVILEGES ON DATABASE app_39215 TO app_39215;
|
||||
```
|
||||
|
||||
> Please change current folder: `cd backend`
|
||||
## License
|
||||
|
||||
|
||||
|
||||
#### Install local dependencies:
|
||||
`yarn install`
|
||||
|
||||
------------
|
||||
|
||||
#### Adjust local db:
|
||||
##### 1. Install postgres:
|
||||
|
||||
MacOS:
|
||||
|
||||
`brew install postgres`
|
||||
|
||||
> if you don’t have ‘brew‘ please install it (https://brew.sh) and repeat step `brew install postgres`.
|
||||
|
||||
Ubuntu:
|
||||
|
||||
`sudo apt update`
|
||||
|
||||
`sudo apt install postgresql postgresql-contrib`
|
||||
|
||||
##### 2. Create db and admin user:
|
||||
Before run and test connection, make sure you have created a database as described in the above configuration. You can use the `psql` command to create a user and database.
|
||||
|
||||
`psql postgres --u postgres`
|
||||
|
||||
Next, type this command for creating a new user with password then give access for creating the database.
|
||||
|
||||
`postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
|
||||
|
||||
`postgres-# ALTER ROLE admin CREATEDB;`
|
||||
|
||||
Quit `psql` then log in again using the new user that previously created.
|
||||
|
||||
`postgres-# \q`
|
||||
|
||||
`psql postgres -U admin`
|
||||
|
||||
Type this command to creating a new database.
|
||||
|
||||
`postgres=> CREATE DATABASE db_{your_project_name};`
|
||||
|
||||
Then give that new user privileges to the new database then quit the `psql`.
|
||||
|
||||
`postgres=> GRANT ALL PRIVILEGES ON DATABASE db_{your_project_name} TO admin;`
|
||||
|
||||
`postgres=> \q`
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
------------
|
||||
|
||||
|
||||
#### Create database:
|
||||
`yarn db:create`
|
||||
|
||||
#### Start production build:
|
||||
`yarn start`
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
### Frontend:
|
||||
|
||||
> Please change current folder: `cd frontend`
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
## To start the project with Docker:
|
||||
### Description:
|
||||
|
||||
The project contains the **docker folder** and the `Dockerfile`.
|
||||
|
||||
The `Dockerfile` is used to Deploy the project to Google Cloud.
|
||||
|
||||
The **docker folder** contains a couple of helper scripts:
|
||||
|
||||
- `docker-compose.yml` (all our services: web, backend, db are described here)
|
||||
- `start-backend.sh` (starts backend, but only after the database)
|
||||
- `wait-for-it.sh` (imported from https://github.com/vishnubob/wait-for-it)
|
||||
|
||||
> To avoid breaking the application, we recommend you don't edit the following files: everything that includes the **docker folder** and `Dokerfile`.
|
||||
|
||||
## Run services:
|
||||
|
||||
1. Install docker compose (https://docs.docker.com/compose/install/)
|
||||
|
||||
2. Move to `docker` folder. All next steps should be done from this folder.
|
||||
|
||||
``` cd docker ```
|
||||
|
||||
3. Make executables from `wait-for-it.sh` and `start-backend.sh`:
|
||||
|
||||
``` chmod +x start-backend.sh && chmod +x wait-for-it.sh ```
|
||||
|
||||
4. Download dependend projects for services.
|
||||
|
||||
5. Review the docker-compose.yml file. Make sure that all services have Dockerfiles. Only db service doesn't require a Dockerfile.
|
||||
|
||||
6. Make sure you have needed ports (see them in `ports`) available on your local machine.
|
||||
|
||||
7. Start services:
|
||||
|
||||
7.1. With an empty database `rm -rf data && docker-compose up`
|
||||
|
||||
7.2. With a stored (from previus runs) database data `docker-compose up`
|
||||
|
||||
8. Check http://localhost:3000
|
||||
|
||||
9. Stop services:
|
||||
|
||||
9.1. Just press `Ctr+C`
|
||||
|
||||
## Most common errors:
|
||||
|
||||
1. `connection refused`
|
||||
|
||||
There could be many reasons, but the most common are:
|
||||
|
||||
- The port is not open on the destination machine.
|
||||
|
||||
- The port is open on the destination machine, but its backlog of pending connections is full.
|
||||
|
||||
- A firewall between the client and server is blocking access (also check local firewalls).
|
||||
|
||||
After checking for firewalls and that the port is open, use telnet to connect to the IP/port to test connectivity. This removes any potential issues from your application.
|
||||
|
||||
***MacOS:***
|
||||
|
||||
If you suspect that your SSH service might be down, you can run this command to find out:
|
||||
|
||||
`sudo service ssh status`
|
||||
|
||||
If the command line returns a status of down, then you’ve likely found the reason behind your connectivity error.
|
||||
|
||||
***Ubuntu:***
|
||||
|
||||
Sometimes a connection refused error can also indicate that there is an IP address conflict on your network. You can search for possible IP conflicts by running:
|
||||
|
||||
`arp-scan -I eth0 -l | grep <ipaddress>`
|
||||
|
||||
`arp-scan -I eth0 -l | grep <ipaddress>`
|
||||
|
||||
and
|
||||
|
||||
`arping <ipaddress>`
|
||||
|
||||
2. `yarn db:create` creates database with the assembled tables (on MacOS with Postgres database)
|
||||
|
||||
The workaround - put the next commands to your Postgres database terminal:
|
||||
|
||||
`DROP SCHEMA public CASCADE;`
|
||||
|
||||
`CREATE SCHEMA public;`
|
||||
|
||||
`GRANT ALL ON SCHEMA public TO postgres;`
|
||||
|
||||
`GRANT ALL ON SCHEMA public TO public;`
|
||||
|
||||
Afterwards, continue to start your project in the backend directory by running:
|
||||
|
||||
`yarn start`
|
||||
Proprietary - Tour Builder Platform
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 73 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 27 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 93 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 91 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 27 KiB |
@ -1,6 +1,6 @@
|
||||
DB_NAME=app_39215
|
||||
DB_USER=app_39215
|
||||
DB_PASS=88dbeaf8-e906-405e-9e41-c3baadeda5c6
|
||||
DB_NAME=app_39948
|
||||
DB_USER=app_39948
|
||||
DB_PASS=d82cf4a2-477c-4a75-acec-ec606e0b8a01
|
||||
DB_HOST=127.0.0.1
|
||||
DB_PORT=5432
|
||||
PORT=3000
|
||||
|
||||
@ -10,6 +10,7 @@ module.exports = {
|
||||
'import'
|
||||
],
|
||||
rules: {
|
||||
'import/no-unresolved': 'error'
|
||||
'import/no-unresolved': 'error',
|
||||
'no-unused-vars': ['error', { argsIgnorePattern: '^_' }]
|
||||
}
|
||||
};
|
||||
|
||||
@ -1,6 +1,8 @@
|
||||
FROM node:20.15.1-alpine
|
||||
|
||||
RUN apk update && apk add bash
|
||||
# Install bash and FFmpeg for video processing (reversed video generation)
|
||||
RUN apk update && apk add --no-cache bash ffmpeg
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
|
||||
@ -1,56 +1,367 @@
|
||||
# Tour Builder Platform - Backend
|
||||
|
||||
#Tour Builder Platform - template backend,
|
||||
Node.js/Express REST API server with Sequelize ORM for the Tour Builder Platform.
|
||||
|
||||
#### Run App on local machine:
|
||||
## Tech Stack
|
||||
|
||||
##### Install local dependencies:
|
||||
- `yarn install`
|
||||
- **Runtime**: Node.js 18+
|
||||
- **Framework**: Express 4.x
|
||||
- **Database**: PostgreSQL with Sequelize ORM
|
||||
- **Authentication**: Passport.js (JWT, Google OAuth, Microsoft OAuth)
|
||||
- **File Storage**: AWS S3 / Google Cloud Storage / Local filesystem
|
||||
- **Email**: Nodemailer with AWS SES
|
||||
- **API Docs**: Swagger/OpenAPI
|
||||
|
||||
------------
|
||||
## Prerequisites
|
||||
|
||||
##### Adjust local db:
|
||||
###### 1. Install postgres:
|
||||
- MacOS:
|
||||
- `brew install postgres`
|
||||
- Node.js 18+
|
||||
- PostgreSQL 14+
|
||||
- Yarn package manager
|
||||
|
||||
- Ubuntu:
|
||||
- `sudo apt update`
|
||||
- `sudo apt install postgresql postgresql-contrib`
|
||||
## Quick Start
|
||||
|
||||
###### 2. Create db and admin user:
|
||||
- Before run and test connection, make sure you have created a database as described in the above configuration. You can use the `psql` command to create a user and database.
|
||||
- `psql postgres --u postgres`
|
||||
```bash
|
||||
# Install dependencies
|
||||
yarn install
|
||||
|
||||
- Next, type this command for creating a new user with password then give access for creating the database.
|
||||
- `postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
|
||||
- `postgres-# ALTER ROLE admin CREATEDB;`
|
||||
# Create database (first time only)
|
||||
yarn db:create
|
||||
|
||||
- Quit `psql` then log in again using the new user that previously created.
|
||||
- `postgres-# \q`
|
||||
- `psql postgres -U admin`
|
||||
# Start server (runs migrations, seeds, and watches for changes)
|
||||
export $(cat .env | xargs) && NODE_ENV=production yarn start
|
||||
```
|
||||
|
||||
- Type this command to creating a new database.
|
||||
- `postgres=> CREATE DATABASE db_tour_builder_platform;`
|
||||
The server runs on **port 8080** by default.
|
||||
|
||||
- Then give that new user privileges to the new database then quit the `psql`.
|
||||
- `postgres=> GRANT ALL PRIVILEGES ON DATABASE db_tour_builder_platform TO admin;`
|
||||
- `postgres=> \q`
|
||||
|
||||
------------
|
||||
## Environment Variables
|
||||
|
||||
#### Api Documentation (Swagger)
|
||||
Create a `.env` file in the backend directory:
|
||||
|
||||
http://localhost:8080/api-docs (local host)
|
||||
```env
|
||||
# Database (required)
|
||||
DB_HOST=localhost
|
||||
DB_PORT=5432
|
||||
DB_NAME=app_39215
|
||||
DB_USER=app_39215
|
||||
DB_PASSWORD=your_password
|
||||
|
||||
http://host_name/api-docs
|
||||
# JWT Secret (required)
|
||||
SECRET_KEY=your-secret-key
|
||||
|
||||
------------
|
||||
# Admin credentials (for seeding)
|
||||
ADMIN_EMAIL=admin@example.com
|
||||
ADMIN_PASS=admin_password
|
||||
USER_PASS=user_password
|
||||
|
||||
##### Setup database tables or update after schema change
|
||||
- `yarn db:migrate`
|
||||
# AWS S3 (optional - for file storage)
|
||||
AWS_S3_BUCKET=your-bucket
|
||||
AWS_S3_REGION=us-east-1
|
||||
AWS_ACCESS_KEY_ID=your-access-key
|
||||
AWS_SECRET_ACCESS_KEY=your-secret-key
|
||||
AWS_S3_PREFIX=your-prefix
|
||||
|
||||
##### Seed the initial data (admin accounts, relevant for the first setup):
|
||||
- `yarn db:seed`
|
||||
|
||||
##### Start build:
|
||||
- `yarn start`
|
||||
# Google OAuth (optional)
|
||||
GOOGLE_CLIENT_ID=your-client-id
|
||||
GOOGLE_CLIENT_SECRET=your-client-secret
|
||||
|
||||
# Microsoft OAuth (optional)
|
||||
MS_CLIENT_ID=your-client-id
|
||||
MS_CLIENT_SECRET=your-client-secret
|
||||
|
||||
# Email - AWS SES (optional)
|
||||
EMAIL_USER=ses-smtp-user
|
||||
EMAIL_PASS=ses-smtp-password
|
||||
|
||||
# OpenAI (optional)
|
||||
GPT_KEY=your-openai-key
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
backend/src/
|
||||
├── index.js # Express app entry point
|
||||
├── config.js # Environment configuration
|
||||
├── helpers.js # Utility functions (wrapAsync)
|
||||
│
|
||||
├── auth/ # Passport.js authentication strategies
|
||||
│ └── auth.js # JWT, Google, Microsoft strategies
|
||||
│
|
||||
├── db/
|
||||
│ ├── db.config.js # Database connection config (per environment)
|
||||
│ ├── models/ # Sequelize model definitions (16 models)
|
||||
│ ├── api/ # Database access layer (CRUD per model)
|
||||
│ ├── migrations/ # Database migrations
|
||||
│ └── seeders/ # Seed data (admin users, permissions, roles)
|
||||
│
|
||||
├── routes/ # Express route handlers (22 routes)
|
||||
│ ├── auth.js # Authentication endpoints
|
||||
│ ├── projects.js # Project CRUD
|
||||
│ ├── tour_pages.js # Tour page management
|
||||
│ ├── assets.js # Asset management
|
||||
│ ├── file.js # File upload/download, presigned URLs
|
||||
│ ├── publish.js # Publishing workflow
|
||||
│ ├── search.js # Global search
|
||||
│ └── ... # Other entity routes
|
||||
│
|
||||
├── services/ # Business logic layer (21 services)
|
||||
│ ├── auth.js # Auth service (JWT, OAuth)
|
||||
│ ├── publish.js # Publishing workflow logic
|
||||
│ ├── file.js # File storage abstraction
|
||||
│ ├── search.js # Global search service
|
||||
│ ├── email/ # Email templates and sending
|
||||
│ ├── notifications/ # Error classes and i18n messages
|
||||
│ └── ... # Other entity services
|
||||
│
|
||||
├── middlewares/
|
||||
│ ├── check-permissions.js # RBAC permission checking
|
||||
│ ├── runtime-context.js # Environment detection from headers
|
||||
│ ├── runtime-public.js # Public runtime access (no auth)
|
||||
│ ├── upload.js # File upload handling (multer)
|
||||
│ └── rateLimiter.js # Rate limiting for API endpoints
|
||||
│
|
||||
├── factories/
|
||||
│ ├── router.factory.js # Generate CRUD routes
|
||||
│ └── service.factory.js # Generate service classes
|
||||
│
|
||||
└── utils/
|
||||
├── env-validation.js # Environment variable validation (Joi)
|
||||
├── errors.js # Custom error classes
|
||||
├── logger.js # Pino logger configuration
|
||||
└── index.js # Utils barrel export
|
||||
```
|
||||
|
||||
## Database Setup
|
||||
|
||||
### Create Database User and Database
|
||||
|
||||
```bash
|
||||
# Connect to PostgreSQL
|
||||
psql postgres -U postgres
|
||||
|
||||
# Create user
|
||||
CREATE ROLE app_39215 WITH LOGIN PASSWORD 'your-password';
|
||||
ALTER ROLE app_39215 CREATEDB;
|
||||
|
||||
# Create database
|
||||
CREATE DATABASE app_39215 OWNER app_39215;
|
||||
GRANT ALL PRIVILEGES ON DATABASE app_39215 TO app_39215;
|
||||
|
||||
\q
|
||||
```
|
||||
|
||||
### Available Commands
|
||||
|
||||
```bash
|
||||
yarn db:create # Create database
|
||||
yarn db:drop # Drop database
|
||||
yarn db:migrate # Run pending migrations
|
||||
yarn db:migrate:undo # Undo last migration
|
||||
yarn db:migrate:undo:all # Undo all migrations
|
||||
yarn db:migrate:status # Show migration status
|
||||
yarn db:seed # Run all seeders
|
||||
yarn db:seed:undo # Undo all seeders
|
||||
yarn db:reset # Drop, create, migrate, and seed
|
||||
yarn start # Migrate, seed, and start with watch
|
||||
yarn lint # Run ESLint
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
Swagger UI available at: `http://localhost:8080/api-docs`
|
||||
|
||||
### Core Endpoints
|
||||
|
||||
| Endpoint | Description |
|
||||
|----------|-------------|
|
||||
| `POST /api/auth/signin/local` | Email/password login |
|
||||
| `POST /api/auth/signup` | User registration |
|
||||
| `GET /api/auth/me` | Current user info (JWT required) |
|
||||
| `GET /api/auth/signin/google` | Google OAuth login |
|
||||
| `GET /api/auth/signin/microsoft` | Microsoft OAuth login |
|
||||
|
||||
### Entity CRUD Pattern
|
||||
|
||||
All entities follow standard REST patterns:
|
||||
|
||||
```
|
||||
GET /api/{entity} # List with pagination & filters
|
||||
GET /api/{entity}/:id # Get single record
|
||||
POST /api/{entity} # Create record
|
||||
PUT /api/{entity}/:id # Update record
|
||||
DELETE /api/{entity}/:id # Soft delete record
|
||||
```
|
||||
|
||||
### Main Entities
|
||||
|
||||
| Entity | Description |
|
||||
|--------|-------------|
|
||||
| `projects` | Virtual tour projects |
|
||||
| `tour_pages` | Pages within a tour (elements, navigation, transitions stored in ui_schema_json) |
|
||||
| `assets` | Uploaded media files |
|
||||
| `asset_variants` | Resized/optimized asset versions |
|
||||
| `element_type_defaults` | Global element default settings |
|
||||
| `project_element_defaults` | Project-specific element settings |
|
||||
| `project_audio_tracks` | Background audio for projects |
|
||||
| `publish_events` | Publishing history and status tracking |
|
||||
| `pwa_caches` | PWA cache manifests for offline support |
|
||||
| `presigned_url_requests` | S3 presigned URL request tracking |
|
||||
| `access_logs` | User access audit trail |
|
||||
| `users` | User accounts |
|
||||
| `roles` | User roles |
|
||||
| `permissions` | Granular permissions |
|
||||
| `project_memberships` | Team access per project |
|
||||
|
||||
### Element Defaults Hierarchy
|
||||
|
||||
UI elements use a three-tier defaults system:
|
||||
|
||||
```
|
||||
element_type_defaults (Global)
|
||||
│
|
||||
│ auto-snapshot on project creation
|
||||
▼
|
||||
project_element_defaults (Project)
|
||||
│
|
||||
│ applied when creating elements
|
||||
▼
|
||||
tour_pages.ui_schema_json (Instance)
|
||||
```
|
||||
|
||||
1. **Global** (`element_type_defaults`) - Platform-wide defaults for 11 element types (navigation, tooltip, gallery, etc.). Auto-seeded on first API access.
|
||||
|
||||
2. **Project** (`project_element_defaults`) - Per-project overrides. Automatically snapshotted from global when a project is created. Can be customized independently.
|
||||
|
||||
3. **Instance** (`tour_pages.ui_schema_json`) - Page-specific elements with their settings stored inline. Created in constructor with project defaults applied.
|
||||
|
||||
**Additional Endpoints:**
|
||||
- `POST /api/project-element-defaults/:id/reset` - Reset to current global default
|
||||
- `GET /api/project-element-defaults/:id/diff` - Compare with global default
|
||||
|
||||
### Publishing Workflow
|
||||
|
||||
Three-tier environment model for content: `dev` → `stage` → `production`
|
||||
|
||||
```
|
||||
POST /api/publish/save-to-stage # Copy dev content to stage (body: { projectId })
|
||||
POST /api/publish # Copy stage content to production (body: { projectId })
|
||||
```
|
||||
|
||||
Pages have an `environment` field (`dev`, `stage`, or `production`) that determines visibility:
|
||||
- **Constructor** (`/constructor?projectId=`) - Always shows `dev` environment
|
||||
- **Stage preview** (`/p/[slug]/stage`) - Shows `stage` environment
|
||||
- **Public runtime** (`/p/[slug]`) - Shows `production` environment
|
||||
|
||||
## Authentication
|
||||
|
||||
### JWT Authentication
|
||||
|
||||
Protected routes require JWT token in Authorization header:
|
||||
|
||||
```
|
||||
Authorization: Bearer <jwt-token>
|
||||
```
|
||||
|
||||
### OAuth Providers
|
||||
|
||||
- **Google**: `/api/auth/signin/google`
|
||||
- **Microsoft**: `/api/auth/signin/microsoft`
|
||||
|
||||
## File Storage
|
||||
|
||||
Storage provider is auto-detected based on available credentials:
|
||||
|
||||
1. **AWS S3** - If `AWS_S3_BUCKET` is configured
|
||||
2. **Google Cloud Storage** - If GCS credentials are available
|
||||
3. **Local filesystem** - Fallback (files stored in system temp directory)
|
||||
|
||||
### Upload Flow (Presigned URLs)
|
||||
|
||||
```
|
||||
POST /api/file/presigned-url # Get upload URL (authenticated)
|
||||
PUT {presigned-url} # Upload directly to S3
|
||||
POST /api/assets # Register asset in database
|
||||
```
|
||||
|
||||
### Download Flow (Direct S3 Access)
|
||||
|
||||
For runtime asset preloading, the frontend can request presigned download URLs:
|
||||
|
||||
```
|
||||
POST /api/file/presign # Get download URLs (public endpoint)
|
||||
Request: { urls: ["assets/img1.jpg", "assets/video.mp4", ...] }
|
||||
Response: { presignedUrls: { "assets/img1.jpg": "https://s3...", ... } }
|
||||
```
|
||||
|
||||
- **Max URLs per request**: 50
|
||||
- **URL expiry**: 1 hour
|
||||
- **Public endpoint**: No authentication required (for runtime playback)
|
||||
|
||||
This allows the frontend to download assets directly from S3, bypassing the backend for better performance.
|
||||
|
||||
## RBAC (Role-Based Access Control)
|
||||
|
||||
### Permission Format
|
||||
|
||||
```
|
||||
{ACTION}_{ENTITY}
|
||||
```
|
||||
|
||||
Actions: `CREATE`, `READ`, `UPDATE`, `DELETE`
|
||||
|
||||
Example: `CREATE_PROJECTS`, `READ_TOUR_PAGES`, `UPDATE_ASSETS`
|
||||
|
||||
### Default Roles
|
||||
|
||||
| Role | Description |
|
||||
|------|-------------|
|
||||
| Administrator | Full access to all features (user/role/permission management) |
|
||||
| Platform Owner | Full project access, user management |
|
||||
| Account Manager | Project and asset management |
|
||||
| Tour Designer | Create and edit tours, assets, pages |
|
||||
| Content Reviewer | Review and update content (read/update access) |
|
||||
| Analytics Viewer | Read-only access for viewing data |
|
||||
| Public | Minimal access for public users |
|
||||
|
||||
## Environment Detection
|
||||
|
||||
### Server Environment (NODE_ENV)
|
||||
|
||||
The backend uses `NODE_ENV` to determine database configuration:
|
||||
|
||||
| Value | Database | Description |
|
||||
|-------|----------|-------------|
|
||||
| `production` | Production config | Live environment |
|
||||
| `dev_stage` | Staging config | Staging environment |
|
||||
| (other) | Development config | Local development |
|
||||
|
||||
### Content Environment (tour_pages.environment)
|
||||
|
||||
Separate from server environment, tour pages have a content environment field:
|
||||
|
||||
| Value | Access | Description |
|
||||
|-------|--------|-------------|
|
||||
| `dev` | Constructor only | Editing/draft content |
|
||||
| `stage` | Stage preview | Pre-production review |
|
||||
| `production` | Public runtime | Published content |
|
||||
|
||||
The `X-Runtime-Environment` header (set by frontend) determines which content environment to query. The `runtime-context.js` middleware resolves this for API requests.
|
||||
|
||||
## Docker
|
||||
|
||||
See `docker/` directory for Docker Compose setup:
|
||||
|
||||
```bash
|
||||
cd docker
|
||||
docker-compose up
|
||||
```
|
||||
|
||||
## Logging
|
||||
|
||||
Uses Pino logger with pretty printing in development:
|
||||
|
||||
```javascript
|
||||
const logger = require('pino')();
|
||||
logger.info('Server started');
|
||||
logger.error({ err }, 'Error occurred');
|
||||
```
|
||||
|
||||
18071
backend/package-lock.json
generated
18071
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -5,53 +5,67 @@
|
||||
"start": "npm run db:migrate && npm run db:seed && npm run watch",
|
||||
"lint": "eslint . --ext .js",
|
||||
"db:migrate": "sequelize-cli db:migrate",
|
||||
"db:migrate:undo": "sequelize-cli db:migrate:undo",
|
||||
"db:migrate:undo:all": "sequelize-cli db:migrate:undo:all",
|
||||
"db:migrate:status": "sequelize-cli db:migrate:status",
|
||||
"db:seed": "sequelize-cli db:seed:all",
|
||||
"db:seed:undo": "sequelize-cli db:seed:undo:all",
|
||||
"db:drop": "sequelize-cli db:drop",
|
||||
"db:create": "sequelize-cli db:create",
|
||||
"db:reset": "npm run db:drop && npm run db:create && npm run db:migrate && npm run db:seed",
|
||||
"watch": "node watcher.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.1010.0",
|
||||
"@google-cloud/storage": "^5.18.2",
|
||||
"axios": "^1.6.7",
|
||||
"bcrypt": "5.1.1",
|
||||
"@aws-sdk/client-s3": "^3.1011.0",
|
||||
"@aws-sdk/s3-request-presigner": "^3.1016.0",
|
||||
"@google-cloud/storage": "^7.0.0",
|
||||
"axios": "^1.13.0",
|
||||
"bcrypt": "^6.0.0",
|
||||
"chokidar": "^4.0.3",
|
||||
"cors": "2.8.5",
|
||||
"csv-parser": "^3.0.0",
|
||||
"cors": "^2.8.6",
|
||||
"csv-parser": "^3.2.0",
|
||||
"dotenv": "^16.4.0",
|
||||
"express": "4.18.2",
|
||||
"express-validator": "^7.0.0",
|
||||
"ffmpeg-static": "^5.2.0",
|
||||
"ffprobe-static": "^3.1.0",
|
||||
"fluent-ffmpeg": "^2.1.3",
|
||||
"formidable": "1.2.2",
|
||||
"helmet": "4.1.1",
|
||||
"helmet": "^8.0.0",
|
||||
"joi": "^17.13.0",
|
||||
"json2csv": "^5.0.7",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"lodash": "4.17.21",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"lodash": "^4.17.23",
|
||||
"moment": "2.30.1",
|
||||
"multer": "^1.4.4",
|
||||
"multer": "^2.0.0",
|
||||
"mysql2": "2.2.5",
|
||||
"nodemailer": "6.9.9",
|
||||
"passport": "^0.7.0",
|
||||
"passport-google-oauth2": "^0.2.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"passport-microsoft": "^0.1.0",
|
||||
"pg": "8.4.1",
|
||||
"passport-microsoft": "^2.0.0",
|
||||
"pg": "^8.20.0",
|
||||
"pg-hstore": "2.3.4",
|
||||
"sequelize": "6.35.2",
|
||||
"pino": "^9.0.0",
|
||||
"pino-pretty": "^11.0.0",
|
||||
"sequelize": "^6.37.0",
|
||||
"sequelize-json-schema": "^2.1.1",
|
||||
"sqlite": "4.0.15",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.0",
|
||||
"tedious": "^18.2.4"
|
||||
"tedious": "^18.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"cross-env": "7.0.3",
|
||||
"eslint": "^8.23.1",
|
||||
"cross-env": "^7.0.3",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-plugin-import": "^2.29.1",
|
||||
"mocha": "8.1.3",
|
||||
"node-mocks-http": "1.9.0",
|
||||
"nodemon": "2.0.5",
|
||||
"sequelize-cli": "6.6.2"
|
||||
"mocha": "^10.0.0",
|
||||
"node-mocks-http": "^1.17.0",
|
||||
"nodemon": "^3.0.0",
|
||||
"sequelize-cli": "^6.6.5"
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
"use strict";
|
||||
'use strict';
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const http = require("http");
|
||||
const https = require("https");
|
||||
const { URL } = require("url");
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const http = require('http');
|
||||
const https = require('https');
|
||||
const { URL } = require('url');
|
||||
|
||||
let CONFIG_CACHE = null;
|
||||
|
||||
@ -40,7 +40,7 @@ async function createResponse(params, options = {}) {
|
||||
if (!Array.isArray(payload.input) || payload.input.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: "input_missing",
|
||||
error: 'input_missing',
|
||||
message: 'Parameter "input" is required and must be a non-empty array.',
|
||||
};
|
||||
}
|
||||
@ -56,7 +56,7 @@ async function createResponse(params, options = {}) {
|
||||
}
|
||||
|
||||
const data = initial.data;
|
||||
if (data && typeof data === "object" && data.ai_request_id) {
|
||||
if (data && typeof data === 'object' && data.ai_request_id) {
|
||||
const pollTimeout = Number(options.poll_timeout ?? 300);
|
||||
const pollInterval = Number(options.poll_interval ?? 5);
|
||||
return await awaitResponse(data.ai_request_id, {
|
||||
@ -78,16 +78,16 @@ async function request(pathValue, payload = {}, options = {}) {
|
||||
if (!resolvedPath) {
|
||||
return {
|
||||
success: false,
|
||||
error: "project_id_missing",
|
||||
message: "PROJECT_ID is not defined; cannot resolve AI proxy endpoint.",
|
||||
error: 'project_id_missing',
|
||||
message: 'PROJECT_ID is not defined; cannot resolve AI proxy endpoint.',
|
||||
};
|
||||
}
|
||||
|
||||
if (!cfg.projectUuid) {
|
||||
return {
|
||||
success: false,
|
||||
error: "project_uuid_missing",
|
||||
message: "PROJECT_UUID is not defined; aborting AI request.",
|
||||
error: 'project_uuid_missing',
|
||||
message: 'PROJECT_UUID is not defined; aborting AI request.',
|
||||
};
|
||||
}
|
||||
|
||||
@ -101,21 +101,21 @@ async function request(pathValue, payload = {}, options = {}) {
|
||||
const verifyTls = resolveVerifyTls(options.verify_tls, cfg.verifyTls);
|
||||
|
||||
const headers = {
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
[cfg.projectHeader]: cfg.projectUuid,
|
||||
};
|
||||
if (Array.isArray(options.headers)) {
|
||||
for (const header of options.headers) {
|
||||
if (typeof header === "string" && header.includes(":")) {
|
||||
const [name, value] = header.split(":", 2);
|
||||
if (typeof header === 'string' && header.includes(':')) {
|
||||
const [name, value] = header.split(':', 2);
|
||||
headers[name.trim()] = value.trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const body = JSON.stringify(bodyPayload);
|
||||
return sendRequest(url, "POST", body, headers, timeout, verifyTls);
|
||||
return sendRequest(url, 'POST', body, headers, timeout, verifyTls);
|
||||
}
|
||||
|
||||
async function fetchStatus(aiRequestId, options = {}) {
|
||||
@ -123,8 +123,8 @@ async function fetchStatus(aiRequestId, options = {}) {
|
||||
if (!cfg.projectUuid) {
|
||||
return {
|
||||
success: false,
|
||||
error: "project_uuid_missing",
|
||||
message: "PROJECT_UUID is not defined; aborting status check.",
|
||||
error: 'project_uuid_missing',
|
||||
message: 'PROJECT_UUID is not defined; aborting status check.',
|
||||
};
|
||||
}
|
||||
|
||||
@ -134,19 +134,19 @@ async function fetchStatus(aiRequestId, options = {}) {
|
||||
const verifyTls = resolveVerifyTls(options.verify_tls, cfg.verifyTls);
|
||||
|
||||
const headers = {
|
||||
Accept: "application/json",
|
||||
Accept: 'application/json',
|
||||
[cfg.projectHeader]: cfg.projectUuid,
|
||||
};
|
||||
if (Array.isArray(options.headers)) {
|
||||
for (const header of options.headers) {
|
||||
if (typeof header === "string" && header.includes(":")) {
|
||||
const [name, value] = header.split(":", 2);
|
||||
if (typeof header === 'string' && header.includes(':')) {
|
||||
const [name, value] = header.split(':', 2);
|
||||
headers[name.trim()] = value.trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sendRequest(url, "GET", null, headers, timeout, verifyTls);
|
||||
return sendRequest(url, 'GET', null, headers, timeout, verifyTls);
|
||||
}
|
||||
|
||||
async function awaitResponse(aiRequestId, options = {}) {
|
||||
@ -165,8 +165,8 @@ async function awaitResponse(aiRequestId, options = {}) {
|
||||
|
||||
if (statusResp.success) {
|
||||
const data = statusResp.data || {};
|
||||
if (data && typeof data === "object") {
|
||||
if (data.status === "success") {
|
||||
if (data && typeof data === 'object') {
|
||||
if (data.status === 'success') {
|
||||
isPending = false;
|
||||
return {
|
||||
success: true,
|
||||
@ -174,12 +174,12 @@ async function awaitResponse(aiRequestId, options = {}) {
|
||||
data: data.response || data,
|
||||
};
|
||||
}
|
||||
if (data.status === "failed") {
|
||||
if (data.status === 'failed') {
|
||||
isPending = false;
|
||||
return {
|
||||
success: false,
|
||||
status: 500,
|
||||
error: String(data.error || "AI request failed"),
|
||||
error: String(data.error || 'AI request failed'),
|
||||
data,
|
||||
};
|
||||
}
|
||||
@ -191,8 +191,8 @@ async function awaitResponse(aiRequestId, options = {}) {
|
||||
if (Date.now() >= deadline) {
|
||||
return {
|
||||
success: false,
|
||||
error: "timeout",
|
||||
message: "Timed out waiting for AI response.",
|
||||
error: 'timeout',
|
||||
message: 'Timed out waiting for AI response.',
|
||||
};
|
||||
}
|
||||
|
||||
@ -201,13 +201,14 @@ async function awaitResponse(aiRequestId, options = {}) {
|
||||
}
|
||||
|
||||
function extractText(response) {
|
||||
const payload = response && typeof response === "object" ? response.data || response : null;
|
||||
if (!payload || typeof payload !== "object") {
|
||||
return "";
|
||||
const payload =
|
||||
response && typeof response === 'object' ? response.data || response : null;
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (Array.isArray(payload.output)) {
|
||||
let combined = "";
|
||||
let combined = '';
|
||||
for (const item of payload.output) {
|
||||
if (!item || !Array.isArray(item.content)) {
|
||||
continue;
|
||||
@ -215,9 +216,9 @@ function extractText(response) {
|
||||
for (const block of item.content) {
|
||||
if (
|
||||
block &&
|
||||
typeof block === "object" &&
|
||||
block.type === "output_text" &&
|
||||
typeof block.text === "string" &&
|
||||
typeof block === 'object' &&
|
||||
block.type === 'output_text' &&
|
||||
typeof block.text === 'string' &&
|
||||
block.text.length > 0
|
||||
) {
|
||||
combined += block.text;
|
||||
@ -233,32 +234,38 @@ function extractText(response) {
|
||||
payload.choices &&
|
||||
payload.choices[0] &&
|
||||
payload.choices[0].message &&
|
||||
typeof payload.choices[0].message.content === "string"
|
||||
typeof payload.choices[0].message.content === 'string'
|
||||
) {
|
||||
return payload.choices[0].message.content;
|
||||
}
|
||||
|
||||
return "";
|
||||
return '';
|
||||
}
|
||||
|
||||
function decodeJsonFromResponse(response) {
|
||||
const text = extractText(response);
|
||||
if (!text) {
|
||||
throw new Error("No text found in AI response.");
|
||||
throw new Error('No text found in AI response.');
|
||||
}
|
||||
|
||||
const parsed = parseJson(text);
|
||||
if (parsed.ok && parsed.value && typeof parsed.value === "object") {
|
||||
if (parsed.ok && parsed.value && typeof parsed.value === 'object') {
|
||||
return parsed.value;
|
||||
}
|
||||
|
||||
const stripped = stripJsonFence(text);
|
||||
if (stripped !== text) {
|
||||
const parsedStripped = parseJson(stripped);
|
||||
if (parsedStripped.ok && parsedStripped.value && typeof parsedStripped.value === "object") {
|
||||
if (
|
||||
parsedStripped.ok &&
|
||||
parsedStripped.value &&
|
||||
typeof parsedStripped.value === 'object'
|
||||
) {
|
||||
return parsedStripped.value;
|
||||
}
|
||||
throw new Error(`JSON parse failed after stripping fences: ${parsedStripped.error}`);
|
||||
throw new Error(
|
||||
`JSON parse failed after stripping fences: ${parsedStripped.error}`,
|
||||
);
|
||||
}
|
||||
|
||||
throw new Error(`JSON parse failed: ${parsed.error}`);
|
||||
@ -271,7 +278,7 @@ function config() {
|
||||
|
||||
ensureEnvLoaded();
|
||||
|
||||
const baseUrl = process.env.AI_PROXY_BASE_URL || "https://flatlogic.com";
|
||||
const baseUrl = process.env.AI_PROXY_BASE_URL || 'https://flatlogic.com';
|
||||
const projectId = process.env.PROJECT_ID || null;
|
||||
let responsesPath = process.env.AI_RESPONSES_PATH || null;
|
||||
if (!responsesPath && projectId) {
|
||||
@ -286,8 +293,8 @@ function config() {
|
||||
responsesPath,
|
||||
projectId,
|
||||
projectUuid: process.env.PROJECT_UUID || null,
|
||||
projectHeader: process.env.AI_PROJECT_HEADER || "project-uuid",
|
||||
defaultModel: process.env.AI_DEFAULT_MODEL || "gpt-5-mini",
|
||||
projectHeader: process.env.AI_PROJECT_HEADER || 'project-uuid',
|
||||
defaultModel: process.env.AI_DEFAULT_MODEL || 'gpt-5-mini',
|
||||
timeout,
|
||||
verifyTls,
|
||||
};
|
||||
@ -296,29 +303,38 @@ function config() {
|
||||
}
|
||||
|
||||
function buildUrl(pathValue, baseUrl) {
|
||||
const trimmed = String(pathValue || "").trim();
|
||||
if (trimmed === "") {
|
||||
const trimmed = String(pathValue || '').trim();
|
||||
if (trimmed === '') {
|
||||
return baseUrl;
|
||||
}
|
||||
if (trimmed.startsWith("http://") || trimmed.startsWith("https://")) {
|
||||
if (trimmed.startsWith('http://') || trimmed.startsWith('https://')) {
|
||||
return trimmed;
|
||||
}
|
||||
if (trimmed.startsWith("/")) {
|
||||
if (trimmed.startsWith('/')) {
|
||||
return `${baseUrl}${trimmed}`;
|
||||
}
|
||||
return `${baseUrl}/${trimmed}`;
|
||||
}
|
||||
|
||||
function resolveStatusPath(aiRequestId, cfg) {
|
||||
const basePath = (cfg.responsesPath || "").replace(/\/+$/, "");
|
||||
const basePath = (cfg.responsesPath || '').replace(/\/+$/, '');
|
||||
if (!basePath) {
|
||||
return `/ai-request/${encodeURIComponent(String(aiRequestId))}/status`;
|
||||
}
|
||||
const normalized = basePath.endsWith("/ai-request") ? basePath : `${basePath}/ai-request`;
|
||||
const normalized = basePath.endsWith('/ai-request')
|
||||
? basePath
|
||||
: `${basePath}/ai-request`;
|
||||
return `${normalized}/${encodeURIComponent(String(aiRequestId))}/status`;
|
||||
}
|
||||
|
||||
function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls) {
|
||||
function sendRequest(
|
||||
urlString,
|
||||
method,
|
||||
body,
|
||||
headers,
|
||||
timeoutSeconds,
|
||||
verifyTls,
|
||||
) {
|
||||
return new Promise((resolve) => {
|
||||
let targetUrl;
|
||||
try {
|
||||
@ -326,13 +342,13 @@ function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls
|
||||
} catch (err) {
|
||||
resolve({
|
||||
success: false,
|
||||
error: "invalid_url",
|
||||
error: 'invalid_url',
|
||||
message: err.message,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const isHttps = targetUrl.protocol === "https:";
|
||||
const isHttps = targetUrl.protocol === 'https:';
|
||||
const requestFn = isHttps ? https.request : http.request;
|
||||
const options = {
|
||||
protocol: targetUrl.protocol,
|
||||
@ -348,12 +364,12 @@ function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls
|
||||
}
|
||||
|
||||
const req = requestFn(options, (res) => {
|
||||
let responseBody = "";
|
||||
res.setEncoding("utf8");
|
||||
res.on("data", (chunk) => {
|
||||
let responseBody = '';
|
||||
res.setEncoding('utf8');
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk;
|
||||
});
|
||||
res.on("end", () => {
|
||||
res.on('end', () => {
|
||||
const status = res.statusCode || 0;
|
||||
const parsed = parseJson(responseBody);
|
||||
const payload = parsed.ok ? parsed.value : responseBody;
|
||||
@ -372,9 +388,11 @@ function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls
|
||||
}
|
||||
|
||||
const errorMessage =
|
||||
parsed.ok && payload && typeof payload === "object"
|
||||
? String(payload.error || payload.message || "AI proxy request failed")
|
||||
: String(responseBody || "AI proxy request failed");
|
||||
parsed.ok && payload && typeof payload === 'object'
|
||||
? String(
|
||||
payload.error || payload.message || 'AI proxy request failed',
|
||||
)
|
||||
: String(responseBody || 'AI proxy request failed');
|
||||
|
||||
resolve({
|
||||
success: false,
|
||||
@ -386,14 +404,14 @@ function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls
|
||||
});
|
||||
});
|
||||
|
||||
req.on("timeout", () => {
|
||||
req.destroy(new Error("request_timeout"));
|
||||
req.on('timeout', () => {
|
||||
req.destroy(new Error('request_timeout'));
|
||||
});
|
||||
|
||||
req.on("error", (err) => {
|
||||
req.on('error', (err) => {
|
||||
resolve({
|
||||
success: false,
|
||||
error: "request_failed",
|
||||
error: 'request_failed',
|
||||
message: err.message,
|
||||
});
|
||||
});
|
||||
@ -406,8 +424,8 @@ function sendRequest(urlString, method, body, headers, timeoutSeconds, verifyTls
|
||||
}
|
||||
|
||||
function parseJson(value) {
|
||||
if (typeof value !== "string" || value.trim() === "") {
|
||||
return { ok: false, error: "empty_response" };
|
||||
if (typeof value !== 'string' || value.trim() === '') {
|
||||
return { ok: false, error: 'empty_response' };
|
||||
}
|
||||
try {
|
||||
return { ok: true, value: JSON.parse(value) };
|
||||
@ -418,11 +436,14 @@ function parseJson(value) {
|
||||
|
||||
function stripJsonFence(text) {
|
||||
const trimmed = text.trim();
|
||||
if (trimmed.startsWith("```json")) {
|
||||
return trimmed.replace(/^```json/, "").replace(/```$/, "").trim();
|
||||
if (trimmed.startsWith('```json')) {
|
||||
return trimmed
|
||||
.replace(/^```json/, '')
|
||||
.replace(/```$/, '')
|
||||
.trim();
|
||||
}
|
||||
if (trimmed.startsWith("```")) {
|
||||
return trimmed.replace(/^```/, "").replace(/```$/, "").trim();
|
||||
if (trimmed.startsWith('```')) {
|
||||
return trimmed.replace(/^```/, '').replace(/```$/, '').trim();
|
||||
}
|
||||
return text;
|
||||
}
|
||||
@ -436,7 +457,7 @@ function resolveVerifyTls(value, fallback) {
|
||||
if (value === undefined || value === null) {
|
||||
return Boolean(fallback);
|
||||
}
|
||||
return String(value).toLowerCase() !== "false" && String(value) !== "0";
|
||||
return String(value).toLowerCase() !== 'false' && String(value) !== '0';
|
||||
}
|
||||
|
||||
function ensureEnvLoaded() {
|
||||
@ -444,29 +465,32 @@ function ensureEnvLoaded() {
|
||||
return;
|
||||
}
|
||||
|
||||
const envPath = path.resolve(__dirname, "../../../../.env");
|
||||
const envPath = path.resolve(__dirname, '../../../../.env');
|
||||
if (!fs.existsSync(envPath)) {
|
||||
return;
|
||||
}
|
||||
|
||||
let content;
|
||||
try {
|
||||
content = fs.readFileSync(envPath, "utf8");
|
||||
content = fs.readFileSync(envPath, 'utf8');
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to read executor .env: ${err.message}`);
|
||||
}
|
||||
|
||||
for (const line of content.split(/\r?\n/)) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith("#") || !trimmed.includes("=")) {
|
||||
if (!trimmed || trimmed.startsWith('#') || !trimmed.includes('=')) {
|
||||
continue;
|
||||
}
|
||||
const [rawKey, ...rest] = trimmed.split("=");
|
||||
const [rawKey, ...rest] = trimmed.split('=');
|
||||
const key = rawKey.trim();
|
||||
if (!key) {
|
||||
continue;
|
||||
}
|
||||
const value = rest.join("=").trim().replace(/^['"]|['"]$/g, "");
|
||||
const value = rest
|
||||
.join('=')
|
||||
.trim()
|
||||
.replace(/^['"]|['"]$/g, '');
|
||||
if (!process.env[key]) {
|
||||
process.env[key] = value;
|
||||
}
|
||||
|
||||
@ -10,59 +10,69 @@ const GoogleStrategy = require('passport-google-oauth2').Strategy;
|
||||
const MicrosoftStrategy = require('passport-microsoft').Strategy;
|
||||
const UsersDBApi = require('../db/api/users');
|
||||
|
||||
passport.use(
|
||||
new JWTstrategy(
|
||||
{
|
||||
passReqToCallback: true,
|
||||
secretOrKey: config.secret_key,
|
||||
jwtFromRequest: ExtractJWT.fromAuthHeaderAsBearerToken(),
|
||||
},
|
||||
async (req, token, done) => {
|
||||
try {
|
||||
// Use lightweight auth query - only loads essential fields + permissions
|
||||
const user = await UsersDBApi.findByForAuth({ email: token.user.email });
|
||||
|
||||
passport.use(new JWTstrategy({
|
||||
passReqToCallback: true,
|
||||
secretOrKey: config.secret_key,
|
||||
jwtFromRequest: ExtractJWT.fromAuthHeaderAsBearerToken()
|
||||
}, async (req, token, done) => {
|
||||
try {
|
||||
const user = await UsersDBApi.findBy( {email: token.user.email});
|
||||
if (user && user.disabled) {
|
||||
return done(new Error(`User '${user.email}' is disabled`));
|
||||
}
|
||||
|
||||
if (user && user.disabled) {
|
||||
return done (new Error(`User '${user.email}' is disabled`));
|
||||
}
|
||||
req.currentUser = user;
|
||||
|
||||
req.currentUser = user;
|
||||
return done(null, user);
|
||||
} catch (error) {
|
||||
done(error);
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
return done(null, user);
|
||||
} catch (error) {
|
||||
done(error);
|
||||
}
|
||||
}));
|
||||
passport.use(
|
||||
new GoogleStrategy(
|
||||
{
|
||||
clientID: config.google.clientId,
|
||||
clientSecret: config.google.clientSecret,
|
||||
callbackURL: config.apiUrl + '/auth/signin/google/callback',
|
||||
passReqToCallback: true,
|
||||
},
|
||||
function (request, accessToken, refreshToken, profile, done) {
|
||||
socialStrategy(profile.email, profile, providers.GOOGLE, done);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
passport.use(new GoogleStrategy({
|
||||
clientID: config.google.clientId,
|
||||
clientSecret: config.google.clientSecret,
|
||||
callbackURL: config.apiUrl + '/auth/signin/google/callback',
|
||||
passReqToCallback: true
|
||||
},
|
||||
function (request, accessToken, refreshToken, profile, done) {
|
||||
socialStrategy(profile.email, profile, providers.GOOGLE, done);
|
||||
}
|
||||
));
|
||||
|
||||
|
||||
passport.use(new MicrosoftStrategy({
|
||||
clientID: config.microsoft.clientId,
|
||||
clientSecret: config.microsoft.clientSecret,
|
||||
callbackURL: config.apiUrl + '/auth/signin/microsoft/callback',
|
||||
passReqToCallback: true
|
||||
},
|
||||
function (request, accessToken, refreshToken, profile, done) {
|
||||
const email = profile._json.mail || profile._json.userPrincipalName;
|
||||
socialStrategy(email, profile, providers.MICROSOFT, done);
|
||||
}
|
||||
));
|
||||
passport.use(
|
||||
new MicrosoftStrategy(
|
||||
{
|
||||
clientID: config.microsoft.clientId,
|
||||
clientSecret: config.microsoft.clientSecret,
|
||||
callbackURL: config.apiUrl + '/auth/signin/microsoft/callback',
|
||||
passReqToCallback: true,
|
||||
},
|
||||
function (request, accessToken, refreshToken, profile, done) {
|
||||
const email = profile._json.mail || profile._json.userPrincipalName;
|
||||
socialStrategy(email, profile, providers.MICROSOFT, done);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
function socialStrategy(email, profile, provider, done) {
|
||||
db.users.findOrCreate({where: {email, provider}}).then(([user]) => {
|
||||
db.users.findOrCreate({ where: { email, provider } }).then(([user]) => {
|
||||
const body = {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
name: profile.displayName,
|
||||
};
|
||||
const token = helpers.jwtSign({user: body});
|
||||
return done(null, {token});
|
||||
const token = helpers.jwtSign({ user: body });
|
||||
return done(null, { token });
|
||||
});
|
||||
}
|
||||
|
||||
@ -1,42 +1,15 @@
|
||||
|
||||
|
||||
const os = require('os');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const envFilePath = path.resolve(__dirname, '../.env');
|
||||
require('dotenv').config({ path: path.resolve(__dirname, '../.env') });
|
||||
|
||||
if (fs.existsSync(envFilePath)) {
|
||||
const envContent = fs.readFileSync(envFilePath, 'utf8');
|
||||
envContent.split('\n').forEach((line) => {
|
||||
const trimmedLine = line.trim();
|
||||
|
||||
if (!trimmedLine || trimmedLine.startsWith('#')) {
|
||||
return;
|
||||
}
|
||||
|
||||
const delimiterIndex = trimmedLine.indexOf('=');
|
||||
|
||||
if (delimiterIndex === -1) {
|
||||
return;
|
||||
}
|
||||
|
||||
const key = trimmedLine.slice(0, delimiterIndex).trim();
|
||||
const rawValue = trimmedLine.slice(delimiterIndex + 1).trim();
|
||||
|
||||
if (!key || Object.prototype.hasOwnProperty.call(process.env, key)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const unquotedValue = rawValue.replace(/^['"]|['"]$/g, '');
|
||||
process.env[key] = unquotedValue;
|
||||
});
|
||||
}
|
||||
const { validateEnv } = require('./utils/env-validation');
|
||||
validateEnv();
|
||||
|
||||
const config = {
|
||||
gcloud: {
|
||||
bucket: "fldemo-files",
|
||||
hash: "afeefb9d49f5b7977577876b99532ac7"
|
||||
bucket: 'fldemo-files',
|
||||
hash: 'afeefb9d49f5b7977577876b99532ac7',
|
||||
},
|
||||
s3: {
|
||||
bucket: process.env.AWS_S3_BUCKET || '',
|
||||
@ -44,37 +17,53 @@ const config = {
|
||||
accessKeyId: process.env.AWS_ACCESS_KEY_ID || '',
|
||||
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '',
|
||||
prefix: process.env.AWS_S3_PREFIX || 'afeefb9d49f5b7977577876b99532ac7',
|
||||
// Timeout configuration (in milliseconds)
|
||||
connectionTimeout:
|
||||
parseInt(process.env.AWS_S3_CONNECTION_TIMEOUT, 10) || 5000,
|
||||
requestTimeout: parseInt(process.env.AWS_S3_REQUEST_TIMEOUT, 10) || 30000,
|
||||
// Retry configuration
|
||||
maxAttempts: parseInt(process.env.AWS_S3_MAX_ATTEMPTS, 10) || 3,
|
||||
// Connection pool configuration
|
||||
maxSockets: parseInt(process.env.AWS_S3_MAX_SOCKETS, 10) || 50,
|
||||
keepAlive: process.env.AWS_S3_KEEP_ALIVE !== 'false',
|
||||
// Presigned URL expiry (in seconds)
|
||||
presignExpirySeconds:
|
||||
parseInt(process.env.AWS_S3_PRESIGN_EXPIRY, 10) || 3600,
|
||||
},
|
||||
bcrypt: {
|
||||
saltRounds: 12
|
||||
saltRounds: 12,
|
||||
},
|
||||
admin_pass: "88dbeaf8",
|
||||
user_pass: "c3baadeda5c6",
|
||||
admin_email: "admin@flatlogic.com",
|
||||
admin_pass: process.env.ADMIN_PASS || '88dbeaf8',
|
||||
user_pass: process.env.USER_PASS || 'c3baadeda5c6',
|
||||
admin_email: process.env.ADMIN_EMAIL || 'admin@flatlogic.com',
|
||||
providers: {
|
||||
LOCAL: 'local',
|
||||
GOOGLE: 'google',
|
||||
MICROSOFT: 'microsoft'
|
||||
MICROSOFT: 'microsoft',
|
||||
},
|
||||
secret_key: process.env.SECRET_KEY || '88dbeaf8-e906-405e-9e41-c3baadeda5c6',
|
||||
remote: '',
|
||||
port: process.env.NODE_ENV === "production" ? "" : "8080",
|
||||
hostUI: process.env.NODE_ENV === "production" ? "" : "http://localhost",
|
||||
portUI: process.env.NODE_ENV === "production" ? "" : "3000",
|
||||
port: process.env.NODE_ENV === 'production' ? '' : '8080',
|
||||
hostUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost',
|
||||
portUI: process.env.NODE_ENV === 'production' ? '' : '3000',
|
||||
|
||||
portUIProd: process.env.NODE_ENV === "production" ? "" : ":3000",
|
||||
portUIProd: process.env.NODE_ENV === 'production' ? '' : ':3000',
|
||||
|
||||
swaggerUI: process.env.NODE_ENV === "production" ? "" : "http://localhost",
|
||||
swaggerPort: process.env.NODE_ENV === "production" ? "" : ":8080",
|
||||
swaggerUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost',
|
||||
swaggerPort: process.env.NODE_ENV === 'production' ? '' : ':8080',
|
||||
google: {
|
||||
clientId: process.env.GOOGLE_CLIENT_ID || '',
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET || '',
|
||||
},
|
||||
microsoft: {
|
||||
clientId: process.env.MS_CLIENT_ID || '',
|
||||
clientSecret: process.env.MS_CLIENT_SECRET || '',
|
||||
clientId: process.env.MS_CLIENT_ID || '',
|
||||
clientSecret: process.env.MS_CLIENT_SECRET || '',
|
||||
},
|
||||
uploadDir: os.tmpdir(),
|
||||
// Local cache for S3 proxy downloads (improves performance for repeated requests)
|
||||
s3CacheDir: process.env.S3_CACHE_DIR || path.join(os.tmpdir(), 's3-cache'),
|
||||
s3CacheEnabled: process.env.S3_CACHE_ENABLED !== 'false', // Enabled by default
|
||||
s3CacheMaxAge: parseInt(process.env.S3_CACHE_MAX_AGE, 10) || 86400, // 24 hours
|
||||
email: {
|
||||
from: 'Tour Builder Platform <app@flatlogic.app>',
|
||||
host: 'email-smtp.us-east-1.amazonaws.com',
|
||||
@ -85,29 +74,26 @@ const config = {
|
||||
},
|
||||
tls: {
|
||||
rejectUnauthorized: process.env.EMAIL_TLS_REJECT_UNAUTHORIZED !== 'false',
|
||||
}
|
||||
},
|
||||
},
|
||||
roles: {
|
||||
|
||||
admin: 'Administrator',
|
||||
|
||||
|
||||
|
||||
user: 'Analytics Viewer',
|
||||
|
||||
user: 'Analytics Viewer',
|
||||
},
|
||||
|
||||
project_uuid: '88dbeaf8-e906-405e-9e41-c3baadeda5c6',
|
||||
flHost: process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'dev_stage' ? 'https://flatlogic.com/projects' : 'http://localhost:3000/projects',
|
||||
|
||||
flHost:
|
||||
process.env.NODE_ENV === 'production' ||
|
||||
process.env.NODE_ENV === 'dev_stage'
|
||||
? 'https://flatlogic.com/projects'
|
||||
: 'http://localhost:3000/projects',
|
||||
|
||||
gpt_key: process.env.GPT_KEY || '',
|
||||
};
|
||||
|
||||
config.pexelsKey = process.env.PEXELS_KEY || '';
|
||||
|
||||
config.pexelsQuery = 'Architect drafting blueprint';
|
||||
config.host = process.env.NODE_ENV === "production" ? config.remote : "http://localhost";
|
||||
config.host =
|
||||
process.env.NODE_ENV === 'production' ? config.remote : 'http://localhost';
|
||||
config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`;
|
||||
config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`;
|
||||
config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`;
|
||||
|
||||
@ -1,514 +1,88 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class Access_logsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.access_logs;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'access_logs';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['path', 'ip_address', 'user_agent'];
|
||||
}
|
||||
|
||||
module.exports = class Access_logsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['accessed_at'];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['environment'];
|
||||
}
|
||||
|
||||
const access_logs = await db.access_logs.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
environment: data.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
path: data.path
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
ip_address: data.ip_address
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
user_agent: data.user_agent
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
accessed_at: data.accessed_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'environment',
|
||||
'path',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
'accessed_at',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
await access_logs.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await access_logs.setUser( data.user || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'path';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [
|
||||
{ field: 'project', setter: 'setProject', isArray: false },
|
||||
{ field: 'user', setter: 'setUser', isArray: false },
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'project' }, { association: 'user' }];
|
||||
}
|
||||
|
||||
return access_logs;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const access_logsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
environment: item.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
path: item.path
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
ip_address: item.ip_address
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
user_agent: item.user_agent
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
accessed_at: item.accessed_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const access_logs = await db.access_logs.bulkCreate(access_logsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return access_logs;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const access_logs = await db.access_logs.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.environment !== undefined) updatePayload.environment = data.environment;
|
||||
|
||||
|
||||
if (data.path !== undefined) updatePayload.path = data.path;
|
||||
|
||||
|
||||
if (data.ip_address !== undefined) updatePayload.ip_address = data.ip_address;
|
||||
|
||||
|
||||
if (data.user_agent !== undefined) updatePayload.user_agent = data.user_agent;
|
||||
|
||||
|
||||
if (data.accessed_at !== undefined) updatePayload.accessed_at = data.accessed_at;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await access_logs.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await access_logs.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
if (data.user !== undefined) {
|
||||
await access_logs.setUser(
|
||||
|
||||
data.user,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return access_logs;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const access_logs = await db.access_logs.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of access_logs) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of access_logs) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return access_logs;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const access_logs = await db.access_logs.findByPk(id, options);
|
||||
|
||||
await access_logs.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await access_logs.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return access_logs;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const access_logs = await db.access_logs.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!access_logs) {
|
||||
return access_logs;
|
||||
}
|
||||
|
||||
const output = access_logs.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await access_logs.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.user = await access_logs.getUser({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [
|
||||
{ model: db.projects, as: 'project', required: false },
|
||||
{ model: db.users, as: 'user', required: false },
|
||||
];
|
||||
}
|
||||
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'project',
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'name',
|
||||
},
|
||||
|
||||
{
|
||||
filterKey: 'user',
|
||||
model: db.users,
|
||||
as: 'user',
|
||||
|
||||
where: filter.user ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
firstName: {
|
||||
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'firstName',
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
environment: data.environment || null,
|
||||
path: data.path || null,
|
||||
ip_address: data.ip_address || null,
|
||||
user_agent: data.user_agent || null,
|
||||
accessed_at: data.accessed_at || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.path) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'access_logs',
|
||||
'path',
|
||||
filter.path,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.ip_address) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'access_logs',
|
||||
'ip_address',
|
||||
filter.ip_address,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.user_agent) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'access_logs',
|
||||
'user_agent',
|
||||
filter.user_agent,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.accessed_atRange) {
|
||||
const [start, end] = filter.accessed_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
accessed_at: {
|
||||
...where.accessed_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
accessed_at: {
|
||||
...where.accessed_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.environment) {
|
||||
where = {
|
||||
...where,
|
||||
environment: filter.environment,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.access_logs.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'access_logs',
|
||||
'path',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.access_logs.findAll({
|
||||
attributes: [ 'id', 'path' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['path', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.path,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = Access_logsDBApi;
|
||||
|
||||
@ -1,503 +1,84 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class Asset_variantsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.asset_variants;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'asset_variants';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['cdn_url'];
|
||||
}
|
||||
|
||||
module.exports = class Asset_variantsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['width_px', 'height_px', 'size_mb'];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['variant_type'];
|
||||
}
|
||||
|
||||
const asset_variants = await db.asset_variants.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
variant_type: data.variant_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cdn_url: data.cdn_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
width_px: data.width_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
height_px: data.height_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
size_mb: data.size_mb
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'variant_type',
|
||||
'cdn_url',
|
||||
'width_px',
|
||||
'height_px',
|
||||
'size_mb',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
await asset_variants.setAsset( data.asset || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'variant_type';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'asset', setter: 'setAsset', isArray: false }];
|
||||
}
|
||||
|
||||
|
||||
|
||||
return asset_variants;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const asset_variantsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
variant_type: item.variant_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cdn_url: item.cdn_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
width_px: item.width_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
height_px: item.height_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
size_mb: item.size_mb
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const asset_variants = await db.asset_variants.bulkCreate(asset_variantsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return asset_variants;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const asset_variants = await db.asset_variants.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.variant_type !== undefined) updatePayload.variant_type = data.variant_type;
|
||||
|
||||
|
||||
if (data.cdn_url !== undefined) updatePayload.cdn_url = data.cdn_url;
|
||||
|
||||
|
||||
if (data.width_px !== undefined) updatePayload.width_px = data.width_px;
|
||||
|
||||
|
||||
if (data.height_px !== undefined) updatePayload.height_px = data.height_px;
|
||||
|
||||
|
||||
if (data.size_mb !== undefined) updatePayload.size_mb = data.size_mb;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await asset_variants.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.asset !== undefined) {
|
||||
await asset_variants.setAsset(
|
||||
|
||||
data.asset,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return asset_variants;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const asset_variants = await db.asset_variants.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of asset_variants) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of asset_variants) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return asset_variants;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const asset_variants = await db.asset_variants.findByPk(id, options);
|
||||
|
||||
await asset_variants.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await asset_variants.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return asset_variants;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const asset_variants = await db.asset_variants.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!asset_variants) {
|
||||
return asset_variants;
|
||||
}
|
||||
|
||||
const output = asset_variants.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.asset = await asset_variants.getAsset({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'asset' }];
|
||||
}
|
||||
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [
|
||||
{
|
||||
model: db.assets,
|
||||
as: 'asset',
|
||||
|
||||
where: filter.asset ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.asset.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.asset.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
required: false,
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'asset',
|
||||
model: db.assets,
|
||||
as: 'asset',
|
||||
searchField: 'name',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
if (filter.cdn_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'asset_variants',
|
||||
'cdn_url',
|
||||
filter.cdn_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
assetId: data.assetId || null,
|
||||
variant_type: data.variant_type || null,
|
||||
cdn_url: data.cdn_url || null,
|
||||
storage_key: data.storage_key || null,
|
||||
width_px: data.width_px || null,
|
||||
height_px: data.height_px || null,
|
||||
size_mb: data.size_mb || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.width_pxRange) {
|
||||
const [start, end] = filter.width_pxRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
width_px: {
|
||||
...where.width_px,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
width_px: {
|
||||
...where.width_px,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.height_pxRange) {
|
||||
const [start, end] = filter.height_pxRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
height_px: {
|
||||
...where.height_px,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
height_px: {
|
||||
...where.height_px,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.size_mbRange) {
|
||||
const [start, end] = filter.size_mbRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
size_mb: {
|
||||
...where.size_mb,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
size_mb: {
|
||||
...where.size_mb,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.variant_type) {
|
||||
where = {
|
||||
...where,
|
||||
variant_type: filter.variant_type,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.asset_variants.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'asset_variants',
|
||||
'variant_type',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.asset_variants.findAll({
|
||||
attributes: [ 'id', 'variant_type' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['variant_type', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.variant_type,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = Asset_variantsDBApi;
|
||||
|
||||
@ -1,721 +1,92 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class AssetsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.assets;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'assets';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['name', 'cdn_url', 'storage_key', 'mime_type', 'checksum'];
|
||||
}
|
||||
|
||||
module.exports = class AssetsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['size_mb', 'width_px', 'height_px', 'duration_sec'];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['asset_type', 'type', 'is_public'];
|
||||
}
|
||||
|
||||
const assets = await db.assets.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
asset_type: data.asset_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cdn_url: data.cdn_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
storage_key: data.storage_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
mime_type: data.mime_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
size_mb: data.size_mb
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
width_px: data.width_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
height_px: data.height_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
duration_sec: data.duration_sec
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
checksum: data.checksum
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_public: data.is_public
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
is_deleted: data.is_deleted
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
deleted_at_time: data.deleted_at_time
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get UUID_FIELDS() {
|
||||
return ['projectId'];
|
||||
}
|
||||
|
||||
|
||||
await assets.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'name',
|
||||
'asset_type',
|
||||
'type',
|
||||
'cdn_url',
|
||||
'storage_key',
|
||||
'mime_type',
|
||||
'size_mb',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'name';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'project', setter: 'setProject', isArray: false }];
|
||||
}
|
||||
|
||||
return assets;
|
||||
}
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [
|
||||
{ association: 'asset_variants_asset' },
|
||||
{ association: 'project' },
|
||||
];
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const assetsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
asset_type: item.asset_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cdn_url: item.cdn_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
storage_key: item.storage_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
mime_type: item.mime_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
size_mb: item.size_mb
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
width_px: item.width_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
height_px: item.height_px
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
duration_sec: item.duration_sec
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
checksum: item.checksum
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_public: item.is_public
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
is_deleted: item.is_deleted
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
deleted_at_time: item.deleted_at_time
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const assets = await db.assets.bulkCreate(assetsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return assets;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const assets = await db.assets.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.asset_type !== undefined) updatePayload.asset_type = data.asset_type;
|
||||
|
||||
|
||||
if (data.cdn_url !== undefined) updatePayload.cdn_url = data.cdn_url;
|
||||
|
||||
|
||||
if (data.storage_key !== undefined) updatePayload.storage_key = data.storage_key;
|
||||
|
||||
|
||||
if (data.mime_type !== undefined) updatePayload.mime_type = data.mime_type;
|
||||
|
||||
|
||||
if (data.size_mb !== undefined) updatePayload.size_mb = data.size_mb;
|
||||
|
||||
|
||||
if (data.width_px !== undefined) updatePayload.width_px = data.width_px;
|
||||
|
||||
|
||||
if (data.height_px !== undefined) updatePayload.height_px = data.height_px;
|
||||
|
||||
|
||||
if (data.duration_sec !== undefined) updatePayload.duration_sec = data.duration_sec;
|
||||
|
||||
|
||||
if (data.checksum !== undefined) updatePayload.checksum = data.checksum;
|
||||
|
||||
|
||||
if (data.is_public !== undefined) updatePayload.is_public = data.is_public;
|
||||
|
||||
|
||||
if (data.is_deleted !== undefined) updatePayload.is_deleted = data.is_deleted;
|
||||
|
||||
|
||||
if (data.deleted_at_time !== undefined) updatePayload.deleted_at_time = data.deleted_at_time;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await assets.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await assets.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return assets;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const assets = await db.assets.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of assets) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of assets) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return assets;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const assets = await db.assets.findByPk(id, options);
|
||||
|
||||
await assets.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await assets.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return assets;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const assets = await db.assets.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!assets) {
|
||||
return assets;
|
||||
}
|
||||
|
||||
const output = assets.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.asset_variants_asset = await assets.getAsset_variants_asset({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await assets.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [{ model: db.projects, as: 'project', required: false }];
|
||||
}
|
||||
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'project',
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'name',
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
name: data.name || null,
|
||||
asset_type: data.asset_type || null,
|
||||
type: data.type || 'general',
|
||||
cdn_url: data.cdn_url || null,
|
||||
storage_key: data.storage_key || null,
|
||||
mime_type: data.mime_type || null,
|
||||
size_mb: data.size_mb || null,
|
||||
width_px: data.width_px || null,
|
||||
height_px: data.height_px || null,
|
||||
duration_sec: data.duration_sec || null,
|
||||
checksum: data.checksum || null,
|
||||
is_public: data.is_public || false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'assets',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.cdn_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'assets',
|
||||
'cdn_url',
|
||||
filter.cdn_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.storage_key) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'assets',
|
||||
'storage_key',
|
||||
filter.storage_key,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.mime_type) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'assets',
|
||||
'mime_type',
|
||||
filter.mime_type,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.checksum) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'assets',
|
||||
'checksum',
|
||||
filter.checksum,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.size_mbRange) {
|
||||
const [start, end] = filter.size_mbRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
size_mb: {
|
||||
...where.size_mb,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
size_mb: {
|
||||
...where.size_mb,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.width_pxRange) {
|
||||
const [start, end] = filter.width_pxRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
width_px: {
|
||||
...where.width_px,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
width_px: {
|
||||
...where.width_px,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.height_pxRange) {
|
||||
const [start, end] = filter.height_pxRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
height_px: {
|
||||
...where.height_px,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
height_px: {
|
||||
...where.height_px,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.duration_secRange) {
|
||||
const [start, end] = filter.duration_secRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
duration_sec: {
|
||||
...where.duration_sec,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
duration_sec: {
|
||||
...where.duration_sec,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.deleted_at_timeRange) {
|
||||
const [start, end] = filter.deleted_at_timeRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
deleted_at_time: {
|
||||
...where.deleted_at_time,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
deleted_at_time: {
|
||||
...where.deleted_at_time,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.asset_type) {
|
||||
where = {
|
||||
...where,
|
||||
asset_type: filter.asset_type,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_public) {
|
||||
where = {
|
||||
...where,
|
||||
is_public: filter.is_public,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_deleted) {
|
||||
where = {
|
||||
...where,
|
||||
is_deleted: filter.is_deleted,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.assets.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'assets',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.assets.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = AssetsDBApi;
|
||||
|
||||
480
backend/src/db/api/base.api.js
Normal file
480
backend/src/db/api/base.api.js
Normal file
@ -0,0 +1,480 @@
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const { parse } = require('json2csv');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
class GenericDBApi {
  // Sequelize model this API operates on. Every subclass must override this.
  static get MODEL() {
    throw new Error('MODEL must be defined in subclass');
  }

  // Physical table name, derived from the model; used for ILIKE helpers and
  // not-found error messages. Subclasses may override with a literal string.
  static get TABLE_NAME() {
    return this.MODEL.getTableName();
  }

  // Text columns matched with case-insensitive partial search in findAll.
  static get SEARCHABLE_FIELDS() {
    return [];
  }

  // Columns filterable via `<field>Range` [start, end] filters in findAll.
  static get RANGE_FIELDS() {
    return [];
  }

  // Columns filtered by exact equality in findAll.
  static get ENUM_FIELDS() {
    return [];
  }

  /**
   * UUID fields that require validation before querying.
   * These are typically foreign key fields like 'projectId'.
   * Invalid UUIDs will return empty results instead of causing DB errors.
   * Override in subclass to specify fields.
   * Example: return ['projectId', 'userId'];
   */
  static get UUID_FIELDS() {
    return [];
  }

  // Related-model filters applied in findAll. Each entry describes
  // { filterKey, model, as, searchField? }; matched relations become
  // required includes with an OR of id-in-UUIDs and text search.
  static get RELATION_FILTERS() {
    return [];
  }

  // Columns exported by toCSV.
  static get CSV_FIELDS() {
    return ['id', 'createdAt'];
  }

  // Column returned as `label` by findAllAutocomplete and used for its sort.
  static get AUTOCOMPLETE_FIELD() {
    return 'name';
  }

  // Association setters invoked by create/update. Each entry looks like
  // { field, setter, isArray } where `setter` is a Sequelize setter name.
  static get ASSOCIATIONS() {
    return [];
  }

  // Default Sequelize `include` list used by findBy (caller may override
  // via options.include).
  static get FIND_BY_INCLUDES() {
    return [];
  }

  // Base Sequelize `include` list used by findAll; relation filters are
  // prepended to a copy of this list.
  static get FIND_ALL_INCLUDES() {
    return [];
  }

  /**
   * Fields that should be automatically JSON-stringified
   * Override in subclass to specify fields.
   * Example: return ['settings_json', 'metadata_json'];
   */
  static get JSON_FIELDS() {
    return [];
  }

  /**
   * Custom field transformers for data mapping.
   * Override in subclass to add custom transformations.
   * Example:
   * return {
   *   email: (value) => value?.toLowerCase().trim(),
   *   slug: (value) => value?.toLowerCase().replace(/\s+/g, '-'),
   * };
   */
  static get FIELD_TRANSFORMERS() {
    return {};
  }

  /**
   * Field mapping configuration for declarative field handling.
   * Override in subclass to specify how fields should be mapped.
   * Example:
   * return {
   *   name: { default: null },
   *   sort_order: { default: 0 },
   *   is_active: { default: true },
   * };
   */
  static get FIELD_DEFAULTS() {
    return {};
  }

  /**
   * Transform input data for database operations.
   * Template Method Pattern: Uses JSON_FIELDS, FIELD_TRANSFORMERS, and FIELD_DEFAULTS
   * to declaratively transform data, reducing boilerplate in subclasses.
   *
   * Override this method for complex custom transformations that can't be
   * expressed declaratively.
   *
   * @param {Object} data - Input data to transform
   * @returns {Object} - Transformed data ready for database
   */
  static getFieldMapping(data) {
    if (!data) return data;
    const mapped = { ...data };

    // Apply field defaults: `default` fills in missing (undefined) values;
    // `nullDefault`, when configured, replaces explicit nulls.
    for (const [field, config] of Object.entries(this.FIELD_DEFAULTS)) {
      if (mapped[field] === undefined) {
        mapped[field] = config.default;
      } else if (mapped[field] === null && config.nullDefault !== undefined) {
        mapped[field] = config.nullDefault;
      }
    }

    // Auto-stringify JSON fields (values that are already strings pass through)
    for (const field of this.JSON_FIELDS) {
      if (mapped[field] !== undefined && mapped[field] !== null) {
        if (typeof mapped[field] !== 'string') {
          mapped[field] = JSON.stringify(mapped[field]);
        }
      }
    }

    // Apply custom transformers (run last, after defaults and stringification)
    for (const [field, transformer] of Object.entries(
      this.FIELD_TRANSFORMERS,
    )) {
      if (mapped[field] !== undefined) {
        mapped[field] = transformer(mapped[field]);
      }
    }

    return mapped;
  }

  /**
   * Create one record from `data`, stamping audit columns, then wire up any
   * declared ASSOCIATIONS that are present in `data`.
   *
   * @param {Object} data - Raw input; passed through getFieldMapping.
   * @param {Object} [options] - { currentUser, transaction }
   * @returns {Promise<Object>} The created model instance.
   */
  static async create(data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const mappedData = this.getFieldMapping(data);

    const record = await this.MODEL.create(
      {
        ...mappedData,
        importHash: data.importHash || null,
        createdById: currentUser.id,
        updatedById: currentUser.id,
      },
      { transaction },
    );

    for (const assoc of this.ASSOCIATIONS) {
      if (data[assoc.field] !== undefined) {
        // Falsy values clear the association ([] for many, null for one).
        await record[assoc.setter](
          data[assoc.field] || (assoc.isArray ? [] : null),
          { transaction },
        );
      }
    }

    return record;
  }

  /**
   * Bulk-insert records. Each row gets audit columns and a createdAt
   * staggered by one second per index so import order is stable under the
   * default createdAt sort.
   *
   * @param {Array<Object>} data - Raw inputs; each passed through getFieldMapping.
   * @param {Object} [options] - { currentUser, transaction }
   * @returns {Promise<Array<Object>>} The created model instances.
   */
  static async bulkImport(data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const recordsData = data.map((item, index) => ({
      ...this.getFieldMapping(item),
      importHash: item.importHash || null,
      createdById: currentUser.id,
      updatedById: currentUser.id,
      createdAt: new Date(Date.now() + index * 1000),
    }));

    return this.MODEL.bulkCreate(recordsData, { transaction });
  }

  /**
   * Update a record by id. `data` is passed through getFieldMapping (so
   * FIELD_DEFAULTS may fill absent fields — see partialUpdate for a strict
   * alternative), undefined values are skipped, and ASSOCIATIONS present in
   * `data` are re-pointed.
   *
   * @param {string} id - Primary key.
   * @param {Object} data - Fields to update.
   * @param {Object} [options] - { currentUser, transaction }
   * @throws {{status: number, message: string}} 404 when the id is unknown.
   * @returns {Promise<Object>} The updated model instance.
   */
  static async update(id, data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const record = await this.MODEL.findByPk(id, { transaction });

    if (!record) {
      throw { status: 404, message: `${this.TABLE_NAME} not found` };
    }

    const updatePayload = { updatedById: currentUser.id };
    const mappedData = this.getFieldMapping(data);

    for (const [key, value] of Object.entries(mappedData)) {
      if (value !== undefined) {
        updatePayload[key] = value;
      }
    }

    await record.update(updatePayload, { transaction });

    for (const assoc of this.ASSOCIATIONS) {
      if (data[assoc.field] !== undefined) {
        await record[assoc.setter](data[assoc.field], { transaction });
      }
    }

    return record;
  }

  /**
   * Partial update - only updates fields explicitly passed in data.
   * Unlike update(), this doesn't go through getFieldMapping which
   * converts missing fields to null.
   *
   * Use this when you need to update specific fields without affecting others.
   *
   * @param {string} id - Record ID
   * @param {Object} data - Fields to update (only these will be modified)
   * @param {Object} options - Options with currentUser and transaction
   */
  static async partialUpdate(id, data, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const record = await this.MODEL.findByPk(id, { transaction });

    if (!record) {
      throw { status: 404, message: `${this.TABLE_NAME} not found` };
    }

    const updatePayload = { updatedById: currentUser.id };

    // Only include fields that are explicitly in the data object
    for (const [key, value] of Object.entries(data)) {
      if (value !== undefined) {
        updatePayload[key] = value;
      }
    }

    await record.update(updatePayload, { transaction });

    return record;
  }

  /**
   * Soft-delete all records whose ids are in `ids`: stamp deletedBy in a
   * first pass, destroy in a second. Runs under the caller's transaction
   * when provided.
   *
   * @param {Array<string>} ids - Primary keys to delete.
   * @param {Object} [options] - { currentUser, transaction }
   * @returns {Promise<Array<Object>>} The affected model instances.
   */
  static async deleteByIds(ids, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const records = await this.MODEL.findAll({
      where: { id: { [Op.in]: ids } },
      transaction,
    });

    for (const record of records) {
      await record.update({ deletedBy: currentUser.id }, { transaction });
    }
    for (const record of records) {
      await record.destroy({ transaction });
    }

    return records;
  }

  /**
   * Soft-delete a single record: stamp deletedBy, then destroy.
   *
   * @param {string} id - Primary key.
   * @param {Object} [options] - { currentUser, transaction }
   * @throws {{status: number, message: string}} 404 when the id is unknown.
   * @returns {Promise<Object>} The removed model instance.
   */
  static async remove(id, options = {}) {
    const currentUser = options.currentUser || { id: null };
    const transaction = options.transaction;

    const record = await this.MODEL.findByPk(id, { transaction });

    if (!record) {
      throw { status: 404, message: `${this.TABLE_NAME} not found` };
    }

    await record.update({ deletedBy: currentUser.id }, { transaction });
    await record.destroy({ transaction });

    return record;
  }

  /**
   * Fetch a single record matching `where` as a plain object.
   * Includes default to FIND_BY_INCLUDES unless overridden via options.include.
   *
   * @param {Object} where - Sequelize where clause.
   * @param {Object} [options] - { transaction, include }
   * @returns {Promise<Object|null>} Plain record or null when not found.
   */
  static async findBy(where, options = {}) {
    const transaction = options.transaction;
    const include =
      options.include !== undefined ? options.include : this.FIND_BY_INCLUDES;

    const record = await this.MODEL.findOne({
      where,
      transaction,
      include,
    });

    if (!record) {
      return null;
    }

    return record.get({ plain: true });
  }

  /**
   * List records with declarative filtering (SEARCHABLE_FIELDS,
   * RANGE_FIELDS, ENUM_FIELDS, UUID_FIELDS, RELATION_FILTERS), paging
   * (filter.page * filter.limit) and sorting (filter.field/filter.sort,
   * default createdAt desc).
   *
   * @param {Object} [filter] - Filter, paging and sort parameters.
   * @param {Object} [options] - { transaction, countOnly }
   * @returns {Promise<{rows: Array<Object>, count: number}>}
   */
  static async findAll(filter = {}, options = {}) {
    filter = filter || {};
    const limit = filter.limit || 0;
    const currentPage = +filter.page || 0;
    const offset = currentPage * limit;

    let where = {};
    let include = [...this.FIND_ALL_INCLUDES];

    if (filter.id) {
      // Invalid UUID would make the DB query error out; short-circuit instead.
      if (!Utils.isValidUuid(filter.id)) {
        return { rows: [], count: 0 };
      }
      where.id = filter.id;
    }

    for (const field of this.SEARCHABLE_FIELDS) {
      if (filter[field]) {
        where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
      }
    }

    for (const field of this.RANGE_FIELDS) {
      const rangeKey = `${field}Range`;
      if (filter[rangeKey]) {
        const [start, end] = filter[rangeKey];
        if (start !== undefined && start !== null && start !== '') {
          where[field] = { ...where[field], [Op.gte]: start };
        }
        if (end !== undefined && end !== null && end !== '') {
          where[field] = { ...where[field], [Op.lte]: end };
        }
      }
    }

    for (const field of this.ENUM_FIELDS) {
      if (filter[field] !== undefined) {
        where[field] = filter[field];
      }
    }

    // Validate UUID fields - return empty results for invalid UUIDs
    for (const field of this.UUID_FIELDS) {
      if (filter[field] !== undefined) {
        if (!Utils.isValidUuid(filter[field])) {
          return { rows: [], count: 0 };
        }
        where[field] = filter[field];
      }
    }

    if (filter.active !== undefined) {
      // Accept both boolean true and the string 'true' (query-string input).
      where.active = filter.active === true || filter.active === 'true';
    }

    if (filter.createdAtRange) {
      const [start, end] = filter.createdAtRange;
      if (start !== undefined && start !== null && start !== '') {
        where.createdAt = { ...where.createdAt, [Op.gte]: start };
      }
      if (end !== undefined && end !== null && end !== '') {
        where.createdAt = { ...where.createdAt, [Op.lte]: end };
      }
    }

    for (const rel of this.RELATION_FILTERS) {
      if (filter[rel.filterKey]) {
        // '|'-separated terms: each may be a UUID or a text fragment.
        const searchTerms = filter[rel.filterKey].split('|');
        const validUuids = Utils.filterValidUuids(searchTerms);

        // Build OR conditions array
        const orConditions = [];

        // Add UUID condition only if there are valid UUIDs
        if (validUuids.length > 0) {
          orConditions.push({ id: { [Op.in]: validUuids } });
        }

        // Add text search condition if searchField is defined
        if (rel.searchField) {
          orConditions.push({
            [rel.searchField]: {
              [Op.or]: searchTerms.map((term) => ({
                [Op.iLike]: `%${term}%`,
              })),
            },
          });
        }

        const relInclude = {
          model: rel.model,
          as: rel.as,
          required: orConditions.length > 0,
          where:
            orConditions.length > 0 ? { [Op.or]: orConditions } : undefined,
        };
        // Filtered relations go first so they constrain the join.
        include = [relInclude, ...include];
      }
    }

    try {
      if (options.countOnly) {
        const count = await this.MODEL.count({
          where,
          // Count only needs the includes that actually constrain the result.
          include: include.filter((entry) => entry.required || entry.where),
          distinct: true,
          transaction: options.transaction,
        });

        return {
          rows: [],
          count,
        };
      }

      const queryOptions = {
        where,
        include,
        distinct: true,
        order:
          filter.field && filter.sort
            ? [[filter.field, filter.sort]]
            : [['createdAt', 'desc']],
        transaction: options.transaction,
        limit: limit ? Number(limit) : undefined,
        offset: offset ? Number(offset) : undefined,
      };

      const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
      return {
        rows,
        count,
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }

  /**
   * id/label lookup for autocomplete widgets: matches `query` against the id
   * (when it is a valid UUID) or AUTOCOMPLETE_FIELD (case-insensitive
   * partial match), sorted by AUTOCOMPLETE_FIELD ascending.
   *
   * @param {string} query - Free-text search term.
   * @param {number} [limit] - Max rows to return.
   * @param {number} [offset] - Rows to skip.
   * @returns {Promise<Array<{id: string, label: string}>>}
   */
  static async findAllAutocomplete(query, limit, offset) {
    let where = {};

    if (query) {
      const orConditions = [
        Utils.ilike(this.TABLE_NAME, this.AUTOCOMPLETE_FIELD, query),
      ];

      if (Utils.isValidUuid(query)) {
        orConditions.unshift({ id: query });
      }

      where = { [Op.or]: orConditions };
    }

    const records = await this.MODEL.findAll({
      attributes: ['id', this.AUTOCOMPLETE_FIELD],
      where,
      limit: limit ? Number(limit) : undefined,
      offset: offset ? Number(offset) : undefined,
      order: [[this.AUTOCOMPLETE_FIELD, 'ASC']],
    });

    return records.map((record) => ({
      id: record.id,
      label: record[this.AUTOCOMPLETE_FIELD],
    }));
  }

  /**
   * Serialize `rows` to CSV using the subclass's CSV_FIELDS.
   *
   * @param {Array<Object>} rows - Plain record objects.
   * @returns {string} CSV text.
   */
  static toCSV(rows) {
    const opts = { fields: this.CSV_FIELDS };
    return parse(rows, opts);
  }
}

module.exports = GenericDBApi;
|
||||
298
backend/src/db/api/element_type_defaults.js
Normal file
298
backend/src/db/api/element_type_defaults.js
Normal file
@ -0,0 +1,298 @@
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
|
||||
class Element_type_defaultsDBApi extends GenericDBApi {
|
||||
  // Sequelize model backing this API; required by the GenericDBApi base class.
  static get MODEL() {
    return db.element_type_defaults;
  }
|
||||
|
||||
  // Literal table name (overrides the base class's MODEL.getTableName()).
  static get TABLE_NAME() {
    return 'element_type_defaults';
  }
|
||||
|
||||
  // Text columns matched with case-insensitive partial search in findAll.
  static get SEARCHABLE_FIELDS() {
    return ['name', 'element_type'];
  }
|
||||
|
||||
  // Columns filterable via `<field>Range` [start, end] filters in findAll.
  static get RANGE_FIELDS() {
    return ['sort_order'];
  }
|
||||
|
||||
  // No exact-match enum filter fields for this table.
  static get ENUM_FIELDS() {
    return [];
  }
|
||||
|
||||
  // Columns exported by GenericDBApi.toCSV.
  // NOTE(review): lists 'is_active', which getFieldMapping below does not
  // persist — confirm the column actually exists on the model.
  static get CSV_FIELDS() {
    return [
      'id',
      'element_type',
      'name',
      'sort_order',
      'is_active',
      'createdAt',
    ];
  }
|
||||
|
||||
  // Label column returned (and sorted on) by findAllAutocomplete.
  static get AUTOCOMPLETE_FIELD() {
    return 'name';
  }
|
||||
|
||||
// Declarative field configuration using base class patterns
|
||||
  static get JSON_FIELDS() {
    // Auto-stringified by GenericDBApi.getFieldMapping when not already a string.
    return ['default_settings_json'];
  }
|
||||
|
||||
  static get FIELD_DEFAULTS() {
    // Values applied when the field is absent (undefined) in input data.
    return {
      element_type: { default: null },
      name: { default: null },
      sort_order: { default: 0 },
    };
  }
|
||||
|
||||
static getFieldMapping(data) {
|
||||
// Apply base class transformations (JSON fields, defaults, transformers)
|
||||
const mapped = super.getFieldMapping(data);
|
||||
|
||||
return {
|
||||
id: mapped.id || undefined,
|
||||
element_type: mapped.element_type,
|
||||
name: mapped.name,
|
||||
sort_order: mapped.sort_order,
|
||||
default_settings_json: mapped.default_settings_json,
|
||||
};
|
||||
}
|
||||
|
||||
static get DEFAULT_ROWS() {
|
||||
return [
|
||||
{
|
||||
element_type: 'navigation_next',
|
||||
name: 'Navigation Forward Button',
|
||||
sort_order: 1,
|
||||
default_settings_json: {
|
||||
label: 'Navigation: Forward',
|
||||
navLabel: 'Forward',
|
||||
navType: 'forward',
|
||||
navDisabled: false,
|
||||
transitionReverseMode: 'auto_reverse',
|
||||
transitionDurationSec: 0.7,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'navigation_prev',
|
||||
name: 'Navigation Back Button',
|
||||
sort_order: 2,
|
||||
default_settings_json: {
|
||||
label: 'Navigation: Back',
|
||||
navLabel: 'Back',
|
||||
navType: 'back',
|
||||
navDisabled: false,
|
||||
transitionReverseMode: 'auto_reverse',
|
||||
transitionDurationSec: 0.7,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'tooltip',
|
||||
name: 'Tooltip',
|
||||
sort_order: 3,
|
||||
default_settings_json: {
|
||||
label: 'Tooltip',
|
||||
tooltipTitle: 'Tooltip title',
|
||||
tooltipText: 'Tooltip text',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'description',
|
||||
name: 'Description',
|
||||
sort_order: 4,
|
||||
default_settings_json: {
|
||||
label: 'Description',
|
||||
descriptionTitle: 'TITLE',
|
||||
descriptionText: '',
|
||||
descriptionTitleFontSize: '48px',
|
||||
descriptionTextFontSize: '36px',
|
||||
descriptionTitleFontFamily: 'inherit',
|
||||
descriptionTextFontFamily: 'inherit',
|
||||
descriptionTitleColor: '#000000',
|
||||
descriptionTextColor: '#4B5563',
|
||||
descriptionBackgroundColor: 'transparent',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'gallery',
|
||||
name: 'Gallery',
|
||||
sort_order: 5,
|
||||
default_settings_json: {
|
||||
label: 'Gallery',
|
||||
galleryCards: [{ imageUrl: '', title: 'Card 1', description: '' }],
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'carousel',
|
||||
name: 'Carousel',
|
||||
sort_order: 6,
|
||||
default_settings_json: {
|
||||
label: 'Carousel',
|
||||
carouselSlides: [{ imageUrl: '', caption: 'Slide 1' }],
|
||||
carouselPrevIconUrl: '',
|
||||
carouselNextIconUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'video_player',
|
||||
name: 'Video Player',
|
||||
sort_order: 7,
|
||||
default_settings_json: {
|
||||
label: 'Video Player',
|
||||
mediaUrl: '',
|
||||
mediaAutoplay: true,
|
||||
mediaLoop: true,
|
||||
mediaMuted: true,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'audio_player',
|
||||
name: 'Audio Player',
|
||||
sort_order: 8,
|
||||
default_settings_json: {
|
||||
label: 'Audio Player',
|
||||
mediaUrl: '',
|
||||
mediaAutoplay: true,
|
||||
mediaLoop: true,
|
||||
mediaMuted: false,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'spot',
|
||||
name: 'Hotspot',
|
||||
sort_order: 9,
|
||||
default_settings_json: {
|
||||
label: 'Hotspot',
|
||||
iconUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'logo',
|
||||
name: 'Logo',
|
||||
sort_order: 10,
|
||||
default_settings_json: {
|
||||
label: 'Logo',
|
||||
iconUrl: '',
|
||||
backgroundImageUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'popup',
|
||||
name: 'Popup',
|
||||
sort_order: 11,
|
||||
default_settings_json: {
|
||||
label: 'Popup',
|
||||
iconUrl: '',
|
||||
popupTitle: '',
|
||||
popupContent: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
static async ensureInitialized() {
|
||||
if (!this.initializationPromise) {
|
||||
this.initializationPromise = (async () => {
|
||||
let count = 0;
|
||||
|
||||
try {
|
||||
count = await this.MODEL.count();
|
||||
} catch (error) {
|
||||
if (error?.original?.code !== '42P01') {
|
||||
throw error;
|
||||
}
|
||||
|
||||
await this.MODEL.sync();
|
||||
count = await this.MODEL.count();
|
||||
}
|
||||
|
||||
if (count > 0) return;
|
||||
|
||||
const now = new Date();
|
||||
await this.MODEL.bulkCreate(
|
||||
this.DEFAULT_ROWS.map((item) => ({
|
||||
...this.getFieldMapping(item),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})),
|
||||
);
|
||||
})().catch((error) => {
|
||||
this.initializationPromise = null;
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
await this.initializationPromise;
|
||||
}
|
||||
|
||||
static async create(data, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.create(data, options);
|
||||
}
|
||||
|
||||
static async bulkImport(data, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.bulkImport(data, options);
|
||||
}
|
||||
|
||||
static async update(id, data, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.update(id, data, options);
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.deleteByIds(ids, options);
|
||||
}
|
||||
|
||||
static async remove(id, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.remove(id, options);
|
||||
}
|
||||
|
||||
static async findBy(where, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.findBy(where, options);
|
||||
}
|
||||
|
||||
static async findAll(filter = {}, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.findAll(filter, options);
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset) {
|
||||
await this.ensureInitialized();
|
||||
return super.findAllAutocomplete(query, limit, offset);
|
||||
}
|
||||
}
|
||||
|
||||
Element_type_defaultsDBApi.initializationPromise = null;
|
||||
|
||||
module.exports = Element_type_defaultsDBApi;
|
||||
@ -1,18 +1,11 @@
|
||||
const db = require('../models');
|
||||
const assert = require('assert');
|
||||
const services = require('../../services/file');
|
||||
const services = require('../../services/file/');
|
||||
|
||||
module.exports = class FileDBApi {
|
||||
static async replaceRelationFiles(
|
||||
relation,
|
||||
rawFiles,
|
||||
options,
|
||||
) {
|
||||
static async replaceRelationFiles(relation, rawFiles, options) {
|
||||
assert(relation.belongsTo, 'belongsTo is required');
|
||||
assert(
|
||||
relation.belongsToColumn,
|
||||
'belongsToColumn is required',
|
||||
);
|
||||
assert(relation.belongsToColumn, 'belongsToColumn is required');
|
||||
assert(relation.belongsToId, 'belongsToId is required');
|
||||
|
||||
let files = [];
|
||||
@ -29,11 +22,9 @@ module.exports = class FileDBApi {
|
||||
|
||||
static async _addFiles(relation, files, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
|
||||
const inexistentFiles = files.filter(
|
||||
(file) => !!file.new,
|
||||
);
|
||||
const inexistentFiles = files.filter((file) => !!file.new);
|
||||
|
||||
for (const file of inexistentFiles) {
|
||||
await db.file.create(
|
||||
@ -55,11 +46,7 @@ module.exports = class FileDBApi {
|
||||
}
|
||||
}
|
||||
|
||||
static async _removeLegacyFiles(
|
||||
relation,
|
||||
files,
|
||||
options,
|
||||
) {
|
||||
static async _removeLegacyFiles(relation, files, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const filesToDelete = await db.file.findAll({
|
||||
@ -68,10 +55,9 @@ module.exports = class FileDBApi {
|
||||
belongsToId: relation.belongsToId,
|
||||
belongsToColumn: relation.belongsToColumn,
|
||||
id: {
|
||||
[db.Sequelize.Op
|
||||
.notIn]: files
|
||||
[db.Sequelize.Op.notIn]: files
|
||||
.filter((file) => !file.new)
|
||||
.map((file) => file.id)
|
||||
.map((file) => file.id),
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
|
||||
155
backend/src/db/api/global_transition_defaults.js
Normal file
155
backend/src/db/api/global_transition_defaults.js
Normal file
@ -0,0 +1,155 @@
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
|
||||
/**
|
||||
* Global Transition Defaults API
|
||||
*
|
||||
* Single-row table pattern for platform-wide transition settings.
|
||||
* Auto-seeds default values if the table is empty.
|
||||
*/
|
||||
class Global_transition_defaultsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.global_transition_defaults;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'global_transition_defaults';
|
||||
}
|
||||
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
static get ENUM_FIELDS() {
|
||||
return ['transition_type', 'easing'];
|
||||
}
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'transition_type',
|
||||
'duration_ms',
|
||||
'easing',
|
||||
'overlay_color',
|
||||
'createdAt',
|
||||
'updatedAt',
|
||||
];
|
||||
}
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'transition_type';
|
||||
}
|
||||
|
||||
static get FIELD_DEFAULTS() {
|
||||
return {
|
||||
transition_type: { default: 'fade' },
|
||||
duration_ms: { default: 700 },
|
||||
easing: { default: 'ease-in-out' },
|
||||
overlay_color: { default: '#000000' },
|
||||
};
|
||||
}
|
||||
|
||||
static get DEFAULT_ROW() {
|
||||
return {
|
||||
transition_type: 'fade',
|
||||
duration_ms: 700,
|
||||
easing: 'ease-in-out',
|
||||
overlay_color: '#000000',
|
||||
};
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
const mapped = super.getFieldMapping(data);
|
||||
|
||||
return {
|
||||
id: mapped.id || undefined,
|
||||
transition_type: mapped.transition_type,
|
||||
duration_ms: mapped.duration_ms,
|
||||
easing: mapped.easing,
|
||||
overlay_color: mapped.overlay_color,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures the singleton row exists.
|
||||
* Creates the default row if table is empty.
|
||||
*/
|
||||
static async ensureInitialized() {
|
||||
if (!this.initializationPromise) {
|
||||
this.initializationPromise = (async () => {
|
||||
let count = 0;
|
||||
|
||||
try {
|
||||
count = await this.MODEL.count();
|
||||
} catch (error) {
|
||||
// Table doesn't exist yet (happens during initial migration)
|
||||
if (error?.original?.code !== '42P01') {
|
||||
throw error;
|
||||
}
|
||||
|
||||
await this.MODEL.sync();
|
||||
count = await this.MODEL.count();
|
||||
}
|
||||
|
||||
if (count > 0) return;
|
||||
|
||||
const now = new Date();
|
||||
await this.MODEL.create({
|
||||
...this.getFieldMapping(this.DEFAULT_ROW),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
});
|
||||
})().catch((error) => {
|
||||
this.initializationPromise = null;
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
await this.initializationPromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the singleton row.
|
||||
* Always returns a single object, not an array.
|
||||
*/
|
||||
static async findOne(options = {}) {
|
||||
await this.ensureInitialized();
|
||||
|
||||
const record = await this.MODEL.findOne({
|
||||
transaction: options.transaction,
|
||||
});
|
||||
|
||||
if (!record) return null;
|
||||
return record.get({ plain: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Alias for findOne to maintain semantic clarity.
|
||||
*/
|
||||
static async get(options = {}) {
|
||||
return this.findOne(options);
|
||||
}
|
||||
|
||||
static async update(id, data, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.update(id, data, options);
|
||||
}
|
||||
|
||||
static async findBy(where, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.findBy(where, options);
|
||||
}
|
||||
|
||||
static async findAll(filter = {}, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
return super.findAll(filter, options);
|
||||
}
|
||||
}
|
||||
|
||||
Global_transition_defaultsDBApi.initializationPromise = null;
|
||||
|
||||
module.exports = Global_transition_defaultsDBApi;
|
||||
@ -1,656 +0,0 @@
|
||||
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const {
|
||||
getRuntimeEnvironment,
|
||||
getRuntimeProjectSlug,
|
||||
} = require('./runtime-context');
|
||||
|
||||
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class Page_elementsDBApi {
|
||||
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const page_elements = await db.page_elements.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
element_type: data.element_type ?? null,
|
||||
|
||||
name: data.name ?? null,
|
||||
|
||||
sort_order: data.sort_order ?? 0,
|
||||
|
||||
is_visible: data.is_visible ?? false,
|
||||
|
||||
x_percent: data.x_percent ?? null,
|
||||
|
||||
y_percent: data.y_percent ?? null,
|
||||
|
||||
width_percent: data.width_percent ?? null,
|
||||
|
||||
height_percent: data.height_percent ?? null,
|
||||
|
||||
rotation_deg: data.rotation_deg ?? null,
|
||||
|
||||
style_json: data.style_json ?? null,
|
||||
|
||||
content_json: data.content_json ?? null,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
|
||||
await page_elements.setPage( data.page || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return page_elements;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const page_elementsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
element_type: item.element_type ?? null,
|
||||
|
||||
name: item.name ?? null,
|
||||
|
||||
sort_order: item.sort_order ?? 0,
|
||||
|
||||
is_visible: item.is_visible ?? false,
|
||||
|
||||
x_percent: item.x_percent ?? null,
|
||||
|
||||
y_percent: item.y_percent ?? null,
|
||||
|
||||
width_percent: item.width_percent ?? null,
|
||||
|
||||
height_percent: item.height_percent ?? null,
|
||||
|
||||
rotation_deg: item.rotation_deg ?? null,
|
||||
|
||||
style_json: item.style_json ?? null,
|
||||
|
||||
content_json: item.content_json ?? null,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const page_elements = await db.page_elements.bulkCreate(page_elementsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return page_elements;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const page_elements = await db.page_elements.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.element_type !== undefined) updatePayload.element_type = data.element_type;
|
||||
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
|
||||
|
||||
|
||||
if (data.is_visible !== undefined) updatePayload.is_visible = data.is_visible;
|
||||
|
||||
|
||||
if (data.x_percent !== undefined) updatePayload.x_percent = data.x_percent;
|
||||
|
||||
|
||||
if (data.y_percent !== undefined) updatePayload.y_percent = data.y_percent;
|
||||
|
||||
|
||||
if (data.width_percent !== undefined) updatePayload.width_percent = data.width_percent;
|
||||
|
||||
|
||||
if (data.height_percent !== undefined) updatePayload.height_percent = data.height_percent;
|
||||
|
||||
|
||||
if (data.rotation_deg !== undefined) updatePayload.rotation_deg = data.rotation_deg;
|
||||
|
||||
|
||||
if (data.style_json !== undefined) updatePayload.style_json = data.style_json;
|
||||
|
||||
|
||||
if (data.content_json !== undefined) updatePayload.content_json = data.content_json;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await page_elements.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.page !== undefined) {
|
||||
await page_elements.setPage(
|
||||
|
||||
data.page,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return page_elements;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const page_elements = await db.page_elements.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of page_elements) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of page_elements) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return page_elements;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const page_elements = await db.page_elements.findByPk(id, options);
|
||||
|
||||
await page_elements.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await page_elements.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return page_elements;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const runtimeEnvironment = getRuntimeEnvironment(options);
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
const pageInclude = {
|
||||
model: db.tour_pages,
|
||||
as: 'page',
|
||||
required: Boolean(runtimeEnvironment || runtimeProjectSlug),
|
||||
where: runtimeEnvironment ? { environment: runtimeEnvironment } : {},
|
||||
include: runtimeProjectSlug
|
||||
? [{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
required: true,
|
||||
where: { slug: runtimeProjectSlug },
|
||||
}]
|
||||
: [],
|
||||
};
|
||||
|
||||
const page_elements = await db.page_elements.findOne(
|
||||
{ where, include: [pageInclude], transaction },
|
||||
);
|
||||
|
||||
if (!page_elements) {
|
||||
return page_elements;
|
||||
}
|
||||
|
||||
const output = page_elements.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.page = await page_elements.getPage({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
|
||||
{
|
||||
model: db.tour_pages,
|
||||
as: 'page',
|
||||
|
||||
where: filter.page ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.page.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.page.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
const runtimeEnvironment = getRuntimeEnvironment(options);
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
|
||||
if (runtimeEnvironment) {
|
||||
include[0].where = {
|
||||
...(include[0].where || {}),
|
||||
environment: runtimeEnvironment,
|
||||
};
|
||||
include[0].required = true;
|
||||
}
|
||||
|
||||
if (runtimeProjectSlug) {
|
||||
include[0].include = [{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
required: true,
|
||||
where: { slug: runtimeProjectSlug },
|
||||
}];
|
||||
include[0].required = true;
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'page_elements',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.style_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'page_elements',
|
||||
'style_json',
|
||||
filter.style_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.content_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'page_elements',
|
||||
'content_json',
|
||||
filter.content_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.sort_orderRange) {
|
||||
const [start, end] = filter.sort_orderRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.x_percentRange) {
|
||||
const [start, end] = filter.x_percentRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
x_percent: {
|
||||
...where.x_percent,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
x_percent: {
|
||||
...where.x_percent,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.y_percentRange) {
|
||||
const [start, end] = filter.y_percentRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
y_percent: {
|
||||
...where.y_percent,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
y_percent: {
|
||||
...where.y_percent,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.width_percentRange) {
|
||||
const [start, end] = filter.width_percentRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
width_percent: {
|
||||
...where.width_percent,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
width_percent: {
|
||||
...where.width_percent,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.height_percentRange) {
|
||||
const [start, end] = filter.height_percentRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
height_percent: {
|
||||
...where.height_percent,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
height_percent: {
|
||||
...where.height_percent,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.rotation_degRange) {
|
||||
const [start, end] = filter.rotation_degRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
rotation_deg: {
|
||||
...where.rotation_deg,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
rotation_deg: {
|
||||
...where.rotation_deg,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.element_type) {
|
||||
where = {
|
||||
...where,
|
||||
element_type: filter.element_type,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_visible) {
|
||||
where = {
|
||||
...where,
|
||||
is_visible: filter.is_visible,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.page_elements.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'page_elements',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.page_elements.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
@ -1,589 +0,0 @@
|
||||
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const {
|
||||
getRuntimeEnvironment,
|
||||
getRuntimeProjectSlug,
|
||||
} = require('./runtime-context');
|
||||
|
||||
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class Page_linksDBApi {
|
||||
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const page_links = await db.page_links.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
direction: data.direction
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
external_url: data.external_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_active: data.is_active
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
trigger_selector: data.trigger_selector
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
|
||||
await page_links.setFrom_page( data.from_page || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await page_links.setTo_page( data.to_page || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await page_links.setTransition( data.transition || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return page_links;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const page_linksData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
direction: item.direction
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
external_url: item.external_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_active: item.is_active
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
trigger_selector: item.trigger_selector
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const page_links = await db.page_links.bulkCreate(page_linksData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return page_links;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const page_links = await db.page_links.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.direction !== undefined) updatePayload.direction = data.direction;
|
||||
|
||||
|
||||
if (data.external_url !== undefined) updatePayload.external_url = data.external_url;
|
||||
|
||||
|
||||
if (data.is_active !== undefined) updatePayload.is_active = data.is_active;
|
||||
|
||||
|
||||
if (data.trigger_selector !== undefined) updatePayload.trigger_selector = data.trigger_selector;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await page_links.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.from_page !== undefined) {
|
||||
await page_links.setFrom_page(
|
||||
|
||||
data.from_page,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
if (data.to_page !== undefined) {
|
||||
await page_links.setTo_page(
|
||||
|
||||
data.to_page,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
if (data.transition !== undefined) {
|
||||
await page_links.setTransition(
|
||||
|
||||
data.transition,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return page_links;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const page_links = await db.page_links.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of page_links) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of page_links) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return page_links;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const page_links = await db.page_links.findByPk(id, options);
|
||||
|
||||
await page_links.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await page_links.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return page_links;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const runtimeEnvironment = getRuntimeEnvironment(options);
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
const buildProjectInclude = () => (
|
||||
runtimeProjectSlug
|
||||
? [{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
required: true,
|
||||
where: { slug: runtimeProjectSlug },
|
||||
}]
|
||||
: []
|
||||
);
|
||||
|
||||
const page_links = await db.page_links.findOne(
|
||||
{
|
||||
where,
|
||||
include: [
|
||||
{
|
||||
model: db.tour_pages,
|
||||
as: 'from_page',
|
||||
required: Boolean(runtimeEnvironment || runtimeProjectSlug),
|
||||
where: runtimeEnvironment ? { environment: runtimeEnvironment } : {},
|
||||
include: buildProjectInclude(),
|
||||
},
|
||||
{
|
||||
model: db.transitions,
|
||||
as: 'transition',
|
||||
required: false,
|
||||
where: runtimeEnvironment ? { environment: runtimeEnvironment } : {},
|
||||
include: buildProjectInclude(),
|
||||
},
|
||||
],
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
if (!page_links) {
|
||||
return page_links;
|
||||
}
|
||||
|
||||
const output = page_links.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.from_page = await page_links.getFrom_page({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.to_page = await page_links.getTo_page({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.transition = await page_links.getTransition({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
|
||||
{
|
||||
model: db.tour_pages,
|
||||
as: 'from_page',
|
||||
|
||||
where: filter.from_page ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.from_page.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.from_page.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
},
|
||||
|
||||
{
|
||||
model: db.tour_pages,
|
||||
as: 'to_page',
|
||||
|
||||
where: filter.to_page ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.to_page.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.to_page.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
},
|
||||
|
||||
{
|
||||
model: db.transitions,
|
||||
as: 'transition',
|
||||
|
||||
where: filter.transition ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.transition.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.transition.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
const runtimeEnvironment = getRuntimeEnvironment(options);
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
|
||||
if (runtimeEnvironment) {
|
||||
include[0].where = {
|
||||
...(include[0].where || {}),
|
||||
environment: runtimeEnvironment,
|
||||
};
|
||||
include[0].required = true;
|
||||
include[1].where = {
|
||||
...(include[1].where || {}),
|
||||
environment: runtimeEnvironment,
|
||||
};
|
||||
include[2].where = {
|
||||
...(include[2].where || {}),
|
||||
environment: runtimeEnvironment,
|
||||
};
|
||||
include[2].required = false;
|
||||
}
|
||||
|
||||
if (runtimeProjectSlug) {
|
||||
include[0].include = [{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
required: true,
|
||||
where: { slug: runtimeProjectSlug },
|
||||
}];
|
||||
include[0].required = true;
|
||||
include[1].include = [{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
required: true,
|
||||
where: { slug: runtimeProjectSlug },
|
||||
}];
|
||||
include[2].include = [{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
required: true,
|
||||
where: { slug: runtimeProjectSlug },
|
||||
}];
|
||||
include[2].required = false;
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.external_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'page_links',
|
||||
'external_url',
|
||||
filter.external_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.trigger_selector) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'page_links',
|
||||
'trigger_selector',
|
||||
filter.trigger_selector,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.direction) {
|
||||
where = {
|
||||
...where,
|
||||
direction: filter.direction,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_active) {
|
||||
where = {
|
||||
...where,
|
||||
is_active: filter.is_active,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.page_links.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'page_links',
|
||||
'direction',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.page_links.findAll({
|
||||
attributes: [ 'id', 'direction' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['direction', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.direction,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
@ -1,335 +1,53 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class PermissionsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.permissions;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'permissions';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['name'];
|
||||
}
|
||||
|
||||
module.exports = class PermissionsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
const permissions = await db.permissions.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return ['id', 'name', 'createdAt'];
|
||||
}
|
||||
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'name';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [];
|
||||
}
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [];
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
name: data.name || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const permissionsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const permissions = await db.permissions.bulkCreate(permissionsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const permissions = await db.permissions.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await permissions.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of permissions) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of permissions) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findByPk(id, options);
|
||||
|
||||
await permissions.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await permissions.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!permissions) {
|
||||
return permissions;
|
||||
}
|
||||
|
||||
const output = permissions.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
|
||||
|
||||
|
||||
];
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'permissions',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.permissions.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'permissions',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.permissions.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = PermissionsDBApi;
|
||||
|
||||
@ -1,571 +1,90 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class Presigned_url_requestsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.presigned_url_requests;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'presigned_url_requests';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['requested_key', 'mime_type', 'status'];
|
||||
}
|
||||
|
||||
module.exports = class Presigned_url_requestsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['requested_size_mb', 'expires_at'];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['purpose', 'asset_type'];
|
||||
}
|
||||
|
||||
const presigned_url_requests = await db.presigned_url_requests.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
purpose: data.purpose
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
asset_type: data.asset_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
requested_key: data.requested_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
mime_type: data.mime_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
requested_size_mb: data.requested_size_mb
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
expires_at: data.expires_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
status: data.status
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'purpose',
|
||||
'asset_type',
|
||||
'requested_key',
|
||||
'mime_type',
|
||||
'status',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
await presigned_url_requests.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await presigned_url_requests.setUser( data.user || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'requested_key';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [
|
||||
{ field: 'project', setter: 'setProject', isArray: false },
|
||||
{ field: 'user', setter: 'setUser', isArray: false },
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'project' }, { association: 'user' }];
|
||||
}
|
||||
|
||||
return presigned_url_requests;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const presigned_url_requestsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
purpose: item.purpose
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
asset_type: item.asset_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
requested_key: item.requested_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
mime_type: item.mime_type
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
requested_size_mb: item.requested_size_mb
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
expires_at: item.expires_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
status: item.status
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const presigned_url_requests = await db.presigned_url_requests.bulkCreate(presigned_url_requestsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return presigned_url_requests;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const presigned_url_requests = await db.presigned_url_requests.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.purpose !== undefined) updatePayload.purpose = data.purpose;
|
||||
|
||||
|
||||
if (data.asset_type !== undefined) updatePayload.asset_type = data.asset_type;
|
||||
|
||||
|
||||
if (data.requested_key !== undefined) updatePayload.requested_key = data.requested_key;
|
||||
|
||||
|
||||
if (data.mime_type !== undefined) updatePayload.mime_type = data.mime_type;
|
||||
|
||||
|
||||
if (data.requested_size_mb !== undefined) updatePayload.requested_size_mb = data.requested_size_mb;
|
||||
|
||||
|
||||
if (data.expires_at !== undefined) updatePayload.expires_at = data.expires_at;
|
||||
|
||||
|
||||
if (data.status !== undefined) updatePayload.status = data.status;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await presigned_url_requests.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await presigned_url_requests.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
if (data.user !== undefined) {
|
||||
await presigned_url_requests.setUser(
|
||||
|
||||
data.user,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return presigned_url_requests;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const presigned_url_requests = await db.presigned_url_requests.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of presigned_url_requests) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of presigned_url_requests) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return presigned_url_requests;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const presigned_url_requests = await db.presigned_url_requests.findByPk(id, options);
|
||||
|
||||
await presigned_url_requests.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await presigned_url_requests.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return presigned_url_requests;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const presigned_url_requests = await db.presigned_url_requests.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!presigned_url_requests) {
|
||||
return presigned_url_requests;
|
||||
}
|
||||
|
||||
const output = presigned_url_requests.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await presigned_url_requests.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.user = await presigned_url_requests.getUser({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [
|
||||
{ model: db.projects, as: 'project', required: false },
|
||||
{ model: db.users, as: 'user', required: false },
|
||||
];
|
||||
}
|
||||
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'project',
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'name',
|
||||
},
|
||||
|
||||
{
|
||||
filterKey: 'user',
|
||||
model: db.users,
|
||||
as: 'user',
|
||||
|
||||
where: filter.user ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
firstName: {
|
||||
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'firstName',
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
purpose: data.purpose || null,
|
||||
asset_type: data.asset_type || null,
|
||||
requested_key: data.requested_key || null,
|
||||
mime_type: data.mime_type || null,
|
||||
requested_size_mb: data.requested_size_mb || null,
|
||||
expires_at: data.expires_at || null,
|
||||
status: data.status || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.requested_key) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'presigned_url_requests',
|
||||
'requested_key',
|
||||
filter.requested_key,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.mime_type) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'presigned_url_requests',
|
||||
'mime_type',
|
||||
filter.mime_type,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.status) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'presigned_url_requests',
|
||||
'status',
|
||||
filter.status,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.requested_size_mbRange) {
|
||||
const [start, end] = filter.requested_size_mbRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
requested_size_mb: {
|
||||
...where.requested_size_mb,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
requested_size_mb: {
|
||||
...where.requested_size_mb,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.expires_atRange) {
|
||||
const [start, end] = filter.expires_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
expires_at: {
|
||||
...where.expires_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
expires_at: {
|
||||
...where.expires_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.purpose) {
|
||||
where = {
|
||||
...where,
|
||||
purpose: filter.purpose,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.asset_type) {
|
||||
where = {
|
||||
...where,
|
||||
asset_type: filter.asset_type,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.presigned_url_requests.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'presigned_url_requests',
|
||||
'requested_key',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.presigned_url_requests.findAll({
|
||||
attributes: [ 'id', 'requested_key' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['requested_key', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.requested_key,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = Presigned_url_requestsDBApi;
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const {
|
||||
@ -6,591 +6,194 @@ const {
|
||||
applyRuntimeProjectFilter,
|
||||
} = require('./runtime-context');
|
||||
|
||||
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class Project_audio_tracksDBApi {
|
||||
|
||||
class Project_audio_tracksDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.project_audio_tracks;
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get TABLE_NAME() {
|
||||
return 'project_audio_tracks';
|
||||
}
|
||||
|
||||
const project_audio_tracks = await db.project_audio_tracks.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
environment: data.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
source_key: data.source_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: data.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
url: data.url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
loop: data.loop
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
volume: data.volume
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
sort_order: data.sort_order
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_enabled: data.is_enabled
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['source_key', 'name', 'slug', 'url'];
|
||||
}
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['volume', 'sort_order'];
|
||||
}
|
||||
|
||||
static get ENUM_FIELDS() {
|
||||
return ['environment', 'loop', 'is_enabled'];
|
||||
}
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'environment',
|
||||
'source_key',
|
||||
'name',
|
||||
'slug',
|
||||
'url',
|
||||
'loop',
|
||||
'volume',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'name';
|
||||
}
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'project', setter: 'setProject', isArray: false }];
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
environment: data.environment || null,
|
||||
source_key: data.source_key || null,
|
||||
name: data.name || null,
|
||||
slug: data.slug || null,
|
||||
url: data.url || null,
|
||||
loop: data.loop || false,
|
||||
volume: data.volume || null,
|
||||
sort_order: data.sort_order || null,
|
||||
is_enabled: data.is_enabled || false,
|
||||
};
|
||||
}
|
||||
|
||||
static async findBy(where, options = {}) {
|
||||
const transaction = options.transaction;
|
||||
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
|
||||
const projectInclude = applyRuntimeProjectFilter(
|
||||
{ model: db.projects, as: 'project' },
|
||||
options,
|
||||
);
|
||||
|
||||
|
||||
await project_audio_tracks.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
const record = await this.MODEL.findOne({
|
||||
where: queryWhere,
|
||||
transaction,
|
||||
include: [projectInclude],
|
||||
});
|
||||
|
||||
|
||||
if (!record) return null;
|
||||
return record.get({ plain: true });
|
||||
}
|
||||
|
||||
|
||||
static async findAll(filter = {}, options = {}) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
const currentPage = +filter.page || 0;
|
||||
const offset = currentPage * limit;
|
||||
|
||||
return project_audio_tracks;
|
||||
}
|
||||
|
||||
let where = {};
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const project_audio_tracksData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
environment: item.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
source_key: item.source_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: item.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
url: item.url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
loop: item.loop
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
volume: item.volume
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
sort_order: item.sort_order
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_enabled: item.is_enabled
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const project_audio_tracks = await db.project_audio_tracks.bulkCreate(project_audio_tracksData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return project_audio_tracks;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const project_audio_tracks = await db.project_audio_tracks.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.environment !== undefined) updatePayload.environment = data.environment;
|
||||
|
||||
|
||||
if (data.source_key !== undefined) updatePayload.source_key = data.source_key;
|
||||
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.slug !== undefined) updatePayload.slug = data.slug;
|
||||
|
||||
|
||||
if (data.url !== undefined) updatePayload.url = data.url;
|
||||
|
||||
|
||||
if (data.loop !== undefined) updatePayload.loop = data.loop;
|
||||
|
||||
|
||||
if (data.volume !== undefined) updatePayload.volume = data.volume;
|
||||
|
||||
|
||||
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
|
||||
|
||||
|
||||
if (data.is_enabled !== undefined) updatePayload.is_enabled = data.is_enabled;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await project_audio_tracks.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await project_audio_tracks.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return project_audio_tracks;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const project_audio_tracks = await db.project_audio_tracks.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of project_audio_tracks) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of project_audio_tracks) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return project_audio_tracks;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const project_audio_tracks = await db.project_audio_tracks.findByPk(id, options);
|
||||
|
||||
await project_audio_tracks.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await project_audio_tracks.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return project_audio_tracks;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
|
||||
const projectInclude = applyRuntimeProjectFilter(
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const project_audio_tracks = await db.project_audio_tracks.findOne(
|
||||
{ where: queryWhere, include: [projectInclude], transaction },
|
||||
);
|
||||
|
||||
if (!project_audio_tracks) {
|
||||
return project_audio_tracks;
|
||||
}
|
||||
|
||||
const output = project_audio_tracks.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await project_audio_tracks.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
const terms = filter.project ? filter.project.split('|') : [];
|
||||
const validUuids = Utils.filterValidUuids(terms);
|
||||
|
||||
let include = [
|
||||
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
where: filter.project
|
||||
? {
|
||||
[Op.or]: [
|
||||
...(validUuids.length > 0
|
||||
? [{ id: { [Op.in]: validUuids } }]
|
||||
: []),
|
||||
{
|
||||
name: {
|
||||
[Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: {},
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
include[0] = applyRuntimeProjectFilter(include[0], options);
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
include[0] = applyRuntimeProjectFilter(include[0], options);
|
||||
|
||||
|
||||
if (filter.source_key) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'project_audio_tracks',
|
||||
'source_key',
|
||||
filter.source_key,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'project_audio_tracks',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.slug) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'project_audio_tracks',
|
||||
'slug',
|
||||
filter.slug,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'project_audio_tracks',
|
||||
'url',
|
||||
filter.url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.volumeRange) {
|
||||
const [start, end] = filter.volumeRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
volume: {
|
||||
...where.volume,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
volume: {
|
||||
...where.volume,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.sort_orderRange) {
|
||||
const [start, end] = filter.sort_orderRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.environment) {
|
||||
where = {
|
||||
...where,
|
||||
environment: filter.environment,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.loop) {
|
||||
where = {
|
||||
...where,
|
||||
loop: filter.loop,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_enabled) {
|
||||
where = {
|
||||
...where,
|
||||
is_enabled: filter.is_enabled,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
where = applyRuntimeEnvironment(where, options);
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.project_audio_tracks.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
if (filter.id) {
|
||||
if (!Utils.isValidUuid(filter.id)) {
|
||||
return { rows: [], count: 0 };
|
||||
}
|
||||
where.id = filter.id;
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'project_audio_tracks',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.project_audio_tracks.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
for (const field of this.SEARCHABLE_FIELDS) {
|
||||
if (filter[field]) {
|
||||
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
for (const field of this.RANGE_FIELDS) {
|
||||
const rangeKey = `${field}Range`;
|
||||
if (filter[rangeKey]) {
|
||||
const [start, end] = filter[rangeKey];
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where[field] = { ...where[field], [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where[field] = { ...where[field], [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const field of this.ENUM_FIELDS) {
|
||||
if (filter[field] !== undefined) {
|
||||
where[field] = filter[field];
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where.active = filter.active === true || filter.active === 'true';
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
|
||||
where = applyRuntimeEnvironment(where, options);
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options.transaction,
|
||||
};
|
||||
|
||||
if (!options.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
|
||||
return {
|
||||
rows: options.countOnly ? [] : rows,
|
||||
count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Project_audio_tracksDBApi;
|
||||
|
||||
390
backend/src/db/api/project_element_defaults.js
Normal file
390
backend/src/db/api/project_element_defaults.js
Normal file
@ -0,0 +1,390 @@
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
class Project_element_defaultsDBApi extends GenericDBApi {
|
||||
  // Sequelize model backing this API.
  static get MODEL() {
    return db.project_element_defaults;
  }

  // Physical table name (used by Utils.ilike and similar helpers).
  static get TABLE_NAME() {
    return 'project_element_defaults';
  }

  // Text fields the generic findAll matches with case-insensitive LIKE.
  static get SEARCHABLE_FIELDS() {
    return ['name', 'element_type'];
  }

  // Numeric fields that accept `<field>Range` [start, end] filters.
  static get RANGE_FIELDS() {
    return ['sort_order', 'snapshot_version'];
  }

  // No enum-valued fields on this table.
  static get ENUM_FIELDS() {
    return [];
  }

  // Associations the base class wires up after create/update.
  static get ASSOCIATIONS() {
    return [{ field: 'project', setter: 'setProject', isArray: false }];
  }

  // Relation-based filters: `filter.project` searches projects by name.
  static get RELATION_FILTERS() {
    return [
      {
        filterKey: 'project',
        model: db.projects,
        as: 'project',
        searchField: 'name',
      },
    ];
  }

  // Associations eagerly loaded by the generic findAll.
  static get FIND_ALL_INCLUDES() {
    return [{ association: 'project' }, { association: 'source_element' }];
  }

  // Column order for CSV export.
  static get CSV_FIELDS() {
    return [
      'id',
      'element_type',
      'name',
      'sort_order',
      'projectId',
      'snapshot_version',
      'createdAt',
    ];
  }

  // Field shown as the label in autocomplete results.
  static get AUTOCOMPLETE_FIELD() {
    return 'name';
  }

  // Declarative field configuration using base class patterns
  // Fields stored as JSON that the base class (de)serializes.
  static get JSON_FIELDS() {
    return ['settings_json'];
  }

  // Per-field defaults applied by the base class when a value is absent.
  static get FIELD_DEFAULTS() {
    return {
      element_type: { default: null },
      name: { default: null },
      sort_order: { default: 0 },
      source_element_id: { default: null },
      snapshot_version: { default: 1 },
    };
  }
|
||||
|
||||
static getFieldMapping(data) {
|
||||
// Apply base class transformations (JSON fields, defaults, transformers)
|
||||
const mapped = super.getFieldMapping(data);
|
||||
|
||||
// Custom mapping for projectId field (accepts both projectId and project)
|
||||
if (mapped.project && !mapped.projectId) {
|
||||
mapped.projectId = mapped.project;
|
||||
}
|
||||
|
||||
return {
|
||||
id: mapped.id || undefined,
|
||||
element_type: mapped.element_type,
|
||||
name: mapped.name,
|
||||
sort_order: mapped.sort_order,
|
||||
settings_json: mapped.settings_json,
|
||||
source_element_id: mapped.source_element_id,
|
||||
snapshot_version: mapped.snapshot_version,
|
||||
projectId: mapped.projectId,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom findAll with project filtering
|
||||
* Supports both 'project' and 'projectId' query params for consistency
|
||||
*/
|
||||
  static async findAll(filter = {}, options = {}) {
    filter = filter || {};
    // limit of 0 means "no paging"; offset is derived from page * limit.
    const limit = filter.limit || 0;
    const currentPage = +filter.page || 0;
    const offset = currentPage * limit;

    let where = {};

    // Support both 'project' and 'projectId' query params
    const projectFilter = filter.project || filter.projectId;
    // '|' separates multiple search terms; UUID-shaped terms match by id,
    // the rest match the project name with a case-insensitive substring.
    const terms = projectFilter ? projectFilter.split('|') : [];
    const validUuids = Utils.filterValidUuids(terms);

    let include = [
      {
        model: db.projects,
        as: 'project',
        where: projectFilter
          ? {
              [Op.or]: [
                ...(validUuids.length > 0
                  ? [{ id: { [Op.in]: validUuids } }]
                  : []),
                {
                  name: {
                    [Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })),
                  },
                },
              ],
            }
          : {},
      },
      // source_element is optional: rows without a snapshot source still match.
      {
        model: db.element_type_defaults,
        as: 'source_element',
        required: false,
      },
    ];

    // Reject malformed UUID id filters up front instead of erroring in SQL.
    if (filter.id) {
      if (!Utils.isValidUuid(filter.id)) {
        return { rows: [], count: 0 };
      }
      where.id = filter.id;
    }

    // NOTE(review): each searchable-field filter overwrites where[Op.and],
    // so only the last supplied text filter applies when several are combined.
    for (const field of this.SEARCHABLE_FIELDS) {
      if (filter[field]) {
        where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
      }
    }

    // `<field>Range` filters: [start, end], either bound optional.
    for (const field of this.RANGE_FIELDS) {
      const rangeKey = `${field}Range`;
      if (filter[rangeKey]) {
        const [start, end] = filter[rangeKey];
        if (start !== undefined && start !== null && start !== '') {
          where[field] = { ...where[field], [Op.gte]: start };
        }
        if (end !== undefined && end !== null && end !== '') {
          where[field] = { ...where[field], [Op.lte]: end };
        }
      }
    }

    for (const field of this.ENUM_FIELDS) {
      if (filter[field] !== undefined) {
        where[field] = filter[field];
      }
    }

    if (filter.createdAtRange) {
      const [start, end] = filter.createdAtRange;
      if (start !== undefined && start !== null && start !== '') {
        where.createdAt = { ...where.createdAt, [Op.gte]: start };
      }
      if (end !== undefined && end !== null && end !== '') {
        where.createdAt = { ...where.createdAt, [Op.lte]: end };
      }
    }

    const queryOptions = {
      where,
      include,
      // distinct prevents inflated counts when includes multiply rows.
      distinct: true,
      order:
        filter.field && filter.sort
          ? [[filter.field, filter.sort]]
          : [['sort_order', 'asc']],
      transaction: options.transaction,
    };

    // countOnly callers skip paging so the count covers the full result set.
    if (!options.countOnly) {
      queryOptions.limit = limit ? Number(limit) : undefined;
      queryOptions.offset = offset ? Number(offset) : undefined;
    }

    try {
      const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
      return {
        rows: options.countOnly ? [] : rows,
        count,
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }
|
||||
|
||||
/**
|
||||
* Find project element default by element type for a specific project
|
||||
*/
|
||||
static async findByElementType(projectId, elementType, options = {}) {
|
||||
return this.MODEL.findOne({
|
||||
where: {
|
||||
projectId,
|
||||
element_type: elementType,
|
||||
deletedAt: null,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Snapshot all global element defaults to a project
|
||||
* Used when creating a new project
|
||||
*/
|
||||
static async snapshotGlobalDefaults(projectId, options = {}) {
|
||||
const Element_type_defaultsDBApi = require('./element_type_defaults');
|
||||
|
||||
// Get all global defaults
|
||||
const globalDefaults = await Element_type_defaultsDBApi.findAll({});
|
||||
|
||||
if (!globalDefaults?.rows?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Dedupe by element_type (keep first occurrence)
|
||||
// Prevents unique constraint violations if global defaults have duplicates
|
||||
const seenTypes = new Set();
|
||||
const dedupedDefaults = globalDefaults.rows.filter((row) => {
|
||||
if (seenTypes.has(row.element_type)) {
|
||||
console.warn(
|
||||
`Duplicate element_type in global defaults: ${row.element_type} (skipping)`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
seenTypes.add(row.element_type);
|
||||
return true;
|
||||
});
|
||||
|
||||
const now = new Date();
|
||||
const currentUserId = options.currentUser?.id || null;
|
||||
|
||||
// Create project defaults from global defaults
|
||||
const projectDefaults = await this.MODEL.bulkCreate(
|
||||
dedupedDefaults.map((globalDefault) => ({
|
||||
projectId,
|
||||
element_type: globalDefault.element_type,
|
||||
name: globalDefault.name,
|
||||
sort_order: globalDefault.sort_order,
|
||||
settings_json: globalDefault.default_settings_json,
|
||||
source_element_id: globalDefault.id,
|
||||
snapshot_version: 1,
|
||||
createdById: currentUserId,
|
||||
updatedById: currentUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})),
|
||||
{
|
||||
transaction: options.transaction,
|
||||
returning: true,
|
||||
},
|
||||
);
|
||||
|
||||
return projectDefaults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset a project element default to the current global default
|
||||
*/
|
||||
  static async resetToGlobal(id, options = {}) {
    // Lazy require avoids a circular dependency between the two API modules.
    const Element_type_defaultsDBApi = require('./element_type_defaults');

    // Ensure global defaults are initialized
    await Element_type_defaultsDBApi.ensureInitialized();

    // Find the project default
    const projectDefault = await this.MODEL.findByPk(id);
    if (!projectDefault) {
      throw new Error('Project element default not found');
    }

    // Find the matching (non-deleted) global default for this element type.
    const globalDefault = await Element_type_defaultsDBApi.MODEL.findOne({
      where: {
        element_type: projectDefault.element_type,
        deletedAt: null,
      },
    });

    if (!globalDefault) {
      throw new Error(
        `No global default found for element type: ${projectDefault.element_type}`,
      );
    }

    // Overwrite the project copy with the current global settings and bump
    // snapshot_version so consumers can detect the refresh.
    const now = new Date();
    await projectDefault.update(
      {
        name: globalDefault.name,
        sort_order: globalDefault.sort_order,
        settings_json: globalDefault.default_settings_json,
        source_element_id: globalDefault.id,
        snapshot_version: projectDefault.snapshot_version + 1,
        updatedById: options.currentUser?.id || null,
        updatedAt: now,
      },
      {
        transaction: options.transaction,
      },
    );

    // Reload so the returned instance reflects the persisted state.
    return projectDefault.reload();
  }
|
||||
|
||||
/**
|
||||
* Get diff between project default and current global default
|
||||
*/
|
||||
static async getDiffFromGlobal(id) {
|
||||
const Element_type_defaultsDBApi = require('./element_type_defaults');
|
||||
|
||||
// Ensure global defaults are initialized
|
||||
await Element_type_defaultsDBApi.ensureInitialized();
|
||||
|
||||
// Find the project default
|
||||
const projectDefault = await this.MODEL.findByPk(id);
|
||||
if (!projectDefault) {
|
||||
throw new Error('Project element default not found');
|
||||
}
|
||||
|
||||
// Find the matching global default
|
||||
const globalDefault = await Element_type_defaultsDBApi.MODEL.findOne({
|
||||
where: {
|
||||
element_type: projectDefault.element_type,
|
||||
deletedAt: null,
|
||||
},
|
||||
});
|
||||
|
||||
if (!globalDefault) {
|
||||
return {
|
||||
projectDefault,
|
||||
globalDefault: null,
|
||||
hasGlobalDefault: false,
|
||||
isDifferent: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Parse JSON settings for comparison
|
||||
const projectSettings =
|
||||
typeof projectDefault.settings_json === 'string'
|
||||
? JSON.parse(projectDefault.settings_json || '{}')
|
||||
: projectDefault.settings_json || {};
|
||||
|
||||
const globalSettings =
|
||||
typeof globalDefault.default_settings_json === 'string'
|
||||
? JSON.parse(globalDefault.default_settings_json || '{}')
|
||||
: globalDefault.default_settings_json || {};
|
||||
|
||||
const isDifferent =
|
||||
JSON.stringify(projectSettings) !== JSON.stringify(globalSettings) ||
|
||||
projectDefault.name !== globalDefault.name ||
|
||||
projectDefault.sort_order !== globalDefault.sort_order;
|
||||
|
||||
return {
|
||||
projectDefault,
|
||||
globalDefault,
|
||||
hasGlobalDefault: true,
|
||||
isDifferent,
|
||||
projectSettings,
|
||||
globalSettings,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Project_element_defaultsDBApi;
|
||||
@ -1,501 +1,86 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class Project_membershipsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.project_memberships;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'project_memberships';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
module.exports = class Project_membershipsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['invited_at', 'accepted_at'];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['access_level', 'is_active'];
|
||||
}
|
||||
|
||||
const project_memberships = await db.project_memberships.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
access_level: data.access_level
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_active: data.is_active
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
invited_at: data.invited_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
accepted_at: data.accepted_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'access_level',
|
||||
'is_active',
|
||||
'invited_at',
|
||||
'accepted_at',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
await project_memberships.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await project_memberships.setUser( data.user || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'access_level';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [
|
||||
{ field: 'project', setter: 'setProject', isArray: false },
|
||||
{ field: 'user', setter: 'setUser', isArray: false },
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'project' }, { association: 'user' }];
|
||||
}
|
||||
|
||||
return project_memberships;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const project_membershipsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
access_level: item.access_level
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_active: item.is_active
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
invited_at: item.invited_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
accepted_at: item.accepted_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const project_memberships = await db.project_memberships.bulkCreate(project_membershipsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return project_memberships;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const project_memberships = await db.project_memberships.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.access_level !== undefined) updatePayload.access_level = data.access_level;
|
||||
|
||||
|
||||
if (data.is_active !== undefined) updatePayload.is_active = data.is_active;
|
||||
|
||||
|
||||
if (data.invited_at !== undefined) updatePayload.invited_at = data.invited_at;
|
||||
|
||||
|
||||
if (data.accepted_at !== undefined) updatePayload.accepted_at = data.accepted_at;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await project_memberships.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await project_memberships.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
if (data.user !== undefined) {
|
||||
await project_memberships.setUser(
|
||||
|
||||
data.user,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return project_memberships;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const project_memberships = await db.project_memberships.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of project_memberships) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of project_memberships) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return project_memberships;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const project_memberships = await db.project_memberships.findByPk(id, options);
|
||||
|
||||
await project_memberships.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await project_memberships.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return project_memberships;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const project_memberships = await db.project_memberships.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!project_memberships) {
|
||||
return project_memberships;
|
||||
}
|
||||
|
||||
const output = project_memberships.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await project_memberships.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.user = await project_memberships.getUser({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [
|
||||
{ model: db.projects, as: 'project', required: false },
|
||||
{ model: db.users, as: 'user', required: false },
|
||||
];
|
||||
}
|
||||
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'project',
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'name',
|
||||
},
|
||||
|
||||
{
|
||||
filterKey: 'user',
|
||||
model: db.users,
|
||||
as: 'user',
|
||||
|
||||
where: filter.user ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
firstName: {
|
||||
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'firstName',
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
access_level: data.access_level || null,
|
||||
is_active: data.is_active || false,
|
||||
invited_at: data.invited_at || null,
|
||||
accepted_at: data.accepted_at || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.invited_atRange) {
|
||||
const [start, end] = filter.invited_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
invited_at: {
|
||||
...where.invited_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
invited_at: {
|
||||
...where.invited_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.accepted_atRange) {
|
||||
const [start, end] = filter.accepted_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
accepted_at: {
|
||||
...where.accepted_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
accepted_at: {
|
||||
...where.accepted_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.access_level) {
|
||||
where = {
|
||||
...where,
|
||||
access_level: filter.access_level,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_active) {
|
||||
where = {
|
||||
...where,
|
||||
is_active: filter.is_active,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.project_memberships.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'project_memberships',
|
||||
'access_level',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.project_memberships.findAll({
|
||||
attributes: [ 'id', 'access_level' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['access_level', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.access_level,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = Project_membershipsDBApi;
|
||||
|
||||
277
backend/src/db/api/project_transition_settings.js
Normal file
277
backend/src/db/api/project_transition_settings.js
Normal file
@ -0,0 +1,277 @@
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const {
|
||||
applyRuntimeEnvironment,
|
||||
applyRuntimeProjectFilter,
|
||||
} = require('./runtime-context');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
class Project_transition_settingsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.project_transition_settings;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'project_transition_settings';
|
||||
}
|
||||
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['source_key', 'transition_type', 'easing', 'overlay_color'];
|
||||
}
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['duration_ms'];
|
||||
}
|
||||
|
||||
static get ENUM_FIELDS() {
|
||||
return ['environment'];
|
||||
}
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'environment',
|
||||
'source_key',
|
||||
'transition_type',
|
||||
'duration_ms',
|
||||
'easing',
|
||||
'overlay_color',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'transition_type';
|
||||
}
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'project', setter: 'setProject', isArray: false }];
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
// Note: environment and projectId are NOT included here because they are
|
||||
// set explicitly in upsertForProject and should never be changed via data
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
source_key: data.source_key || null,
|
||||
transition_type: data.transition_type || 'fade',
|
||||
duration_ms: data.duration_ms !== undefined ? data.duration_ms : 700,
|
||||
easing: data.easing || 'ease-in-out',
|
||||
overlay_color: data.overlay_color || '#000000',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Find settings by project ID and environment.
|
||||
* This is the primary method for fetching transition settings.
|
||||
*
|
||||
* @param {string} projectId - Project ID
|
||||
* @param {string} environment - Environment (dev, stage, production)
|
||||
* @param {object} options - Query options
|
||||
* @returns {object|null} Settings record or null
|
||||
*/
|
||||
static async findByProjectAndEnvironment(
|
||||
projectId,
|
||||
environment,
|
||||
options = {},
|
||||
) {
|
||||
const transaction = options.transaction;
|
||||
|
||||
const record = await this.MODEL.findOne({
|
||||
where: {
|
||||
projectId,
|
||||
environment,
|
||||
},
|
||||
transaction,
|
||||
include: [
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
if (!record) return null;
|
||||
return record.get({ plain: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Create or update settings for a project/environment combination.
|
||||
* Uses upsert semantics - creates if not exists, updates if exists.
|
||||
*
|
||||
* @param {string} projectId - Project ID
|
||||
* @param {string} environment - Environment (dev, stage, production)
|
||||
* @param {object} data - Settings data
|
||||
* @param {object} options - Query options
|
||||
* @returns {object} Created or updated record
|
||||
*/
|
||||
static async upsertForProject(projectId, environment, data, options = {}) {
|
||||
const transaction = options.transaction;
|
||||
const currentUser = options.currentUser;
|
||||
|
||||
// Check if record exists
|
||||
const existing = await this.MODEL.findOne({
|
||||
where: { projectId, environment },
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (existing) {
|
||||
// Update existing record
|
||||
await existing.update(
|
||||
{
|
||||
...this.getFieldMapping(data),
|
||||
updatedById: currentUser?.id || null,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
return existing.get({ plain: true });
|
||||
}
|
||||
|
||||
// Create new record
|
||||
const newRecord = await this.MODEL.create(
|
||||
{
|
||||
...this.getFieldMapping(data),
|
||||
projectId,
|
||||
environment,
|
||||
createdById: currentUser?.id || null,
|
||||
updatedById: currentUser?.id || null,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
return newRecord.get({ plain: true });
|
||||
}
|
||||
|
||||
static async findBy(where, options = {}) {
|
||||
const transaction = options.transaction;
|
||||
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
|
||||
const projectInclude = applyRuntimeProjectFilter(
|
||||
{ model: db.projects, as: 'project' },
|
||||
options,
|
||||
);
|
||||
|
||||
const record = await this.MODEL.findOne({
|
||||
where: queryWhere,
|
||||
transaction,
|
||||
include: [projectInclude],
|
||||
});
|
||||
|
||||
if (!record) return null;
|
||||
return record.get({ plain: true });
|
||||
}
|
||||
|
||||
static async findAll(filter = {}, options = {}) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
const currentPage = +filter.page || 0;
|
||||
const offset = currentPage * limit;
|
||||
|
||||
let where = {};
|
||||
|
||||
const terms = filter.project ? filter.project.split('|') : [];
|
||||
const validUuids = Utils.filterValidUuids(terms);
|
||||
|
||||
let include = [
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
where: filter.project
|
||||
? {
|
||||
[Op.or]: [
|
||||
...(validUuids.length > 0
|
||||
? [{ id: { [Op.in]: validUuids } }]
|
||||
: []),
|
||||
{
|
||||
name: {
|
||||
[Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: {},
|
||||
},
|
||||
];
|
||||
|
||||
include[0] = applyRuntimeProjectFilter(include[0], options);
|
||||
|
||||
if (filter.id) {
|
||||
if (!Utils.isValidUuid(filter.id)) {
|
||||
return { rows: [], count: 0 };
|
||||
}
|
||||
where.id = filter.id;
|
||||
}
|
||||
|
||||
for (const field of this.SEARCHABLE_FIELDS) {
|
||||
if (filter[field]) {
|
||||
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
|
||||
}
|
||||
}
|
||||
|
||||
for (const field of this.RANGE_FIELDS) {
|
||||
const rangeKey = `${field}Range`;
|
||||
if (filter[rangeKey]) {
|
||||
const [start, end] = filter[rangeKey];
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where[field] = { ...where[field], [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where[field] = { ...where[field], [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const field of this.ENUM_FIELDS) {
|
||||
if (filter[field] !== undefined) {
|
||||
where[field] = filter[field];
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where.active = filter.active === true || filter.active === 'true';
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
|
||||
where = applyRuntimeEnvironment(where, options);
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options.transaction,
|
||||
};
|
||||
|
||||
if (!options.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
|
||||
return {
|
||||
rows: options.countOnly ? [] : rows,
|
||||
count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Project_transition_settingsDBApi;
|
||||
@ -1,699 +1,230 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
const db = require('../models');
const Utils = require('../utils');
// BUG FIX: getRuntimeProjectSlug was destructured twice from
// './runtime-context' via two separate `const` declarations, which is a
// redeclaration SyntaxError at module load. Import both helpers once.
const {
  getRuntimeEnvironment,
  getRuntimeProjectSlug,
} = require('./runtime-context');

const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class ProjectsDBApi {
|
||||
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const projects = await db.projects.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: data.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
description: data.description
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
phase: data.phase
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
logo_url: data.logo_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
favicon_url: data.favicon_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
og_image_url: data.og_image_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
theme_config_json: data.theme_config_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
custom_css_json: data.custom_css_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cdn_base_url: data.cdn_base_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
entry_page_slug: data.entry_page_slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_deleted: data.is_deleted
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
deleted_at_time: data.deleted_at_time
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return projects;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const projectsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: item.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
description: item.description
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
phase: item.phase
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
logo_url: item.logo_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
favicon_url: item.favicon_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
og_image_url: item.og_image_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
theme_config_json: item.theme_config_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
custom_css_json: item.custom_css_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cdn_base_url: item.cdn_base_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
entry_page_slug: item.entry_page_slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_deleted: item.is_deleted
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
deleted_at_time: item.deleted_at_time
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const projects = await db.projects.bulkCreate(projectsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return projects;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const projects = await db.projects.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.slug !== undefined) updatePayload.slug = data.slug;
|
||||
|
||||
|
||||
if (data.description !== undefined) updatePayload.description = data.description;
|
||||
|
||||
|
||||
if (data.phase !== undefined) updatePayload.phase = data.phase;
|
||||
|
||||
|
||||
if (data.logo_url !== undefined) updatePayload.logo_url = data.logo_url;
|
||||
|
||||
|
||||
if (data.favicon_url !== undefined) updatePayload.favicon_url = data.favicon_url;
|
||||
|
||||
|
||||
if (data.og_image_url !== undefined) updatePayload.og_image_url = data.og_image_url;
|
||||
|
||||
|
||||
if (data.theme_config_json !== undefined) updatePayload.theme_config_json = data.theme_config_json;
|
||||
|
||||
|
||||
if (data.custom_css_json !== undefined) updatePayload.custom_css_json = data.custom_css_json;
|
||||
|
||||
|
||||
if (data.cdn_base_url !== undefined) updatePayload.cdn_base_url = data.cdn_base_url;
|
||||
|
||||
|
||||
if (data.entry_page_slug !== undefined) updatePayload.entry_page_slug = data.entry_page_slug;
|
||||
|
||||
|
||||
if (data.is_deleted !== undefined) updatePayload.is_deleted = data.is_deleted;
|
||||
|
||||
|
||||
if (data.deleted_at_time !== undefined) updatePayload.deleted_at_time = data.deleted_at_time;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await projects.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return projects;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const projects = await db.projects.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of projects) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of projects) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return projects;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const projects = await db.projects.findByPk(id, options);
|
||||
|
||||
await projects.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await projects.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return projects;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const runtimeEnvironment = getRuntimeEnvironment(options);
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
const queryWhere = { ...where };
|
||||
|
||||
if (runtimeEnvironment) {
|
||||
queryWhere.phase = runtimeEnvironment === 'production'
|
||||
? 'production'
|
||||
: { [Op.in]: ['stage', 'production'] };
|
||||
}
|
||||
|
||||
if (runtimeProjectSlug) {
|
||||
queryWhere.slug = runtimeProjectSlug;
|
||||
}
|
||||
|
||||
const projects = await db.projects.findOne(
|
||||
{ where: queryWhere, transaction },
|
||||
);
|
||||
|
||||
if (!projects) {
|
||||
return projects;
|
||||
}
|
||||
|
||||
const output = projects.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project_memberships_project = await projects.getProject_memberships_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.assets_project = await projects.getAssets_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
output.presigned_url_requests_project = await projects.getPresigned_url_requests_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.tour_pages_project = await projects.getTour_pages_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
output.transitions_project = await projects.getTransitions_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.project_audio_tracks_project = await projects.getProject_audio_tracks_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.publish_events_project = await projects.getPublish_events_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.pwa_caches_project = await projects.getPwa_caches_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.access_logs_project = await projects.getAccess_logs_project({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
|
||||
class ProjectsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.projects;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'projects';
|
||||
}
|
||||
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return [
|
||||
'name',
|
||||
'slug',
|
||||
'description',
|
||||
'logo_url',
|
||||
'favicon_url',
|
||||
'og_image_url',
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static get RANGE_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.slug) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'slug',
|
||||
filter.slug,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.description) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'description',
|
||||
filter.description,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.logo_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'logo_url',
|
||||
filter.logo_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.favicon_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'favicon_url',
|
||||
filter.favicon_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.og_image_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'og_image_url',
|
||||
filter.og_image_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.theme_config_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'theme_config_json',
|
||||
filter.theme_config_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.custom_css_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'custom_css_json',
|
||||
filter.custom_css_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.cdn_base_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'cdn_base_url',
|
||||
filter.cdn_base_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.entry_page_slug) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'projects',
|
||||
'entry_page_slug',
|
||||
filter.entry_page_slug,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
static get ENUM_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return ['id', 'name', 'slug', 'description', 'logo_url', 'createdAt'];
|
||||
}
|
||||
|
||||
|
||||
if (filter.deleted_at_timeRange) {
|
||||
const [start, end] = filter.deleted_at_timeRange;
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'name';
|
||||
}
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
deleted_at_time: {
|
||||
...where.deleted_at_time,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
static get ASSOCIATIONS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
deleted_at_time: {
|
||||
...where.deleted_at_time,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
// Use undefined for missing fields so they're skipped during update
|
||||
// Only include fields that are explicitly provided in data
|
||||
// Note: transition_settings moved to project_transition_settings table
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
name: 'name' in data ? data.name || null : undefined,
|
||||
slug: 'slug' in data ? data.slug || null : undefined,
|
||||
description: 'description' in data ? data.description || null : undefined,
|
||||
logo_url: 'logo_url' in data ? data.logo_url || null : undefined,
|
||||
favicon_url: 'favicon_url' in data ? data.favicon_url || null : undefined,
|
||||
og_image_url:
|
||||
'og_image_url' in data ? data.og_image_url || null : undefined,
|
||||
design_width: 'design_width' in data ? data.design_width : undefined,
|
||||
design_height: 'design_height' in data ? data.design_height : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
static get DEFAULT_INCLUDES() {
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
if (filter.phase) {
|
||||
where = {
|
||||
...where,
|
||||
phase: filter.phase,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_deleted) {
|
||||
where = {
|
||||
...where,
|
||||
is_deleted: filter.is_deleted,
|
||||
};
|
||||
}
|
||||
|
||||
static get ALL_INCLUDES() {
|
||||
return [
|
||||
{ association: 'project_memberships_project' },
|
||||
{ association: 'assets_project' },
|
||||
{ association: 'presigned_url_requests_project' },
|
||||
{ association: 'tour_pages_project' },
|
||||
{ association: 'project_audio_tracks_project' },
|
||||
{ association: 'publish_events_project' },
|
||||
{ association: 'pwa_caches_project' },
|
||||
{ association: 'access_logs_project' },
|
||||
];
|
||||
}
|
||||
|
||||
static async findBy(where, options = {}) {
|
||||
const transaction = options.transaction;
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
const queryWhere = { ...where };
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
const runtimeEnvironment = getRuntimeEnvironment(options);
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
|
||||
if (runtimeEnvironment) {
|
||||
where = {
|
||||
...where,
|
||||
phase: runtimeEnvironment,
|
||||
};
|
||||
}
|
||||
|
||||
if (runtimeProjectSlug) {
|
||||
where = {
|
||||
...where,
|
||||
slug: runtimeProjectSlug,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.projects.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
// Runtime access: filter by project slug
|
||||
// Skip if finding by ID (unambiguous lookup)
|
||||
if (runtimeProjectSlug && !where.id) {
|
||||
queryWhere.slug = runtimeProjectSlug;
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
const include =
|
||||
options.include !== undefined ? options.include : this.DEFAULT_INCLUDES;
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'projects',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
const record = await this.MODEL.findOne({
|
||||
where: queryWhere,
|
||||
transaction,
|
||||
include,
|
||||
});
|
||||
|
||||
if (!record) return null;
|
||||
return record.get({ plain: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new project and auto-snapshot global element defaults
|
||||
*/
|
||||
static async create(data, options = {}) {
|
||||
const transaction = options.transaction;
|
||||
|
||||
// Create the project using parent's create
|
||||
const project = await super.create(data, options);
|
||||
|
||||
// Auto-snapshot global element defaults to the new project
|
||||
// Errors propagate to service layer → transaction rollback → proper error to client
|
||||
const Project_element_defaultsDBApi = require('./project_element_defaults');
|
||||
await Project_element_defaultsDBApi.snapshotGlobalDefaults(project.id, {
|
||||
...options,
|
||||
transaction,
|
||||
});
|
||||
|
||||
return project;
|
||||
}
|
||||
|
||||
static async findAll(filter = {}, options = {}) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
const currentPage = +filter.page || 0;
|
||||
const offset = currentPage * limit;
|
||||
|
||||
let where = {};
|
||||
let include = [];
|
||||
|
||||
if (filter.id) {
|
||||
if (!Utils.isValidUuid(filter.id)) {
|
||||
return { rows: [], count: 0 };
|
||||
}
|
||||
where.id = filter.id;
|
||||
}
|
||||
|
||||
for (const field of this.SEARCHABLE_FIELDS) {
|
||||
if (filter[field]) {
|
||||
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
|
||||
}
|
||||
}
|
||||
|
||||
for (const field of this.RANGE_FIELDS) {
|
||||
const rangeKey = `${field}Range`;
|
||||
if (filter[rangeKey]) {
|
||||
const [start, end] = filter[rangeKey];
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where[field] = { ...where[field], [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where[field] = { ...where[field], [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const records = await db.projects.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
for (const field of this.ENUM_FIELDS) {
|
||||
if (filter[field] !== undefined) {
|
||||
where[field] = filter[field];
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where.active = filter.active === true || filter.active === 'true';
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
|
||||
// Runtime access: filter by project slug
|
||||
const runtimeProjectSlug = getRuntimeProjectSlug(options);
|
||||
|
||||
if (runtimeProjectSlug) {
|
||||
where.slug = runtimeProjectSlug;
|
||||
}
|
||||
|
||||
try {
|
||||
if (options.countOnly) {
|
||||
const count = await this.MODEL.count({
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
transaction: options.transaction,
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
return {
|
||||
rows: [],
|
||||
count,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options.transaction,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
};
|
||||
|
||||
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
|
||||
return {
|
||||
rows,
|
||||
count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ProjectsDBApi;
|
||||
|
||||
@ -1,703 +1,105 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class Publish_eventsDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.publish_events;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'publish_events';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['title', 'description', 'error_message'];
|
||||
}
|
||||
|
||||
module.exports = class Publish_eventsDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return [
|
||||
'started_at',
|
||||
'finished_at',
|
||||
'pages_copied',
|
||||
'transitions_copied',
|
||||
'audios_copied',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['from_environment', 'to_environment', 'status'];
|
||||
}
|
||||
|
||||
const publish_events = await db.publish_events.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
title: data.title
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
description: data.description
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
from_environment: data.from_environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
to_environment: data.to_environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
started_at: data.started_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
finished_at: data.finished_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
status: data.status
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
error_message: data.error_message
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
pages_copied: data.pages_copied
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
transitions_copied: data.transitions_copied
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
audios_copied: data.audios_copied
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get UUID_FIELDS() {
|
||||
return ['projectId'];
|
||||
}
|
||||
|
||||
|
||||
await publish_events.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await publish_events.setUser( data.user || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'title',
|
||||
'description',
|
||||
'from_environment',
|
||||
'to_environment',
|
||||
'status',
|
||||
'pages_copied',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'status';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [
|
||||
{ field: 'project', setter: 'setProject', isArray: false },
|
||||
{ field: 'user', setter: 'setUser', isArray: false },
|
||||
];
|
||||
}
|
||||
|
||||
return publish_events;
|
||||
}
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'project' }, { association: 'user' }];
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const publish_eventsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
title: item.title
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
description: item.description
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
from_environment: item.from_environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
to_environment: item.to_environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
started_at: item.started_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
finished_at: item.finished_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
status: item.status
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
error_message: item.error_message
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
pages_copied: item.pages_copied
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
transitions_copied: item.transitions_copied
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
audios_copied: item.audios_copied
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const publish_events = await db.publish_events.bulkCreate(publish_eventsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return publish_events;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const publish_events = await db.publish_events.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.title !== undefined) updatePayload.title = data.title;
|
||||
|
||||
|
||||
if (data.description !== undefined) updatePayload.description = data.description;
|
||||
|
||||
|
||||
if (data.from_environment !== undefined) updatePayload.from_environment = data.from_environment;
|
||||
|
||||
|
||||
if (data.to_environment !== undefined) updatePayload.to_environment = data.to_environment;
|
||||
|
||||
|
||||
if (data.started_at !== undefined) updatePayload.started_at = data.started_at;
|
||||
|
||||
|
||||
if (data.finished_at !== undefined) updatePayload.finished_at = data.finished_at;
|
||||
|
||||
|
||||
if (data.status !== undefined) updatePayload.status = data.status;
|
||||
|
||||
|
||||
if (data.error_message !== undefined) updatePayload.error_message = data.error_message;
|
||||
|
||||
|
||||
if (data.pages_copied !== undefined) updatePayload.pages_copied = data.pages_copied;
|
||||
|
||||
|
||||
if (data.transitions_copied !== undefined) updatePayload.transitions_copied = data.transitions_copied;
|
||||
|
||||
|
||||
if (data.audios_copied !== undefined) updatePayload.audios_copied = data.audios_copied;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await publish_events.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await publish_events.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
if (data.user !== undefined) {
|
||||
await publish_events.setUser(
|
||||
|
||||
data.user,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return publish_events;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const publish_events = await db.publish_events.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of publish_events) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of publish_events) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return publish_events;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const publish_events = await db.publish_events.findByPk(id, options);
|
||||
|
||||
await publish_events.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await publish_events.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return publish_events;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const publish_events = await db.publish_events.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!publish_events) {
|
||||
return publish_events;
|
||||
}
|
||||
|
||||
const output = publish_events.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await publish_events.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.user = await publish_events.getUser({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [
|
||||
{ model: db.projects, as: 'project', required: false },
|
||||
{ model: db.users, as: 'user', required: false },
|
||||
];
|
||||
}
|
||||
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'project',
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'name',
|
||||
},
|
||||
|
||||
{
|
||||
filterKey: 'user',
|
||||
model: db.users,
|
||||
as: 'user',
|
||||
|
||||
where: filter.user ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.user.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
firstName: {
|
||||
[Op.or]: filter.user.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'firstName',
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
title: data.title || null,
|
||||
description: data.description || null,
|
||||
from_environment: data.from_environment || null,
|
||||
to_environment: data.to_environment || null,
|
||||
started_at: data.started_at || null,
|
||||
finished_at: data.finished_at || null,
|
||||
status: data.status || null,
|
||||
error_message: data.error_message || null,
|
||||
pages_copied: data.pages_copied || null,
|
||||
transitions_copied: data.transitions_copied || null,
|
||||
audios_copied: data.audios_copied || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.title) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'publish_events',
|
||||
'title',
|
||||
filter.title,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.description) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'publish_events',
|
||||
'description',
|
||||
filter.description,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.error_message) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'publish_events',
|
||||
'error_message',
|
||||
filter.error_message,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.started_atRange) {
|
||||
const [start, end] = filter.started_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
started_at: {
|
||||
...where.started_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
started_at: {
|
||||
...where.started_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.finished_atRange) {
|
||||
const [start, end] = filter.finished_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
finished_at: {
|
||||
...where.finished_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
finished_at: {
|
||||
...where.finished_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.pages_copiedRange) {
|
||||
const [start, end] = filter.pages_copiedRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
pages_copied: {
|
||||
...where.pages_copied,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
pages_copied: {
|
||||
...where.pages_copied,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.transitions_copiedRange) {
|
||||
const [start, end] = filter.transitions_copiedRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
transitions_copied: {
|
||||
...where.transitions_copied,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
transitions_copied: {
|
||||
...where.transitions_copied,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.audios_copiedRange) {
|
||||
const [start, end] = filter.audios_copiedRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
audios_copied: {
|
||||
...where.audios_copied,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
audios_copied: {
|
||||
...where.audios_copied,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.from_environment) {
|
||||
where = {
|
||||
...where,
|
||||
from_environment: filter.from_environment,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.to_environment) {
|
||||
where = {
|
||||
...where,
|
||||
to_environment: filter.to_environment,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.status) {
|
||||
where = {
|
||||
...where,
|
||||
status: filter.status,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.publish_events.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'publish_events',
|
||||
'status',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.publish_events.findAll({
|
||||
attributes: [ 'id', 'status' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['status', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.status,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = Publish_eventsDBApi;
|
||||
|
||||
@ -1,499 +1,76 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class Pwa_cachesDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.pwa_caches;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'pwa_caches';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['cache_version', 'manifest_json', 'asset_list_json'];
|
||||
}
|
||||
|
||||
module.exports = class Pwa_cachesDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['generated_at'];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return ['environment', 'is_active'];
|
||||
}
|
||||
|
||||
const pwa_caches = await db.pwa_caches.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
environment: data.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cache_version: data.cache_version
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
manifest_json: data.manifest_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
asset_list_json: data.asset_list_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
generated_at: data.generated_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_active: data.is_active
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'environment',
|
||||
'cache_version',
|
||||
'is_active',
|
||||
'generated_at',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
await pwa_caches.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'cache_version';
|
||||
}
|
||||
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'project', setter: 'setProject', isArray: false }];
|
||||
}
|
||||
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'project' }];
|
||||
}
|
||||
|
||||
return pwa_caches;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const pwa_cachesData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
environment: item.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
cache_version: item.cache_version
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
manifest_json: item.manifest_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
asset_list_json: item.asset_list_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
generated_at: item.generated_at
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
is_active: item.is_active
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const pwa_caches = await db.pwa_caches.bulkCreate(pwa_cachesData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return pwa_caches;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const pwa_caches = await db.pwa_caches.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.environment !== undefined) updatePayload.environment = data.environment;
|
||||
|
||||
|
||||
if (data.cache_version !== undefined) updatePayload.cache_version = data.cache_version;
|
||||
|
||||
|
||||
if (data.manifest_json !== undefined) updatePayload.manifest_json = data.manifest_json;
|
||||
|
||||
|
||||
if (data.asset_list_json !== undefined) updatePayload.asset_list_json = data.asset_list_json;
|
||||
|
||||
|
||||
if (data.generated_at !== undefined) updatePayload.generated_at = data.generated_at;
|
||||
|
||||
|
||||
if (data.is_active !== undefined) updatePayload.is_active = data.is_active;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await pwa_caches.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await pwa_caches.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return pwa_caches;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const pwa_caches = await db.pwa_caches.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of pwa_caches) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of pwa_caches) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return pwa_caches;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const pwa_caches = await db.pwa_caches.findByPk(id, options);
|
||||
|
||||
await pwa_caches.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await pwa_caches.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return pwa_caches;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const pwa_caches = await db.pwa_caches.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!pwa_caches) {
|
||||
return pwa_caches;
|
||||
}
|
||||
|
||||
const output = pwa_caches.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await pwa_caches.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [{ model: db.projects, as: 'project', required: false }];
|
||||
}
|
||||
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'project',
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
searchField: 'name',
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
environment: data.environment || null,
|
||||
cache_version: data.cache_version || null,
|
||||
manifest_json: data.manifest_json || null,
|
||||
asset_list_json: data.asset_list_json || null,
|
||||
generated_at: data.generated_at || null,
|
||||
is_active: data.is_active || false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.cache_version) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'pwa_caches',
|
||||
'cache_version',
|
||||
filter.cache_version,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.manifest_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'pwa_caches',
|
||||
'manifest_json',
|
||||
filter.manifest_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.asset_list_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'pwa_caches',
|
||||
'asset_list_json',
|
||||
filter.asset_list_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.generated_atRange) {
|
||||
const [start, end] = filter.generated_atRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
generated_at: {
|
||||
...where.generated_at,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
generated_at: {
|
||||
...where.generated_at,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.environment) {
|
||||
where = {
|
||||
...where,
|
||||
environment: filter.environment,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.is_active) {
|
||||
where = {
|
||||
...where,
|
||||
is_active: filter.is_active,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.pwa_caches.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'pwa_caches',
|
||||
'cache_version',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.pwa_caches.findAll({
|
||||
attributes: [ 'id', 'cache_version' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['cache_version', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.cache_version,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = Pwa_cachesDBApi;
|
||||
|
||||
@ -1,405 +1,71 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
class RolesDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.roles;
|
||||
}
|
||||
|
||||
static get TABLE_NAME() {
|
||||
return 'roles';
|
||||
}
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return ['name', 'role_customization'];
|
||||
}
|
||||
|
||||
module.exports = class RolesDBApi {
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
static get ENUM_FIELDS() {
|
||||
return [];
|
||||
}
|
||||
|
||||
const roles = await db.roles.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
role_customization: data.role_customization
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
static get CSV_FIELDS() {
|
||||
return ['id', 'name', 'role_customization', 'createdAt'];
|
||||
}
|
||||
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'name';
|
||||
}
|
||||
|
||||
|
||||
await roles.setPermissions(data.permissions || [], {
|
||||
transaction,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const rolesData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
role_customization: item.role_customization
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const roles = await db.roles.bulkCreate(rolesData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const roles = await db.roles.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.role_customization !== undefined) updatePayload.role_customization = data.role_customization;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await roles.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (data.permissions !== undefined) {
|
||||
await roles.setPermissions(data.permissions, { transaction });
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of roles) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of roles) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findByPk(id, options);
|
||||
|
||||
await roles.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await roles.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findOne({
|
||||
where,
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (!roles) {
|
||||
return roles;
|
||||
}
|
||||
|
||||
const output = roles.get({plain: true});
|
||||
|
||||
|
||||
output.users_app_role = await roles.getUsers_app_role({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.permissions = await roles.getPermissions({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'permissions', setter: 'setPermissions', isArray: true }];
|
||||
}
|
||||
|
||||
static get FIND_BY_INCLUDES() {
|
||||
return [{ association: 'users_app_role' }, { association: 'permissions' }];
|
||||
}
|
||||
|
||||
static get FIND_ALL_INCLUDES() {
|
||||
return [
|
||||
{
|
||||
model: db.permissions,
|
||||
as: 'permissions',
|
||||
required: false,
|
||||
},
|
||||
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
static get RELATION_FILTERS() {
|
||||
return [
|
||||
{
|
||||
filterKey: 'permissions',
|
||||
model: db.permissions,
|
||||
as: 'permissions_filter',
|
||||
searchField: 'name',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'roles',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.role_customization) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'roles',
|
||||
'role_customization',
|
||||
filter.role_customization,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
name: data.name || null,
|
||||
role_customization: data.role_customization || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.permissions) {
|
||||
const searchTerms = filter.permissions.split('|');
|
||||
|
||||
include = [
|
||||
{
|
||||
model: db.permissions,
|
||||
as: 'permissions_filter',
|
||||
required: searchTerms.length > 0,
|
||||
where: searchTerms.length > 0 ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: searchTerms.map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: searchTerms.map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
}
|
||||
]
|
||||
} : undefined
|
||||
},
|
||||
...include,
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.roles.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'roles',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.roles.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
module.exports = RolesDBApi;
|
||||
|
||||
@ -1,25 +1,32 @@
|
||||
/**
|
||||
* Runtime Context Helpers
|
||||
* For route-based environment access via X-Runtime-Environment header
|
||||
*/
|
||||
|
||||
function getRuntimeContext(options = {}) {
|
||||
return (options || {}).runtimeContext || null;
|
||||
}
|
||||
|
||||
function getRuntimeEnvironment(options = {}) {
|
||||
const runtimeContext = getRuntimeContext(options);
|
||||
|
||||
if (!runtimeContext) return null;
|
||||
if (runtimeContext.mode === 'stage') return 'stage';
|
||||
if (runtimeContext.mode === 'production') return 'production';
|
||||
|
||||
// Read from header (route-based mode)
|
||||
// SECURITY: Only allow 'production' and 'stage' from header
|
||||
// to prevent unauthorized access to dev data
|
||||
if (runtimeContext.headerEnvironment === 'production') return 'production';
|
||||
if (runtimeContext.headerEnvironment === 'stage') return 'stage';
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function getRuntimeProjectSlug(options = {}) {
|
||||
const runtimeContext = getRuntimeContext(options);
|
||||
return runtimeContext?.projectSlug || null;
|
||||
return runtimeContext?.headerProjectSlug || null;
|
||||
}
|
||||
|
||||
function applyRuntimeEnvironment(where = {}, options = {}) {
|
||||
const environment = getRuntimeEnvironment(options);
|
||||
|
||||
if (!environment) return where;
|
||||
|
||||
return {
|
||||
@ -30,7 +37,6 @@ function applyRuntimeEnvironment(where = {}, options = {}) {
|
||||
|
||||
function applyRuntimeProjectFilter(projectInclude = {}, options = {}) {
|
||||
const projectSlug = getRuntimeProjectSlug(options);
|
||||
|
||||
if (!projectSlug) return projectInclude;
|
||||
|
||||
return {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
|
||||
const GenericDBApi = require('./base.api');
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const {
|
||||
@ -6,640 +6,263 @@ const {
|
||||
applyRuntimeProjectFilter,
|
||||
} = require('./runtime-context');
|
||||
|
||||
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class Tour_pagesDBApi {
|
||||
|
||||
class Tour_pagesDBApi extends GenericDBApi {
|
||||
static get MODEL() {
|
||||
return db.tour_pages;
|
||||
}
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const projectId = data.project || data.projectId || null;
|
||||
static get TABLE_NAME() {
|
||||
return 'tour_pages';
|
||||
}
|
||||
|
||||
const tour_pages = await db.tour_pages.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
environment: data.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
source_key: data.source_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: data.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
sort_order: data.sort_order
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_image_url: data.background_image_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_video_url: data.background_video_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_audio_url: data.background_audio_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_loop: data.background_loop
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
requires_auth: data.requires_auth
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
ui_schema_json: data.ui_schema_json
|
||||
||
|
||||
null
|
||||
,
|
||||
projectId,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
static get SEARCHABLE_FIELDS() {
|
||||
return [
|
||||
'source_key',
|
||||
'name',
|
||||
'slug',
|
||||
'background_image_url',
|
||||
'background_video_url',
|
||||
'background_audio_url',
|
||||
'ui_schema_json',
|
||||
];
|
||||
}
|
||||
|
||||
static get RANGE_FIELDS() {
|
||||
return ['sort_order'];
|
||||
}
|
||||
|
||||
static get ENUM_FIELDS() {
|
||||
return ['environment', 'background_loop', 'requires_auth'];
|
||||
}
|
||||
|
||||
static get UUID_FIELDS() {
|
||||
return ['projectId'];
|
||||
}
|
||||
|
||||
static get CSV_FIELDS() {
|
||||
return [
|
||||
'id',
|
||||
'environment',
|
||||
'source_key',
|
||||
'name',
|
||||
'slug',
|
||||
'sort_order',
|
||||
'createdAt',
|
||||
];
|
||||
}
|
||||
|
||||
static get AUTOCOMPLETE_FIELD() {
|
||||
return 'name';
|
||||
}
|
||||
|
||||
static get ASSOCIATIONS() {
|
||||
return [{ field: 'project', setter: 'setProject', isArray: false }];
|
||||
}
|
||||
|
||||
static getFieldMapping(data) {
|
||||
return {
|
||||
id: data.id || undefined,
|
||||
environment: data.environment || null,
|
||||
source_key: data.source_key || null,
|
||||
name: data.name || null,
|
||||
slug: data.slug || null,
|
||||
sort_order: data.sort_order || null,
|
||||
background_image_url: data.background_image_url || null,
|
||||
background_video_url: data.background_video_url || null,
|
||||
background_audio_url: data.background_audio_url || null,
|
||||
background_loop: data.background_loop || false,
|
||||
background_video_autoplay:
|
||||
data.background_video_autoplay !== undefined
|
||||
? data.background_video_autoplay
|
||||
: true,
|
||||
background_video_loop:
|
||||
data.background_video_loop !== undefined
|
||||
? data.background_video_loop
|
||||
: true,
|
||||
background_video_muted:
|
||||
data.background_video_muted !== undefined
|
||||
? data.background_video_muted
|
||||
: true,
|
||||
background_video_start_time:
|
||||
data.background_video_start_time !== undefined
|
||||
? data.background_video_start_time
|
||||
: null,
|
||||
background_video_end_time:
|
||||
data.background_video_end_time !== undefined
|
||||
? data.background_video_end_time
|
||||
: null,
|
||||
design_width: data.design_width !== undefined ? data.design_width : null,
|
||||
design_height:
|
||||
data.design_height !== undefined ? data.design_height : null,
|
||||
requires_auth: data.requires_auth || false,
|
||||
ui_schema_json: data.ui_schema_json || null,
|
||||
};
|
||||
}
|
||||
|
||||
static async create(data, options = {}) {
|
||||
const currentUser = options.currentUser || { id: null };
|
||||
const transaction = options.transaction;
|
||||
const projectId = data.project || data.projectId || null;
|
||||
|
||||
const record = await this.MODEL.create(
|
||||
{
|
||||
...this.getFieldMapping(data),
|
||||
projectId,
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
|
||||
await tour_pages.setProject(projectId, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await record.setProject(projectId, { transaction });
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return tour_pages;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const tour_pagesData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
environment: item.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
source_key: item.source_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: item.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
sort_order: item.sort_order
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_image_url: item.background_image_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_video_url: item.background_video_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_audio_url: item.background_audio_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
background_loop: item.background_loop
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
requires_auth: item.requires_auth
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
ui_schema_json: item.ui_schema_json
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const tour_pages = await db.tour_pages.bulkCreate(tour_pagesData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return tour_pages;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const tour_pages = await db.tour_pages.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.environment !== undefined) updatePayload.environment = data.environment;
|
||||
|
||||
|
||||
if (data.source_key !== undefined) updatePayload.source_key = data.source_key;
|
||||
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.slug !== undefined) updatePayload.slug = data.slug;
|
||||
|
||||
|
||||
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
|
||||
|
||||
|
||||
if (data.background_image_url !== undefined) updatePayload.background_image_url = data.background_image_url;
|
||||
|
||||
|
||||
if (data.background_video_url !== undefined) updatePayload.background_video_url = data.background_video_url;
|
||||
|
||||
|
||||
if (data.background_audio_url !== undefined) updatePayload.background_audio_url = data.background_audio_url;
|
||||
|
||||
|
||||
if (data.background_loop !== undefined) updatePayload.background_loop = data.background_loop;
|
||||
|
||||
|
||||
if (data.requires_auth !== undefined) updatePayload.requires_auth = data.requires_auth;
|
||||
|
||||
|
||||
if (data.ui_schema_json !== undefined) updatePayload.ui_schema_json = data.ui_schema_json;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await tour_pages.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await tour_pages.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return tour_pages;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const tour_pages = await db.tour_pages.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of tour_pages) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of tour_pages) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return tour_pages;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const tour_pages = await db.tour_pages.findByPk(id, options);
|
||||
|
||||
await tour_pages.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await tour_pages.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return tour_pages;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
|
||||
const projectInclude = applyRuntimeProjectFilter(
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const tour_pages = await db.tour_pages.findOne(
|
||||
{ where: queryWhere, include: [projectInclude], transaction },
|
||||
);
|
||||
|
||||
if (!tour_pages) {
|
||||
return tour_pages;
|
||||
}
|
||||
|
||||
const output = tour_pages.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.page_elements_page = await tour_pages.getPage_elements_page({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
output.page_links_from_page = await tour_pages.getPage_links_from_page({
|
||||
transaction
|
||||
});
|
||||
|
||||
output.page_links_to_page = await tour_pages.getPage_links_to_page({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await tour_pages.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
return record;
|
||||
}
|
||||
|
||||
static async findBy(where, options = {}) {
|
||||
const transaction = options.transaction;
|
||||
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
|
||||
const projectInclude = applyRuntimeProjectFilter(
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const record = await this.MODEL.findOne({
|
||||
where: queryWhere,
|
||||
transaction,
|
||||
include: [projectInclude],
|
||||
});
|
||||
|
||||
if (!record) return null;
|
||||
return record.get({ plain: true });
|
||||
}
|
||||
|
||||
static async findAll(filter = {}, options = {}) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
const currentPage = +filter.page || 0;
|
||||
const offset = currentPage * limit;
|
||||
|
||||
let where = {};
|
||||
|
||||
const terms = filter.project ? filter.project.split('|') : [];
|
||||
const validUuids = Utils.filterValidUuids(terms);
|
||||
|
||||
let include = [
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
where: filter.project
|
||||
? {
|
||||
[Op.or]: [
|
||||
...(validUuids.length > 0
|
||||
? [{ id: { [Op.in]: validUuids } }]
|
||||
: []),
|
||||
{
|
||||
name: {
|
||||
[Op.or]: terms.map((term) => ({ [Op.iLike]: `%${term}%` })),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: {},
|
||||
},
|
||||
];
|
||||
include[0] = applyRuntimeProjectFilter(include[0], options);
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
include[0] = applyRuntimeProjectFilter(include[0], options);
|
||||
|
||||
|
||||
if (filter.source_key) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'source_key',
|
||||
filter.source_key,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.slug) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'slug',
|
||||
filter.slug,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.background_image_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'background_image_url',
|
||||
filter.background_image_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.background_video_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'background_video_url',
|
||||
filter.background_video_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.background_audio_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'background_audio_url',
|
||||
filter.background_audio_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.ui_schema_json) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'tour_pages',
|
||||
'ui_schema_json',
|
||||
filter.ui_schema_json,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.sort_orderRange) {
|
||||
const [start, end] = filter.sort_orderRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.environment) {
|
||||
where = {
|
||||
...where,
|
||||
environment: filter.environment,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.background_loop) {
|
||||
where = {
|
||||
...where,
|
||||
background_loop: filter.background_loop,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.requires_auth) {
|
||||
where = {
|
||||
...where,
|
||||
requires_auth: filter.requires_auth,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
where = applyRuntimeEnvironment(where, options);
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.tour_pages.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
if (filter.id) {
|
||||
if (!Utils.isValidUuid(filter.id)) {
|
||||
return { rows: [], count: 0 };
|
||||
}
|
||||
where.id = filter.id;
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'tour_pages',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.tour_pages.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
for (const field of this.SEARCHABLE_FIELDS) {
|
||||
if (filter[field]) {
|
||||
where[Op.and] = Utils.ilike(this.TABLE_NAME, field, filter[field]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
for (const field of this.RANGE_FIELDS) {
|
||||
const rangeKey = `${field}Range`;
|
||||
if (filter[rangeKey]) {
|
||||
const [start, end] = filter[rangeKey];
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where[field] = { ...where[field], [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where[field] = { ...where[field], [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const field of this.ENUM_FIELDS) {
|
||||
if (filter[field] !== undefined) {
|
||||
where[field] = filter[field];
|
||||
}
|
||||
}
|
||||
|
||||
// Validate and filter by UUID fields (e.g., projectId)
|
||||
for (const field of this.UUID_FIELDS) {
|
||||
if (filter[field] !== undefined) {
|
||||
if (!Utils.isValidUuid(filter[field])) {
|
||||
return { rows: [], count: 0 };
|
||||
}
|
||||
where[field] = filter[field];
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where.active = filter.active === true || filter.active === 'true';
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.gte]: start };
|
||||
}
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where.createdAt = { ...where.createdAt, [Op.lte]: end };
|
||||
}
|
||||
}
|
||||
|
||||
where = applyRuntimeEnvironment(where, options);
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options.transaction,
|
||||
};
|
||||
|
||||
if (!options.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await this.MODEL.findAndCountAll(queryOptions);
|
||||
return {
|
||||
rows: options.countOnly ? [] : rows,
|
||||
count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Tour_pagesDBApi;
|
||||
|
||||
@ -1,565 +0,0 @@
|
||||
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
const {
|
||||
applyRuntimeEnvironment,
|
||||
applyRuntimeProjectFilter,
|
||||
} = require('./runtime-context');
|
||||
|
||||
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class TransitionsDBApi {
|
||||
|
||||
|
||||
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const transitions = await db.transitions.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
environment: data.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
source_key: data.source_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
name: data.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: data.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
video_url: data.video_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
audio_url: data.audio_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
supports_reverse: data.supports_reverse
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
duration_sec: data.duration_sec
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
|
||||
await transitions.setProject( data.project || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return transitions;
|
||||
}
|
||||
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const transitionsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
environment: item.environment
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
source_key: item.source_key
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
name: item.name
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
slug: item.slug
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
video_url: item.video_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
audio_url: item.audio_url
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
supports_reverse: item.supports_reverse
|
||||
||
|
||||
false
|
||||
|
||||
,
|
||||
|
||||
duration_sec: item.duration_sec
|
||||
||
|
||||
null
|
||||
,
|
||||
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const transitions = await db.transitions.bulkCreate(transitionsData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
|
||||
return transitions;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
|
||||
const transitions = await db.transitions.findByPk(id, {transaction});
|
||||
|
||||
|
||||
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.environment !== undefined) updatePayload.environment = data.environment;
|
||||
|
||||
|
||||
if (data.source_key !== undefined) updatePayload.source_key = data.source_key;
|
||||
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
|
||||
if (data.slug !== undefined) updatePayload.slug = data.slug;
|
||||
|
||||
|
||||
if (data.video_url !== undefined) updatePayload.video_url = data.video_url;
|
||||
|
||||
|
||||
if (data.audio_url !== undefined) updatePayload.audio_url = data.audio_url;
|
||||
|
||||
|
||||
if (data.supports_reverse !== undefined) updatePayload.supports_reverse = data.supports_reverse;
|
||||
|
||||
|
||||
if (data.duration_sec !== undefined) updatePayload.duration_sec = data.duration_sec;
|
||||
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await transitions.update(updatePayload, {transaction});
|
||||
|
||||
|
||||
|
||||
if (data.project !== undefined) {
|
||||
await transitions.setProject(
|
||||
|
||||
data.project,
|
||||
|
||||
{ transaction }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return transitions;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const transitions = await db.transitions.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of transitions) {
|
||||
await record.update(
|
||||
{deletedBy: currentUser.id},
|
||||
{transaction}
|
||||
);
|
||||
}
|
||||
for (const record of transitions) {
|
||||
await record.destroy({transaction});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return transitions;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || {id: null};
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const transitions = await db.transitions.findByPk(id, options);
|
||||
|
||||
await transitions.update({
|
||||
deletedBy: currentUser.id
|
||||
}, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transitions.destroy({
|
||||
transaction
|
||||
});
|
||||
|
||||
return transitions;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const queryWhere = applyRuntimeEnvironment({ ...where }, options);
|
||||
const projectInclude = applyRuntimeProjectFilter(
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
const transitions = await db.transitions.findOne(
|
||||
{ where: queryWhere, include: [projectInclude], transaction },
|
||||
);
|
||||
|
||||
if (!transitions) {
|
||||
return transitions;
|
||||
}
|
||||
|
||||
const output = transitions.get({plain: true});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.page_links_transition = await transitions.getPage_links_transition({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
output.project = await transitions.getProject({
|
||||
transaction
|
||||
});
|
||||
|
||||
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(
|
||||
filter,
|
||||
options
|
||||
) {
|
||||
filter = filter || {};
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
let include = [
|
||||
|
||||
{
|
||||
model: db.projects,
|
||||
as: 'project',
|
||||
|
||||
where: filter.project ? {
|
||||
[Op.or]: [
|
||||
{ id: { [Op.in]: filter.project.split('|').map(term => Utils.uuid(term)) } },
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.project.split('|').map(term => ({ [Op.iLike]: `%${term}%` }))
|
||||
}
|
||||
},
|
||||
]
|
||||
} : {},
|
||||
|
||||
},
|
||||
|
||||
|
||||
|
||||
];
|
||||
include[0] = applyRuntimeProjectFilter(include[0], options);
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.source_key) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'transitions',
|
||||
'source_key',
|
||||
filter.source_key,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'transitions',
|
||||
'name',
|
||||
filter.name,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.slug) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'transitions',
|
||||
'slug',
|
||||
filter.slug,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.video_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'transitions',
|
||||
'video_url',
|
||||
filter.video_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.audio_url) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'transitions',
|
||||
'audio_url',
|
||||
filter.audio_url,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.duration_secRange) {
|
||||
const [start, end] = filter.duration_secRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
duration_sec: {
|
||||
...where.duration_sec,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
duration_sec: {
|
||||
...where.duration_sec,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true'
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (filter.environment) {
|
||||
where = {
|
||||
...where,
|
||||
environment: filter.environment,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.supports_reverse) {
|
||||
where = {
|
||||
...where,
|
||||
supports_reverse: filter.supports_reverse,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
where = applyRuntimeEnvironment(where, options);
|
||||
|
||||
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order: filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.transitions.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset, ) {
|
||||
let where = {};
|
||||
|
||||
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike(
|
||||
'transitions',
|
||||
'name',
|
||||
query,
|
||||
),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.transitions.findAll({
|
||||
attributes: [ 'id', 'name' ],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
@ -1,233 +0,0 @@
|
||||
const db = require('../models');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class Ui_elementsDBApi {
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const ui_elements = await db.ui_elements.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
element_type: data.element_type ?? null,
|
||||
name: data.name ?? null,
|
||||
settings_json: data.settings_json ?? null,
|
||||
sort_order: data.sort_order ?? 0,
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
return ui_elements;
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const uiElementsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
element_type: item.element_type ?? null,
|
||||
name: item.name ?? null,
|
||||
settings_json: item.settings_json ?? null,
|
||||
sort_order: item.sort_order ?? 0,
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
const ui_elements = await db.ui_elements.bulkCreate(uiElementsData, { transaction });
|
||||
|
||||
return ui_elements;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const ui_elements = await db.ui_elements.findByPk(id, { transaction });
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.element_type !== undefined) updatePayload.element_type = data.element_type;
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
if (data.settings_json !== undefined) updatePayload.settings_json = data.settings_json;
|
||||
if (data.sort_order !== undefined) updatePayload.sort_order = data.sort_order;
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await ui_elements.update(updatePayload, { transaction });
|
||||
|
||||
return ui_elements;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const ui_elements = await db.ui_elements.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (innerTransaction) => {
|
||||
for (const record of ui_elements) {
|
||||
await record.update({ deletedBy: currentUser.id }, { transaction: innerTransaction });
|
||||
}
|
||||
for (const record of ui_elements) {
|
||||
await record.destroy({ transaction: innerTransaction });
|
||||
}
|
||||
});
|
||||
|
||||
return ui_elements;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const ui_elements = await db.ui_elements.findByPk(id, options);
|
||||
|
||||
await ui_elements.update(
|
||||
{
|
||||
deletedBy: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await ui_elements.destroy({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return ui_elements;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const ui_elements = await db.ui_elements.findOne({ where, transaction });
|
||||
|
||||
if (!ui_elements) {
|
||||
return ui_elements;
|
||||
}
|
||||
|
||||
return ui_elements.get({ plain: true });
|
||||
}
|
||||
|
||||
static async findAll(filter, options) {
|
||||
filter = filter || {};
|
||||
const limit = Number(filter.limit) || 0;
|
||||
const currentPage = Number(filter.page) || 0;
|
||||
const offset = limit ? currentPage * limit : undefined;
|
||||
let where = {};
|
||||
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
id: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('ui_elements', 'name', filter.name),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.element_type) {
|
||||
where = {
|
||||
...where,
|
||||
element_type: filter.element_type,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.sort_orderRange) {
|
||||
const [start, end] = filter.sort_orderRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
sort_order: {
|
||||
...where.sort_order,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
let { orderBy = null } = options || {};
|
||||
if (!orderBy) {
|
||||
const sort = filter.sort || 'desc';
|
||||
const field = filter.field || 'createdAt';
|
||||
orderBy = [[field, sort]];
|
||||
}
|
||||
|
||||
const { rows, count } = await db.ui_elements.findAndCountAll({
|
||||
where,
|
||||
limit: limit || undefined,
|
||||
offset,
|
||||
order: orderBy,
|
||||
});
|
||||
|
||||
return {
|
||||
rows,
|
||||
count,
|
||||
};
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit) {
|
||||
let where = {};
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{
|
||||
id: {
|
||||
[Op.eq]: Utils.uuid(query),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
[Op.iLike]: `%${query}%`,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.ui_elements.findAll({
|
||||
attributes: ['id', 'name'],
|
||||
where,
|
||||
limit: Number(limit) || undefined,
|
||||
order: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
};
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,5 +1,3 @@
|
||||
|
||||
|
||||
module.exports = {
|
||||
production: {
|
||||
dialect: 'postgres',
|
||||
@ -8,8 +6,10 @@ module.exports = {
|
||||
database: process.env.DB_NAME,
|
||||
host: process.env.DB_HOST,
|
||||
port: process.env.DB_PORT,
|
||||
logging: console.log,
|
||||
logging: false,
|
||||
seederStorage: 'sequelize',
|
||||
migrationStorage: 'sequelize',
|
||||
migrationStorageTableName: 'SequelizeMeta',
|
||||
},
|
||||
development: {
|
||||
username: 'postgres',
|
||||
@ -19,15 +19,19 @@ module.exports = {
|
||||
host: process.env.DB_HOST || 'localhost',
|
||||
logging: console.log,
|
||||
seederStorage: 'sequelize',
|
||||
migrationStorage: 'sequelize',
|
||||
migrationStorageTableName: 'SequelizeMeta',
|
||||
},
|
||||
dev_stage: {
|
||||
dialect: 'postgres',
|
||||
username: process.env.DB_USER,
|
||||
password: process.env.DB_PASS,
|
||||
database: process.env.DB_NAME,
|
||||
host: process.env.DB_HOST,
|
||||
port: process.env.DB_PORT,
|
||||
logging: console.log,
|
||||
seederStorage: 'sequelize',
|
||||
migrationStorage: 'sequelize',
|
||||
migrationStorageTableName: 'SequelizeMeta',
|
||||
},
|
||||
dev_stage: {
|
||||
dialect: 'postgres',
|
||||
username: process.env.DB_USER,
|
||||
password: process.env.DB_PASS,
|
||||
database: process.env.DB_NAME,
|
||||
host: process.env.DB_HOST,
|
||||
port: process.env.DB_PORT,
|
||||
logging: console.log,
|
||||
seederStorage: 'sequelize',
|
||||
}
|
||||
};
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,124 +0,0 @@
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const rows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.files') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
const tableName = rows[0].regclass_name;
|
||||
|
||||
if (tableName) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.createTable(
|
||||
'files',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
belongsTo: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
},
|
||||
belongsToId: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: true,
|
||||
},
|
||||
belongsToColumn: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
},
|
||||
name: {
|
||||
type: Sequelize.DataTypes.STRING(2083),
|
||||
allowNull: false,
|
||||
},
|
||||
sizeInBytes: {
|
||||
type: Sequelize.DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
privateUrl: {
|
||||
type: Sequelize.DataTypes.STRING(2083),
|
||||
allowNull: true,
|
||||
},
|
||||
publicUrl: {
|
||||
type: Sequelize.DataTypes.STRING(2083),
|
||||
allowNull: false,
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const rows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.files') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
const tableName = rows[0].regclass_name;
|
||||
|
||||
if (!tableName) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.dropTable('files', { transaction });
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -1,95 +0,0 @@
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const rows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.\"usersCustom_permissionsPermissions\"') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
const tableName = rows[0].regclass_name;
|
||||
|
||||
if (tableName) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.createTable(
|
||||
'usersCustom_permissionsPermissions',
|
||||
{
|
||||
createdAt: {
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
users_custom_permissionsId: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: false,
|
||||
primaryKey: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
permissionId: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: false,
|
||||
primaryKey: true,
|
||||
references: {
|
||||
model: 'permissions',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addIndex(
|
||||
'usersCustom_permissionsPermissions',
|
||||
['permissionId'],
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const rows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.\"usersCustom_permissionsPermissions\"') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
const tableName = rows[0].regclass_name;
|
||||
|
||||
if (!tableName) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.dropTable('usersCustom_permissionsPermissions', { transaction });
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -1,123 +0,0 @@
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.page_elements') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const nullableRows = await queryInterface.sequelize.query(
|
||||
`SELECT is_nullable
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'page_elements'
|
||||
AND column_name = 'pageId';`,
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!nullableRows.length || nullableRows[0].is_nullable === 'YES') {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.changeColumn(
|
||||
'page_elements',
|
||||
'pageId',
|
||||
{
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'tour_pages',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.page_elements') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const nullableRows = await queryInterface.sequelize.query(
|
||||
`SELECT is_nullable
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'page_elements'
|
||||
AND column_name = 'pageId';`,
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!nullableRows.length || nullableRows[0].is_nullable === 'NO') {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const nullCountRows = await queryInterface.sequelize.query(
|
||||
'SELECT COUNT(*)::int AS count FROM "page_elements" WHERE "pageId" IS NULL;',
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (Number(nullCountRows[0]?.count || 0) > 0) {
|
||||
throw new Error('Cannot make page_elements.pageId NOT NULL because NULL values exist.');
|
||||
}
|
||||
|
||||
await queryInterface.changeColumn(
|
||||
'page_elements',
|
||||
'pageId',
|
||||
{
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: false,
|
||||
references: {
|
||||
model: 'tour_pages',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -1,105 +0,0 @@
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.page_elements') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const nullableRows = await queryInterface.sequelize.query(
|
||||
`SELECT is_nullable
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'page_elements'
|
||||
AND column_name = 'pageId';`,
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!nullableRows.length || nullableRows[0].is_nullable === 'YES') {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.sequelize.query(
|
||||
'ALTER TABLE "page_elements" ALTER COLUMN "pageId" DROP NOT NULL;',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.page_elements') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const nullableRows = await queryInterface.sequelize.query(
|
||||
`SELECT is_nullable
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'page_elements'
|
||||
AND column_name = 'pageId';`,
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!nullableRows.length || nullableRows[0].is_nullable === 'NO') {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const nullCountRows = await queryInterface.sequelize.query(
|
||||
'SELECT COUNT(*)::int AS count FROM "page_elements" WHERE "pageId" IS NULL;',
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (Number(nullCountRows[0]?.count || 0) > 0) {
|
||||
throw new Error('Cannot make page_elements.pageId NOT NULL because NULL values exist.');
|
||||
}
|
||||
|
||||
await queryInterface.sequelize.query(
|
||||
'ALTER TABLE "page_elements" ALTER COLUMN "pageId" SET NOT NULL;',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -1,108 +0,0 @@
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.ui_elements') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.createTable(
|
||||
'ui_elements',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
element_type: {
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
},
|
||||
name: {
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
settings_json: {
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
sort_order: {
|
||||
type: Sequelize.DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
createdAt: { type: Sequelize.DataTypes.DATE },
|
||||
updatedAt: { type: Sequelize.DataTypes.DATE },
|
||||
deletedAt: { type: Sequelize.DataTypes.DATE },
|
||||
importHash: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addIndex('ui_elements', ['element_type'], { transaction });
|
||||
await queryInterface.addIndex('ui_elements', ['sort_order'], { transaction });
|
||||
await queryInterface.addIndex('ui_elements', ['deletedAt'], { transaction });
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.ui_elements') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.dropTable('ui_elements', { transaction });
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -1,85 +0,0 @@
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.publish_events') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const tableDefinition = await queryInterface.describeTable('publish_events', { transaction });
|
||||
|
||||
if (!tableDefinition.title) {
|
||||
await queryInterface.addColumn(
|
||||
'publish_events',
|
||||
'title',
|
||||
{
|
||||
type: Sequelize.DataTypes.STRING,
|
||||
allowNull: true,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
if (!tableDefinition.description) {
|
||||
await queryInterface.addColumn(
|
||||
'publish_events',
|
||||
'description',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const tableRows = await queryInterface.sequelize.query(
|
||||
"SELECT to_regclass('public.publish_events') AS regclass_name;",
|
||||
{
|
||||
transaction,
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!tableRows[0]?.regclass_name) {
|
||||
await transaction.commit();
|
||||
return;
|
||||
}
|
||||
|
||||
const tableDefinition = await queryInterface.describeTable('publish_events', { transaction });
|
||||
|
||||
if (tableDefinition.description) {
|
||||
await queryInterface.removeColumn('publish_events', 'description', { transaction });
|
||||
}
|
||||
|
||||
if (tableDefinition.title) {
|
||||
await queryInterface.removeColumn('publish_events', 'title', { transaction });
|
||||
}
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,274 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration to add foreign key constraints to all model associations.
|
||||
* This enforces referential integrity at the database level.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
async up(queryInterface) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Helper to add FK constraint safely (checks if exists first)
|
||||
const addForeignKey = async (
|
||||
tableName,
|
||||
columnName,
|
||||
references,
|
||||
onDelete = 'CASCADE',
|
||||
onUpdate = 'CASCADE',
|
||||
) => {
|
||||
const constraintName = `${tableName}_${columnName}_fkey`;
|
||||
|
||||
// Check if constraint already exists
|
||||
const [results] = await queryInterface.sequelize.query(
|
||||
`SELECT constraint_name FROM information_schema.table_constraints
|
||||
WHERE table_name = '${tableName}' AND constraint_name = '${constraintName}'`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
if (results.length === 0) {
|
||||
await queryInterface.addConstraint(tableName, {
|
||||
fields: [columnName],
|
||||
type: 'foreign key',
|
||||
name: constraintName,
|
||||
references: {
|
||||
table: references.table,
|
||||
field: references.field,
|
||||
},
|
||||
onDelete,
|
||||
onUpdate,
|
||||
transaction,
|
||||
});
|
||||
console.log(`Added FK constraint: ${constraintName}`);
|
||||
} else {
|
||||
console.log(`FK constraint already exists: ${constraintName}`);
|
||||
}
|
||||
};
|
||||
|
||||
// asset_variants -> assets
|
||||
await addForeignKey(
|
||||
'asset_variants',
|
||||
'assetId',
|
||||
{ table: 'assets', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// page_elements -> tour_pages
|
||||
await addForeignKey(
|
||||
'page_elements',
|
||||
'pageId',
|
||||
{ table: 'tour_pages', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// page_links -> tour_pages (from_page)
|
||||
await addForeignKey(
|
||||
'page_links',
|
||||
'from_pageId',
|
||||
{ table: 'tour_pages', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// page_links -> tour_pages (to_page)
|
||||
await addForeignKey(
|
||||
'page_links',
|
||||
'to_pageId',
|
||||
{ table: 'tour_pages', field: 'id' },
|
||||
'SET NULL',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// page_links -> transitions
|
||||
await addForeignKey(
|
||||
'page_links',
|
||||
'transitionId',
|
||||
{ table: 'transitions', field: 'id' },
|
||||
'SET NULL',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// assets -> projects
|
||||
await addForeignKey(
|
||||
'assets',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// tour_pages -> projects
|
||||
await addForeignKey(
|
||||
'tour_pages',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// transitions -> projects
|
||||
await addForeignKey(
|
||||
'transitions',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// project_memberships -> projects
|
||||
await addForeignKey(
|
||||
'project_memberships',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// project_memberships -> users
|
||||
await addForeignKey(
|
||||
'project_memberships',
|
||||
'userId',
|
||||
{ table: 'users', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// presigned_url_requests -> projects
|
||||
await addForeignKey(
|
||||
'presigned_url_requests',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// presigned_url_requests -> users
|
||||
await addForeignKey(
|
||||
'presigned_url_requests',
|
||||
'userId',
|
||||
{ table: 'users', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// project_audio_tracks -> projects
|
||||
await addForeignKey(
|
||||
'project_audio_tracks',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// publish_events -> projects
|
||||
await addForeignKey(
|
||||
'publish_events',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// publish_events -> users (SET NULL to preserve audit trail)
|
||||
await addForeignKey(
|
||||
'publish_events',
|
||||
'userId',
|
||||
{ table: 'users', field: 'id' },
|
||||
'SET NULL',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// pwa_caches -> projects
|
||||
await addForeignKey(
|
||||
'pwa_caches',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// access_logs -> projects
|
||||
await addForeignKey(
|
||||
'access_logs',
|
||||
'projectId',
|
||||
{ table: 'projects', field: 'id' },
|
||||
'CASCADE',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// access_logs -> users (SET NULL to preserve audit trail)
|
||||
await addForeignKey(
|
||||
'access_logs',
|
||||
'userId',
|
||||
{ table: 'users', field: 'id' },
|
||||
'SET NULL',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
// users -> roles (SET NULL so deleting role doesn't delete users)
|
||||
await addForeignKey(
|
||||
'users',
|
||||
'app_roleId',
|
||||
{ table: 'roles', field: 'id' },
|
||||
'SET NULL',
|
||||
'CASCADE',
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
console.log('All FK constraints added successfully');
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
const dropForeignKey = async (tableName, columnName) => {
|
||||
const constraintName = `${tableName}_${columnName}_fkey`;
|
||||
try {
|
||||
await queryInterface.removeConstraint(tableName, constraintName, {
|
||||
transaction,
|
||||
});
|
||||
console.log(`Removed FK constraint: ${constraintName}`);
|
||||
} catch (error) {
|
||||
console.log(
|
||||
`FK constraint not found (may not exist): ${constraintName}`,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
// Remove all FK constraints in reverse order
|
||||
await dropForeignKey('users', 'app_roleId');
|
||||
await dropForeignKey('access_logs', 'userId');
|
||||
await dropForeignKey('access_logs', 'projectId');
|
||||
await dropForeignKey('pwa_caches', 'projectId');
|
||||
await dropForeignKey('publish_events', 'userId');
|
||||
await dropForeignKey('publish_events', 'projectId');
|
||||
await dropForeignKey('project_audio_tracks', 'projectId');
|
||||
await dropForeignKey('presigned_url_requests', 'userId');
|
||||
await dropForeignKey('presigned_url_requests', 'projectId');
|
||||
await dropForeignKey('project_memberships', 'userId');
|
||||
await dropForeignKey('project_memberships', 'projectId');
|
||||
await dropForeignKey('transitions', 'projectId');
|
||||
await dropForeignKey('tour_pages', 'projectId');
|
||||
await dropForeignKey('assets', 'projectId');
|
||||
await dropForeignKey('page_links', 'transitionId');
|
||||
await dropForeignKey('page_links', 'to_pageId');
|
||||
await dropForeignKey('page_links', 'from_pageId');
|
||||
await dropForeignKey('page_elements', 'pageId');
|
||||
await dropForeignKey('asset_variants', 'assetId');
|
||||
|
||||
await transaction.commit();
|
||||
console.log('All FK constraints removed successfully');
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,126 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration to remove redundant deletion tracking columns.
|
||||
*
|
||||
* The `is_deleted` and `deleted_at_time` columns are redundant because:
|
||||
* - Sequelize's `paranoid: true` mode already uses `deletedAt` for soft-delete
|
||||
* - These columns were set but never queried for filtering
|
||||
*
|
||||
* IMPORTANT: This migration should only be run after verifying no external
|
||||
* systems depend on these columns. Consider backing up data first.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
async up(queryInterface) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Helper to safely remove column if it exists
|
||||
const removeColumnIfExists = async (tableName, columnName) => {
|
||||
const [results] = await queryInterface.sequelize.query(
|
||||
`SELECT column_name FROM information_schema.columns
|
||||
WHERE table_name = '${tableName}' AND column_name = '${columnName}'`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
if (results.length > 0) {
|
||||
await queryInterface.removeColumn(tableName, columnName, {
|
||||
transaction,
|
||||
});
|
||||
console.log(`Removed column: ${tableName}.${columnName}`);
|
||||
} else {
|
||||
console.log(
|
||||
`Column does not exist (skipping): ${tableName}.${columnName}`,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
// Remove is_deleted index from assets first (if exists)
|
||||
try {
|
||||
await queryInterface.removeIndex('assets', 'assets_is_deleted', {
|
||||
transaction,
|
||||
});
|
||||
console.log('Removed index: assets_is_deleted');
|
||||
} catch (error) {
|
||||
console.log('Index assets_is_deleted not found (may not exist)');
|
||||
}
|
||||
|
||||
// Remove redundant columns from assets table
|
||||
await removeColumnIfExists('assets', 'is_deleted');
|
||||
await removeColumnIfExists('assets', 'deleted_at_time');
|
||||
|
||||
// Remove redundant columns from projects table
|
||||
await removeColumnIfExists('projects', 'is_deleted');
|
||||
await removeColumnIfExists('projects', 'deleted_at_time');
|
||||
|
||||
await transaction.commit();
|
||||
console.log('Redundant deletion columns removed successfully');
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Re-add columns to assets table
|
||||
await queryInterface.addColumn(
|
||||
'assets',
|
||||
'is_deleted',
|
||||
{
|
||||
type: Sequelize.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'assets',
|
||||
'deleted_at_time',
|
||||
{
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Re-add index
|
||||
await queryInterface.addIndex('assets', ['is_deleted'], {
|
||||
name: 'assets_is_deleted',
|
||||
transaction,
|
||||
});
|
||||
|
||||
// Re-add columns to projects table
|
||||
await queryInterface.addColumn(
|
||||
'projects',
|
||||
'is_deleted',
|
||||
{
|
||||
type: Sequelize.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'projects',
|
||||
'deleted_at_time',
|
||||
{
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
console.log('Redundant deletion columns restored successfully');
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,79 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Rename ui_elements table to element_type_defaults
|
||||
*
|
||||
* This migration renames the table for better clarity:
|
||||
* - ui_elements contained GLOBAL platform-wide default settings
|
||||
* - The new name element_type_defaults better describes this purpose
|
||||
* - Adds index on deletedAt for soft delete queries
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Check if old table exists
|
||||
const tableExists = await queryInterface.sequelize.query(
|
||||
`SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'ui_elements'
|
||||
);`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!tableExists[0]?.exists) {
|
||||
console.log('Table ui_elements does not exist, skipping rename');
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if new table already exists (migration may have been partially run)
|
||||
const newTableExists = await queryInterface.sequelize.query(
|
||||
`SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'element_type_defaults'
|
||||
);`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (newTableExists[0]?.exists) {
|
||||
console.log(
|
||||
'Table element_type_defaults already exists, skipping rename',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Rename table
|
||||
await queryInterface.renameTable('ui_elements', 'element_type_defaults');
|
||||
|
||||
// Update any sequences (PostgreSQL auto-creates these for SERIAL columns, but UUID doesn't need them)
|
||||
// No sequence updates needed since we use UUID primary keys
|
||||
|
||||
console.log('Successfully renamed ui_elements to element_type_defaults');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Check if new table exists
|
||||
const tableExists = await queryInterface.sequelize.query(
|
||||
`SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'element_type_defaults'
|
||||
);`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!tableExists[0]?.exists) {
|
||||
console.log(
|
||||
'Table element_type_defaults does not exist, skipping rollback',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Rename table back
|
||||
await queryInterface.renameTable('element_type_defaults', 'ui_elements');
|
||||
|
||||
console.log(
|
||||
'Successfully rolled back: renamed element_type_defaults to ui_elements',
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,178 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Convert page_elements.element_type from ENUM to TEXT
|
||||
*
|
||||
* This migration:
|
||||
* 1. Converts element_type column from ENUM to TEXT for flexibility
|
||||
* 2. Maps nav_button to navigation_next or navigation_prev based on content_json.navType
|
||||
* 3. Drops the old ENUM type
|
||||
*
|
||||
* Benefits of TEXT over ENUM:
|
||||
* - Flexibility to add new element types without migrations
|
||||
* - No ENUM sync issues between environments
|
||||
* - Application-level validation ensures type safety
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Step 1: Create a temporary TEXT column
|
||||
await queryInterface.addColumn(
|
||||
'page_elements',
|
||||
'element_type_text',
|
||||
{
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 2: Copy ENUM values to TEXT column
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE page_elements SET element_type_text = element_type::TEXT`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 3: Drop the old ENUM column
|
||||
await queryInterface.removeColumn('page_elements', 'element_type', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
// Step 4: Rename TEXT column to element_type
|
||||
await queryInterface.renameColumn(
|
||||
'page_elements',
|
||||
'element_type_text',
|
||||
'element_type',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 5: Add NOT NULL constraint
|
||||
await queryInterface.changeColumn(
|
||||
'page_elements',
|
||||
'element_type',
|
||||
{
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 6: Now map nav_button to specific navigation types (column is TEXT now)
|
||||
// Forward navigation (default if navType not specified)
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE page_elements
|
||||
SET element_type = 'navigation_next'
|
||||
WHERE element_type = 'nav_button'
|
||||
AND (
|
||||
content_json IS NULL
|
||||
OR content_json::jsonb->>'navType' = 'forward'
|
||||
OR content_json::jsonb->>'navType' IS NULL
|
||||
)`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Back navigation
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE page_elements
|
||||
SET element_type = 'navigation_prev'
|
||||
WHERE element_type = 'nav_button'
|
||||
AND content_json IS NOT NULL
|
||||
AND content_json::jsonb->>'navType' = 'back'`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 7: Drop the old ENUM type if it exists
|
||||
await queryInterface.sequelize.query(
|
||||
`DROP TYPE IF EXISTS "enum_page_elements_element_type"`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
console.log(
|
||||
'Successfully converted element_type from ENUM to TEXT and mapped nav_button types',
|
||||
);
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Step 1: Map navigation types back to nav_button (before creating ENUM)
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE page_elements
|
||||
SET element_type = 'nav_button'
|
||||
WHERE element_type IN ('navigation_next', 'navigation_prev')`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 2: Drop any existing ENUM types that might conflict
|
||||
await queryInterface.sequelize.query(
|
||||
`DROP TYPE IF EXISTS "enum_page_elements_element_type" CASCADE`,
|
||||
{ transaction },
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`DROP TYPE IF EXISTS "enum_page_elements_element_type_enum" CASCADE`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 3: Create the ENUM type with original values
|
||||
await queryInterface.sequelize.query(
|
||||
`CREATE TYPE "enum_page_elements_element_type" AS ENUM (
|
||||
'nav_button',
|
||||
'spot',
|
||||
'description',
|
||||
'tooltip',
|
||||
'gallery',
|
||||
'carousel',
|
||||
'logo',
|
||||
'video_player',
|
||||
'popup'
|
||||
)`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 4: Add ENUM column directly via raw SQL to avoid Sequelize creating another type
|
||||
await queryInterface.sequelize.query(
|
||||
`ALTER TABLE page_elements ADD COLUMN element_type_enum "enum_page_elements_element_type"`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 5: Copy TEXT values to ENUM column
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE page_elements SET element_type_enum = element_type::"enum_page_elements_element_type"`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 6: Drop TEXT column
|
||||
await queryInterface.removeColumn('page_elements', 'element_type', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
// Step 7: Rename ENUM column
|
||||
await queryInterface.renameColumn(
|
||||
'page_elements',
|
||||
'element_type_enum',
|
||||
'element_type',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Step 8: Add NOT NULL constraint
|
||||
await queryInterface.sequelize.query(
|
||||
`ALTER TABLE page_elements ALTER COLUMN element_type SET NOT NULL`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
console.log('Successfully reverted element_type from TEXT to ENUM');
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,187 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Create project_element_defaults table
|
||||
*
|
||||
* This table stores project-specific element default settings that override
|
||||
* the global element_type_defaults. Key design decisions:
|
||||
*
|
||||
* - element_type is TEXT (not ENUM) for flexibility
|
||||
* - source_element_id is optional FK for audit trail (SET NULL on global delete)
|
||||
* - snapshot_version tracks generations for "check for updates" feature
|
||||
* - NO environment field - applies across all environments for consistent branding
|
||||
* - Unique constraint on (projectId, element_type) ensures one override per type per project
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Check if table already exists
|
||||
const tableExists = await queryInterface.sequelize.query(
|
||||
`SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = 'project_element_defaults'
|
||||
);`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (tableExists[0]?.exists) {
|
||||
console.log(
|
||||
'Table project_element_defaults already exists, skipping creation',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
await queryInterface.createTable('project_element_defaults', {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
element_type: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
},
|
||||
name: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
sort_order: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
},
|
||||
settings_json: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
source_element_id: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'element_type_defaults',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
snapshot_version: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 1,
|
||||
},
|
||||
projectId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: false,
|
||||
references: {
|
||||
model: 'projects',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
importHash: {
|
||||
type: Sequelize.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Add indexes
|
||||
await queryInterface.addIndex('project_element_defaults', ['projectId'], {
|
||||
name: 'project_element_defaults_projectId',
|
||||
});
|
||||
|
||||
await queryInterface.addIndex(
|
||||
'project_element_defaults',
|
||||
['projectId', 'element_type'],
|
||||
{
|
||||
name: 'project_element_defaults_projectId_element_type',
|
||||
unique: true,
|
||||
where: { deletedAt: null },
|
||||
},
|
||||
);
|
||||
|
||||
await queryInterface.addIndex(
|
||||
'project_element_defaults',
|
||||
['element_type'],
|
||||
{
|
||||
name: 'project_element_defaults_element_type',
|
||||
},
|
||||
);
|
||||
|
||||
await queryInterface.addIndex(
|
||||
'project_element_defaults',
|
||||
['source_element_id'],
|
||||
{
|
||||
name: 'project_element_defaults_source_element_id',
|
||||
},
|
||||
);
|
||||
|
||||
await queryInterface.addIndex('project_element_defaults', ['deletedAt'], {
|
||||
name: 'project_element_defaults_deletedAt',
|
||||
});
|
||||
|
||||
console.log('Successfully created project_element_defaults table');
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
// Drop indexes first
|
||||
await queryInterface.removeIndex(
|
||||
'project_element_defaults',
|
||||
'project_element_defaults_projectId',
|
||||
);
|
||||
await queryInterface.removeIndex(
|
||||
'project_element_defaults',
|
||||
'project_element_defaults_projectId_element_type',
|
||||
);
|
||||
await queryInterface.removeIndex(
|
||||
'project_element_defaults',
|
||||
'project_element_defaults_element_type',
|
||||
);
|
||||
await queryInterface.removeIndex(
|
||||
'project_element_defaults',
|
||||
'project_element_defaults_source_element_id',
|
||||
);
|
||||
await queryInterface.removeIndex(
|
||||
'project_element_defaults',
|
||||
'project_element_defaults_deletedAt',
|
||||
);
|
||||
|
||||
// Drop table
|
||||
await queryInterface.dropTable('project_element_defaults');
|
||||
|
||||
console.log('Successfully dropped project_element_defaults table');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,276 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Backfill project_element_defaults for existing projects
|
||||
*
|
||||
* For each existing project that doesn't have project_element_defaults,
|
||||
* create a snapshot of the current global element_type_defaults.
|
||||
*/
|
||||
|
||||
// Default element types to ensure they exist before backfilling
|
||||
// Seed rows for element_type_defaults, used by the backfill below.
// Each tuple is [element_type, display name, type-specific settings].
// sort_order is derived from array position (1-based), and the shared
// appear* keys are appended last so the serialized settings_json keeps the
// historical key order byte-for-byte.
const DEFAULT_ELEMENT_TYPES = [
  [
    'navigation_next',
    'Navigation Forward Button',
    {
      label: 'Navigation: Forward',
      navLabel: 'Forward',
      navType: 'forward',
      navDisabled: false,
      transitionReverseMode: 'auto_reverse',
      transitionDurationSec: 0.7,
    },
  ],
  [
    'navigation_prev',
    'Navigation Back Button',
    {
      label: 'Navigation: Back',
      navLabel: 'Back',
      navType: 'back',
      navDisabled: false,
      transitionReverseMode: 'auto_reverse',
      transitionDurationSec: 0.7,
    },
  ],
  [
    'tooltip',
    'Tooltip',
    {
      label: 'Tooltip',
      tooltipTitle: 'Tooltip title',
      tooltipText: 'Tooltip text',
    },
  ],
  [
    'description',
    'Description',
    {
      label: 'Description',
      descriptionTitle: 'TITLE',
      descriptionText: '',
      descriptionTitleFontSize: '48px',
      descriptionTextFontSize: '36px',
      descriptionTitleFontFamily: 'inherit',
      descriptionTextFontFamily: 'inherit',
      descriptionTitleColor: '#000000',
      descriptionTextColor: '#4B5563',
      descriptionBackgroundColor: 'transparent',
    },
  ],
  [
    'gallery',
    'Gallery',
    {
      label: 'Gallery',
      galleryCards: [{ imageUrl: '', title: 'Card 1', description: '' }],
    },
  ],
  [
    'carousel',
    'Carousel',
    {
      label: 'Carousel',
      carouselSlides: [{ imageUrl: '', caption: 'Slide 1' }],
      carouselPrevIconUrl: '',
      carouselNextIconUrl: '',
    },
  ],
  [
    'video_player',
    'Video Player',
    {
      label: 'Video Player',
      mediaUrl: '',
      mediaAutoplay: true,
      mediaLoop: true,
      mediaMuted: true,
    },
  ],
  [
    'audio_player',
    'Audio Player',
    {
      label: 'Audio Player',
      mediaUrl: '',
      mediaAutoplay: true,
      mediaLoop: true,
      mediaMuted: false,
    },
  ],
].map(([element_type, name, settings], index) => ({
  element_type,
  name,
  sort_order: index + 1,
  settings_json: JSON.stringify({
    ...settings,
    appearDelaySec: 0,
    appearDurationSec: null,
  }),
}));
|
||||
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// First, ensure element_type_defaults has all default rows
|
||||
// This is needed because the API's lazy initialization won't have run yet during migration
|
||||
const [existingTypes] = await queryInterface.sequelize.query(
|
||||
`SELECT element_type FROM element_type_defaults WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
const existingTypeSet = new Set(
|
||||
Array.isArray(existingTypes)
|
||||
? existingTypes.map((t) => t.element_type)
|
||||
: existingTypes
|
||||
? [existingTypes.element_type]
|
||||
: [],
|
||||
);
|
||||
|
||||
// Insert missing element types
|
||||
for (const defaultType of DEFAULT_ELEMENT_TYPES) {
|
||||
if (!existingTypeSet.has(defaultType.element_type)) {
|
||||
await queryInterface.sequelize.query(
|
||||
`INSERT INTO element_type_defaults (id, element_type, name, sort_order, settings_json, "createdAt", "updatedAt")
|
||||
VALUES (gen_random_uuid(), :element_type, :name, :sort_order, :settings_json, NOW(), NOW())`,
|
||||
{
|
||||
replacements: {
|
||||
element_type: defaultType.element_type,
|
||||
name: defaultType.name,
|
||||
sort_order: defaultType.sort_order,
|
||||
settings_json: defaultType.settings_json,
|
||||
},
|
||||
},
|
||||
);
|
||||
console.log(
|
||||
`Created missing element_type_default: ${defaultType.element_type}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Get all existing projects
|
||||
const [projects] = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM projects WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!projects || projects.length === 0) {
|
||||
console.log('No projects found, skipping backfill');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get all global element type defaults (now guaranteed to have all types)
|
||||
const [globalDefaults] = await queryInterface.sequelize.query(
|
||||
`SELECT id, element_type, name, sort_order, settings_json
|
||||
FROM element_type_defaults
|
||||
WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!globalDefaults || globalDefaults.length === 0) {
|
||||
console.log('No global element type defaults found, skipping backfill');
|
||||
return;
|
||||
}
|
||||
|
||||
const projectIds = Array.isArray(projects)
|
||||
? projects.map((p) => p.id)
|
||||
: [projects.id];
|
||||
const globalDefaultRows = Array.isArray(globalDefaults)
|
||||
? globalDefaults
|
||||
: [globalDefaults];
|
||||
|
||||
// For each project, add any missing element type defaults
|
||||
for (const projectId of projectIds) {
|
||||
// Get existing element types for this project
|
||||
const [existingDefaults] = await queryInterface.sequelize.query(
|
||||
`SELECT element_type FROM project_element_defaults
|
||||
WHERE "projectId" = :projectId AND "deletedAt" IS NULL`,
|
||||
{
|
||||
replacements: { projectId },
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
const existingProjectTypes = new Set(
|
||||
Array.isArray(existingDefaults)
|
||||
? existingDefaults.map((d) => d.element_type)
|
||||
: existingDefaults
|
||||
? [existingDefaults.element_type]
|
||||
: [],
|
||||
);
|
||||
|
||||
// Create project element defaults for missing types
|
||||
let addedCount = 0;
|
||||
for (const globalDefault of globalDefaultRows) {
|
||||
if (existingProjectTypes.has(globalDefault.element_type)) {
|
||||
continue; // Already has this type
|
||||
}
|
||||
|
||||
await queryInterface.sequelize.query(
|
||||
`INSERT INTO project_element_defaults
|
||||
(id, element_type, name, sort_order, settings_json, source_element_id, snapshot_version, "projectId", "createdAt", "updatedAt")
|
||||
VALUES (
|
||||
gen_random_uuid(),
|
||||
:element_type,
|
||||
:name,
|
||||
:sort_order,
|
||||
:settings_json,
|
||||
:source_element_id,
|
||||
1,
|
||||
:projectId,
|
||||
NOW(),
|
||||
NOW()
|
||||
)`,
|
||||
{
|
||||
replacements: {
|
||||
element_type: globalDefault.element_type,
|
||||
name: globalDefault.name,
|
||||
sort_order: globalDefault.sort_order,
|
||||
settings_json: globalDefault.settings_json,
|
||||
source_element_id: globalDefault.id,
|
||||
projectId,
|
||||
},
|
||||
type: Sequelize.QueryTypes.INSERT,
|
||||
},
|
||||
);
|
||||
addedCount++;
|
||||
}
|
||||
|
||||
if (addedCount > 0) {
|
||||
console.log(
|
||||
`Backfilled ${addedCount} element defaults for project ${projectId}`,
|
||||
);
|
||||
} else {
|
||||
console.log(`Project ${projectId} already has all element defaults`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
'Successfully backfilled project_element_defaults for existing projects',
|
||||
);
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
// Delete all project_element_defaults with snapshot_version = 1
|
||||
// (only the ones we created during backfill)
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM project_element_defaults WHERE snapshot_version = 1`,
|
||||
);
|
||||
|
||||
console.log('Successfully removed backfilled project_element_defaults');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,52 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Fix project_audio_tracks.environment NULL constraint
|
||||
*
|
||||
* Unlike tour_pages and transitions, project_audio_tracks.environment allows NULL.
|
||||
* This migration fixes it to match other models - NOT NULL with default 'dev'.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Check if column exists
|
||||
const [columns] = await queryInterface.sequelize.query(
|
||||
`SELECT column_name FROM information_schema.columns
|
||||
WHERE table_name = 'project_audio_tracks' AND column_name = 'environment'`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!columns) {
|
||||
console.log(
|
||||
'Column project_audio_tracks.environment does not exist, skipping',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Set NULL values to 'dev'
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE project_audio_tracks SET environment = 'dev' WHERE environment IS NULL`,
|
||||
);
|
||||
|
||||
// Alter column to NOT NULL with default
|
||||
await queryInterface.changeColumn('project_audio_tracks', 'environment', {
|
||||
type: Sequelize.ENUM('dev', 'stage', 'production'),
|
||||
allowNull: false,
|
||||
defaultValue: 'dev',
|
||||
});
|
||||
|
||||
console.log(
|
||||
'Successfully fixed project_audio_tracks.environment to NOT NULL with default dev',
|
||||
);
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Revert to allow NULL
|
||||
await queryInterface.changeColumn('project_audio_tracks', 'environment', {
|
||||
type: Sequelize.ENUM('dev', 'stage', 'production'),
|
||||
allowNull: true,
|
||||
defaultValue: 'dev',
|
||||
});
|
||||
|
||||
console.log('Reverted project_audio_tracks.environment to allow NULL');
|
||||
},
|
||||
};
|
||||
208
backend/src/db/migrations/20260326000006-copy-dev-to-stage.js
Normal file
208
backend/src/db/migrations/20260326000006-copy-dev-to-stage.js
Normal file
@ -0,0 +1,208 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Copy existing dev content to stage environment
|
||||
*
|
||||
* This migration initializes the stage environment for existing projects
|
||||
* by copying all dev content to stage. This establishes the new workflow
|
||||
* where constructor edits dev, then explicitly saves to stage.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Get all projects
|
||||
const projects = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM projects WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!projects || projects.length === 0) {
|
||||
console.log('No projects found, skipping dev to stage copy');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const project of projects) {
|
||||
const projectId = project.id;
|
||||
|
||||
// Check if stage content already exists
|
||||
const [stageCheck] = await queryInterface.sequelize.query(
|
||||
`SELECT COUNT(*)::int as count FROM tour_pages
|
||||
WHERE "projectId" = '${projectId}' AND environment = 'stage' AND "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (stageCheck?.count > 0) {
|
||||
console.log(`Project ${projectId} already has stage content, skipping`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get dev pages count first
|
||||
const [devPageCount] = await queryInterface.sequelize.query(
|
||||
`SELECT COUNT(*)::int as count FROM tour_pages
|
||||
WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (!devPageCount || devPageCount.count === 0) {
|
||||
console.log(`Project ${projectId} has no dev content, skipping`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Copy pages with direct INSERT...SELECT
|
||||
await queryInterface.sequelize.query(`
|
||||
INSERT INTO tour_pages
|
||||
(id, slug, name, sort_order, background_image_url, background_video_url, background_audio_url, background_loop, requires_auth, ui_schema_json, "projectId", environment, source_key, "createdAt", "updatedAt", "createdById", "updatedById")
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
slug,
|
||||
name,
|
||||
sort_order,
|
||||
background_image_url,
|
||||
background_video_url,
|
||||
background_audio_url,
|
||||
background_loop,
|
||||
requires_auth,
|
||||
ui_schema_json,
|
||||
"projectId",
|
||||
'stage',
|
||||
id::text,
|
||||
NOW(),
|
||||
NOW(),
|
||||
"createdById",
|
||||
"updatedById"
|
||||
FROM tour_pages
|
||||
WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
// Copy transitions
|
||||
await queryInterface.sequelize.query(`
|
||||
INSERT INTO transitions
|
||||
(id, name, slug, video_url, audio_url, supports_reverse, duration_sec, "projectId", environment, source_key, "createdAt", "updatedAt", "createdById", "updatedById")
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
name,
|
||||
slug,
|
||||
video_url,
|
||||
audio_url,
|
||||
supports_reverse,
|
||||
duration_sec,
|
||||
"projectId",
|
||||
'stage',
|
||||
id::text,
|
||||
NOW(),
|
||||
NOW(),
|
||||
"createdById",
|
||||
"updatedById"
|
||||
FROM transitions
|
||||
WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
// Copy audio tracks
|
||||
await queryInterface.sequelize.query(`
|
||||
INSERT INTO project_audio_tracks
|
||||
(id, name, slug, url, "loop", volume, sort_order, is_enabled, "projectId", environment, source_key, "createdAt", "updatedAt", "createdById", "updatedById")
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
name,
|
||||
slug,
|
||||
url,
|
||||
"loop",
|
||||
volume,
|
||||
sort_order,
|
||||
is_enabled,
|
||||
"projectId",
|
||||
'stage',
|
||||
id::text,
|
||||
NOW(),
|
||||
NOW(),
|
||||
"createdById",
|
||||
"updatedById"
|
||||
FROM project_audio_tracks
|
||||
WHERE "projectId" = '${projectId}' AND environment = 'dev' AND "deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
// Copy page elements using a subquery to map page IDs
|
||||
await queryInterface.sequelize.query(`
|
||||
INSERT INTO page_elements
|
||||
(id, element_type, name, sort_order, is_visible, x_percent, y_percent, width_percent, height_percent, rotation_deg, style_json, content_json, "pageId", "createdAt", "updatedAt", "createdById", "updatedById")
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
pe.element_type,
|
||||
pe.name,
|
||||
pe.sort_order,
|
||||
pe.is_visible,
|
||||
pe.x_percent,
|
||||
pe.y_percent,
|
||||
pe.width_percent,
|
||||
pe.height_percent,
|
||||
pe.rotation_deg,
|
||||
pe.style_json,
|
||||
pe.content_json,
|
||||
stage_page.id,
|
||||
NOW(),
|
||||
NOW(),
|
||||
pe."createdById",
|
||||
pe."updatedById"
|
||||
FROM page_elements pe
|
||||
INNER JOIN tour_pages dev_page ON pe."pageId" = dev_page.id
|
||||
INNER JOIN tour_pages stage_page ON stage_page.source_key = dev_page.id::text AND stage_page.environment = 'stage'
|
||||
WHERE dev_page."projectId" = '${projectId}'
|
||||
AND dev_page.environment = 'dev'
|
||||
AND dev_page."deletedAt" IS NULL
|
||||
AND pe."deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
// Copy page links using subqueries to map page and transition IDs
|
||||
await queryInterface.sequelize.query(`
|
||||
INSERT INTO page_links
|
||||
(id, trigger_selector, external_url, "from_pageId", "to_pageId", "transitionId", "createdAt", "updatedAt", "createdById", "updatedById")
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
pl.trigger_selector,
|
||||
pl.external_url,
|
||||
stage_from.id,
|
||||
stage_to.id,
|
||||
stage_transition.id,
|
||||
NOW(),
|
||||
NOW(),
|
||||
pl."createdById",
|
||||
pl."updatedById"
|
||||
FROM page_links pl
|
||||
INNER JOIN tour_pages dev_from ON pl."from_pageId" = dev_from.id
|
||||
INNER JOIN tour_pages stage_from ON stage_from.source_key = dev_from.id::text AND stage_from.environment = 'stage'
|
||||
LEFT JOIN tour_pages dev_to ON pl."to_pageId" = dev_to.id
|
||||
LEFT JOIN tour_pages stage_to ON stage_to.source_key = dev_to.id::text AND stage_to.environment = 'stage'
|
||||
LEFT JOIN transitions dev_transition ON pl."transitionId" = dev_transition.id
|
||||
LEFT JOIN transitions stage_transition ON stage_transition.source_key = dev_transition.id::text AND stage_transition.environment = 'stage'
|
||||
WHERE dev_from."projectId" = '${projectId}'
|
||||
AND dev_from.environment = 'dev'
|
||||
AND dev_from."deletedAt" IS NULL
|
||||
AND pl."deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
console.log(`Copied dev content to stage for project ${projectId}`);
|
||||
}
|
||||
|
||||
console.log('Successfully copied dev content to stage for all projects');
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
// Delete all stage content that has a source_key (meaning it was created by this migration)
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM page_links WHERE "from_pageId" IN (SELECT id FROM tour_pages WHERE environment = 'stage' AND source_key IS NOT NULL)`,
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM page_elements WHERE "pageId" IN (SELECT id FROM tour_pages WHERE environment = 'stage' AND source_key IS NOT NULL)`,
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM tour_pages WHERE environment = 'stage' AND source_key IS NOT NULL`,
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM transitions WHERE environment = 'stage' AND source_key IS NOT NULL`,
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM project_audio_tracks WHERE environment = 'stage' AND source_key IS NOT NULL`,
|
||||
);
|
||||
|
||||
console.log('Removed stage content created by migration');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,58 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Enforce environment NOT NULL on all environment-aware tables
|
||||
*
|
||||
* This migration ensures that:
|
||||
* 1. All NULL environment values are set to 'dev'
|
||||
* 2. environment column is NOT NULL with default 'dev'
|
||||
*
|
||||
* This prevents data leaks where pages without environment could appear in production.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
// Fix any NULL environments in tour_pages
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE tour_pages SET environment = 'dev' WHERE environment IS NULL`,
|
||||
);
|
||||
|
||||
// Fix any NULL environments in transitions
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE transitions SET environment = 'dev' WHERE environment IS NULL`,
|
||||
);
|
||||
|
||||
// Add NOT NULL constraint with default to tour_pages.environment
|
||||
await queryInterface.sequelize.query(`
|
||||
ALTER TABLE tour_pages
|
||||
ALTER COLUMN environment SET NOT NULL,
|
||||
ALTER COLUMN environment SET DEFAULT 'dev'
|
||||
`);
|
||||
|
||||
// Add NOT NULL constraint with default to transitions.environment
|
||||
await queryInterface.sequelize.query(`
|
||||
ALTER TABLE transitions
|
||||
ALTER COLUMN environment SET NOT NULL,
|
||||
ALTER COLUMN environment SET DEFAULT 'dev'
|
||||
`);
|
||||
|
||||
console.log('Successfully enforced NOT NULL on environment columns');
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
// Remove NOT NULL constraint from tour_pages.environment
|
||||
await queryInterface.sequelize.query(`
|
||||
ALTER TABLE tour_pages
|
||||
ALTER COLUMN environment DROP NOT NULL,
|
||||
ALTER COLUMN environment DROP DEFAULT
|
||||
`);
|
||||
|
||||
// Remove NOT NULL constraint from transitions.environment
|
||||
await queryInterface.sequelize.query(`
|
||||
ALTER TABLE transitions
|
||||
ALTER COLUMN environment DROP NOT NULL,
|
||||
ALTER COLUMN environment DROP DEFAULT
|
||||
`);
|
||||
|
||||
console.log('Removed NOT NULL constraint from environment columns');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,31 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Remove project.phase column - it's redundant.
|
||||
* Runtime access is controlled by tour_pages.environment, not project.phase.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
// Drop the phase column
|
||||
await queryInterface.removeColumn('projects', 'phase');
|
||||
|
||||
// Drop the ENUM type
|
||||
await queryInterface.sequelize.query(
|
||||
`DROP TYPE IF EXISTS "enum_projects_phase";`,
|
||||
);
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Recreate the ENUM type
|
||||
await queryInterface.sequelize.query(`
|
||||
CREATE TYPE "enum_projects_phase" AS ENUM ('dev', 'stage', 'production');
|
||||
`);
|
||||
|
||||
// Recreate the column with default 'dev'
|
||||
await queryInterface.addColumn('projects', 'phase', {
|
||||
type: Sequelize.ENUM('dev', 'stage', 'production'),
|
||||
allowNull: false,
|
||||
defaultValue: 'dev',
|
||||
});
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,20 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Remove entry_page_slug from projects table
|
||||
*
|
||||
* The entry page is now determined by the first page by sort_order,
|
||||
* making entry_page_slug redundant.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
await queryInterface.removeColumn('projects', 'entry_page_slug');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
await queryInterface.addColumn('projects', 'entry_page_slug', {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
});
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,171 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Convert targetPageId to targetPageSlug in ui_schema_json
|
||||
*
|
||||
* This migration converts navigation elements from using page UUIDs (targetPageId)
|
||||
* to using page slugs (targetPageSlug). This fixes the ID remapping issue when
|
||||
* pages are copied between environments (dev -> stage -> production).
|
||||
*
|
||||
* Slugs are unique within project+environment and identical across environments,
|
||||
* eliminating the need for ID remapping during publish.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Get all tour pages with their ui_schema_json
|
||||
const [tourPages] = await queryInterface.sequelize.query(
|
||||
`SELECT id, "projectId", environment, slug, ui_schema_json FROM tour_pages WHERE ui_schema_json IS NOT NULL`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Build a lookup map: pageId -> { projectId, environment, slug }
|
||||
const pageInfoById = new Map();
|
||||
tourPages.forEach((page) => {
|
||||
pageInfoById.set(page.id, {
|
||||
projectId: page.projectId,
|
||||
environment: page.environment,
|
||||
slug: page.slug,
|
||||
});
|
||||
});
|
||||
|
||||
// Process each page and convert targetPageId to targetPageSlug
|
||||
for (const page of tourPages) {
|
||||
try {
|
||||
const uiSchema =
|
||||
typeof page.ui_schema_json === 'string'
|
||||
? JSON.parse(page.ui_schema_json)
|
||||
: page.ui_schema_json;
|
||||
|
||||
if (!uiSchema || !Array.isArray(uiSchema.elements)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let hasChanges = false;
|
||||
|
||||
uiSchema.elements.forEach((element) => {
|
||||
// Convert targetPageId to targetPageSlug
|
||||
if (
|
||||
element.targetPageId &&
|
||||
typeof element.targetPageId === 'string'
|
||||
) {
|
||||
const targetPageInfo = pageInfoById.get(element.targetPageId);
|
||||
if (targetPageInfo && targetPageInfo.slug) {
|
||||
// Only convert if target page is in the same project and environment
|
||||
if (
|
||||
targetPageInfo.projectId === page.projectId &&
|
||||
targetPageInfo.environment === page.environment
|
||||
) {
|
||||
element.targetPageSlug = targetPageInfo.slug;
|
||||
delete element.targetPageId;
|
||||
hasChanges = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (hasChanges) {
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE tour_pages SET ui_schema_json = :json WHERE id = :id`,
|
||||
{
|
||||
replacements: {
|
||||
json: JSON.stringify(uiSchema),
|
||||
id: page.id,
|
||||
},
|
||||
type: Sequelize.QueryTypes.UPDATE,
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Skip pages with invalid JSON
|
||||
console.warn(`Skipping page ${page.id}: ${parseError.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
await transaction.commit();
|
||||
console.log(
|
||||
'Migration complete: Converted targetPageId to targetPageSlug',
|
||||
);
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Get all tour pages
|
||||
const [tourPages] = await queryInterface.sequelize.query(
|
||||
`SELECT id, "projectId", environment, slug, ui_schema_json FROM tour_pages WHERE ui_schema_json IS NOT NULL`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Build lookup: (projectId, environment, slug) -> pageId
|
||||
const pageIdByKey = new Map();
|
||||
tourPages.forEach((page) => {
|
||||
const key = `${page.projectId}:${page.environment}:${page.slug}`;
|
||||
pageIdByKey.set(key, page.id);
|
||||
});
|
||||
|
||||
// Process each page and convert targetPageSlug back to targetPageId
|
||||
for (const page of tourPages) {
|
||||
try {
|
||||
const uiSchema =
|
||||
typeof page.ui_schema_json === 'string'
|
||||
? JSON.parse(page.ui_schema_json)
|
||||
: page.ui_schema_json;
|
||||
|
||||
if (!uiSchema || !Array.isArray(uiSchema.elements)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let hasChanges = false;
|
||||
|
||||
uiSchema.elements.forEach((element) => {
|
||||
if (
|
||||
element.targetPageSlug &&
|
||||
typeof element.targetPageSlug === 'string'
|
||||
) {
|
||||
const key = `${page.projectId}:${page.environment}:${element.targetPageSlug}`;
|
||||
const targetPageId = pageIdByKey.get(key);
|
||||
if (targetPageId) {
|
||||
element.targetPageId = targetPageId;
|
||||
delete element.targetPageSlug;
|
||||
hasChanges = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (hasChanges) {
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE tour_pages SET ui_schema_json = :json WHERE id = :id`,
|
||||
{
|
||||
replacements: {
|
||||
json: JSON.stringify(uiSchema),
|
||||
id: page.id,
|
||||
},
|
||||
type: Sequelize.QueryTypes.UPDATE,
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (parseError) {
|
||||
console.warn(`Skipping page ${page.id}: ${parseError.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
await transaction.commit();
|
||||
console.log(
|
||||
'Rollback complete: Converted targetPageSlug back to targetPageId',
|
||||
);
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,100 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Drop page_elements table
|
||||
*
|
||||
* This table was designed for storing individual page elements but was never used.
|
||||
* All element data is stored in tour_pages.ui_schema_json instead.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
// Verify the table is empty before dropping
|
||||
const [results] = await queryInterface.sequelize.query(
|
||||
'SELECT COUNT(*) as count FROM page_elements',
|
||||
);
|
||||
const count = parseInt(results[0].count, 10);
|
||||
|
||||
if (count > 0) {
|
||||
throw new Error(
|
||||
`Cannot drop page_elements table: it contains ${count} records. Please migrate or delete them first.`,
|
||||
);
|
||||
}
|
||||
|
||||
await queryInterface.dropTable('page_elements');
|
||||
console.log('Dropped page_elements table (was empty)');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Recreate the page_elements table
|
||||
await queryInterface.createTable('page_elements', {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
pageId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: false,
|
||||
references: {
|
||||
model: 'tour_pages',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'CASCADE',
|
||||
},
|
||||
element_type: {
|
||||
type: Sequelize.STRING,
|
||||
allowNull: false,
|
||||
},
|
||||
xPercent: {
|
||||
type: Sequelize.DECIMAL(10, 6),
|
||||
allowNull: true,
|
||||
},
|
||||
yPercent: {
|
||||
type: Sequelize.DECIMAL(10, 6),
|
||||
allowNull: true,
|
||||
},
|
||||
content_json: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
importHash: {
|
||||
type: Sequelize.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,108 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Drop page_links table
|
||||
*
|
||||
* This table was designed for storing navigation links between pages but was never used.
|
||||
* Navigation targets are stored in tour_pages.ui_schema_json as targetPageSlug instead.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
// Verify the table is empty before dropping
|
||||
const [results] = await queryInterface.sequelize.query(
|
||||
'SELECT COUNT(*) as count FROM page_links',
|
||||
);
|
||||
const count = parseInt(results[0].count, 10);
|
||||
|
||||
if (count > 0) {
|
||||
throw new Error(
|
||||
`Cannot drop page_links table: it contains ${count} records. Please migrate or delete them first.`,
|
||||
);
|
||||
}
|
||||
|
||||
await queryInterface.dropTable('page_links');
|
||||
console.log('Dropped page_links table (was empty)');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Recreate the page_links table
|
||||
await queryInterface.createTable('page_links', {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
from_pageId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'tour_pages',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'CASCADE',
|
||||
},
|
||||
to_pageId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'tour_pages',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
transitionId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'transitions',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
is_active: {
|
||||
type: Sequelize.BOOLEAN,
|
||||
defaultValue: true,
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
importHash: {
|
||||
type: Sequelize.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,105 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Drop transitions table
|
||||
*
|
||||
* This table was designed for storing transition video metadata but was never used.
|
||||
* Transition video URLs are stored directly in tour_pages.ui_schema_json as transitionVideoUrl.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
// Verify the table is empty before dropping
|
||||
const [results] = await queryInterface.sequelize.query(
|
||||
'SELECT COUNT(*) as count FROM transitions',
|
||||
);
|
||||
const count = parseInt(results[0].count, 10);
|
||||
|
||||
if (count > 0) {
|
||||
throw new Error(
|
||||
`Cannot drop transitions table: it contains ${count} records. Please migrate or delete them first.`,
|
||||
);
|
||||
}
|
||||
|
||||
await queryInterface.dropTable('transitions');
|
||||
console.log('Dropped transitions table (was empty)');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Recreate the transitions table
|
||||
await queryInterface.createTable('transitions', {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
projectId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: false,
|
||||
references: {
|
||||
model: 'projects',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'CASCADE',
|
||||
},
|
||||
environment: {
|
||||
type: Sequelize.STRING,
|
||||
allowNull: false,
|
||||
defaultValue: 'dev',
|
||||
},
|
||||
name: {
|
||||
type: Sequelize.STRING,
|
||||
allowNull: true,
|
||||
},
|
||||
video_url: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
duration_ms: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
source_key: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
importHash: {
|
||||
type: Sequelize.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,150 @@
|
||||
'use strict';
|
||||
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
|
||||
/**
|
||||
* Add missing element type defaults (spot, logo, popup)
|
||||
* These were missing from the original DEFAULT_ROWS and need to be added to existing databases.
|
||||
* Also backfills project_element_defaults for existing projects.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const now = new Date();
|
||||
|
||||
// Define the missing element types
|
||||
const missingTypes = [
|
||||
{
|
||||
id: uuidv4(),
|
||||
element_type: 'spot',
|
||||
name: 'Hotspot',
|
||||
sort_order: 9,
|
||||
settings_json: JSON.stringify({
|
||||
label: 'Hotspot',
|
||||
iconUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
}),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
{
|
||||
id: uuidv4(),
|
||||
element_type: 'logo',
|
||||
name: 'Logo',
|
||||
sort_order: 10,
|
||||
settings_json: JSON.stringify({
|
||||
label: 'Logo',
|
||||
iconUrl: '',
|
||||
backgroundImageUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
}),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
{
|
||||
id: uuidv4(),
|
||||
element_type: 'popup',
|
||||
name: 'Popup',
|
||||
sort_order: 11,
|
||||
settings_json: JSON.stringify({
|
||||
label: 'Popup',
|
||||
iconUrl: '',
|
||||
popupTitle: '',
|
||||
popupContent: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
}),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
];
|
||||
|
||||
// Insert missing global defaults (skip if they already exist)
|
||||
for (const elementType of missingTypes) {
|
||||
const [existing] = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM element_type_defaults WHERE element_type = :element_type AND "deletedAt" IS NULL`,
|
||||
{
|
||||
replacements: { element_type: elementType.element_type },
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!existing) {
|
||||
await queryInterface.bulkInsert('element_type_defaults', [elementType]);
|
||||
console.log(`Added global default for: ${elementType.element_type}`);
|
||||
} else {
|
||||
console.log(
|
||||
`Global default already exists for: ${elementType.element_type}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Get all inserted/existing global defaults for the missing types
|
||||
const globalDefaults = await queryInterface.sequelize.query(
|
||||
`SELECT id, element_type, name, sort_order, settings_json
|
||||
FROM element_type_defaults
|
||||
WHERE element_type IN ('spot', 'logo', 'popup') AND "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
// Get all projects
|
||||
const projects = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM projects WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
console.log(
|
||||
`Backfilling ${globalDefaults.length} element types to ${projects.length} projects...`,
|
||||
);
|
||||
|
||||
// Backfill project_element_defaults for each project
|
||||
for (const project of projects) {
|
||||
for (const globalDefault of globalDefaults) {
|
||||
// Check if project already has this element type
|
||||
const [existing] = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM project_element_defaults
|
||||
WHERE "projectId" = :projectId AND element_type = :element_type AND "deletedAt" IS NULL`,
|
||||
{
|
||||
replacements: {
|
||||
projectId: project.id,
|
||||
element_type: globalDefault.element_type,
|
||||
},
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!existing) {
|
||||
await queryInterface.bulkInsert('project_element_defaults', [
|
||||
{
|
||||
id: uuidv4(),
|
||||
projectId: project.id,
|
||||
element_type: globalDefault.element_type,
|
||||
name: globalDefault.name,
|
||||
sort_order: globalDefault.sort_order,
|
||||
settings_json: globalDefault.settings_json,
|
||||
source_element_id: globalDefault.id,
|
||||
snapshot_version: 1,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Backfill complete.');
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
// Remove the added element types from project_element_defaults
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM project_element_defaults WHERE element_type IN ('spot', 'logo', 'popup')`,
|
||||
);
|
||||
|
||||
// Remove from element_type_defaults
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM element_type_defaults WHERE element_type IN ('spot', 'logo', 'popup')`,
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,295 @@
|
||||
'use strict';
|
||||
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
|
||||
/**
|
||||
* Sync all 11 element type defaults with correct sort_order.
|
||||
* This migration ensures all element types exist in element_type_defaults
|
||||
* and backfills any missing project_element_defaults for existing projects.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const now = new Date();
|
||||
|
||||
// Define all 11 element types with correct sort_order
|
||||
const DEFAULT_ELEMENT_TYPES = [
|
||||
{
|
||||
element_type: 'navigation_next',
|
||||
name: 'Navigation Forward Button',
|
||||
sort_order: 1,
|
||||
default_settings_json: {
|
||||
label: 'Navigation: Forward',
|
||||
navLabel: 'Forward',
|
||||
navType: 'forward',
|
||||
navDisabled: false,
|
||||
transitionReverseMode: 'auto_reverse',
|
||||
transitionDurationSec: 0.7,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'navigation_prev',
|
||||
name: 'Navigation Back Button',
|
||||
sort_order: 2,
|
||||
default_settings_json: {
|
||||
label: 'Navigation: Back',
|
||||
navLabel: 'Back',
|
||||
navType: 'back',
|
||||
navDisabled: false,
|
||||
transitionReverseMode: 'auto_reverse',
|
||||
transitionDurationSec: 0.7,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'tooltip',
|
||||
name: 'Tooltip',
|
||||
sort_order: 3,
|
||||
default_settings_json: {
|
||||
label: 'Tooltip',
|
||||
tooltipTitle: 'Tooltip title',
|
||||
tooltipText: 'Tooltip text',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'description',
|
||||
name: 'Description',
|
||||
sort_order: 4,
|
||||
default_settings_json: {
|
||||
label: 'Description',
|
||||
descriptionTitle: 'TITLE',
|
||||
descriptionText: '',
|
||||
descriptionTitleFontSize: '48px',
|
||||
descriptionTextFontSize: '36px',
|
||||
descriptionTitleFontFamily: 'inherit',
|
||||
descriptionTextFontFamily: 'inherit',
|
||||
descriptionTitleColor: '#000000',
|
||||
descriptionTextColor: '#4B5563',
|
||||
descriptionBackgroundColor: 'transparent',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'gallery',
|
||||
name: 'Gallery',
|
||||
sort_order: 5,
|
||||
default_settings_json: {
|
||||
label: 'Gallery',
|
||||
galleryCards: [{ imageUrl: '', title: 'Card 1', description: '' }],
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'carousel',
|
||||
name: 'Carousel',
|
||||
sort_order: 6,
|
||||
default_settings_json: {
|
||||
label: 'Carousel',
|
||||
carouselSlides: [{ imageUrl: '', caption: 'Slide 1' }],
|
||||
carouselPrevIconUrl: '',
|
||||
carouselNextIconUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'video_player',
|
||||
name: 'Video Player',
|
||||
sort_order: 7,
|
||||
default_settings_json: {
|
||||
label: 'Video Player',
|
||||
mediaUrl: '',
|
||||
mediaAutoplay: true,
|
||||
mediaLoop: true,
|
||||
mediaMuted: true,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'audio_player',
|
||||
name: 'Audio Player',
|
||||
sort_order: 8,
|
||||
default_settings_json: {
|
||||
label: 'Audio Player',
|
||||
mediaUrl: '',
|
||||
mediaAutoplay: true,
|
||||
mediaLoop: true,
|
||||
mediaMuted: false,
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'spot',
|
||||
name: 'Hotspot',
|
||||
sort_order: 9,
|
||||
default_settings_json: {
|
||||
label: 'Hotspot',
|
||||
iconUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'logo',
|
||||
name: 'Logo',
|
||||
sort_order: 10,
|
||||
default_settings_json: {
|
||||
label: 'Logo',
|
||||
iconUrl: '',
|
||||
backgroundImageUrl: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
element_type: 'popup',
|
||||
name: 'Popup',
|
||||
sort_order: 11,
|
||||
default_settings_json: {
|
||||
label: 'Popup',
|
||||
iconUrl: '',
|
||||
popupTitle: '',
|
||||
popupContent: '',
|
||||
appearDelaySec: 0,
|
||||
appearDurationSec: null,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
console.log('Syncing all 11 element type defaults...');
|
||||
|
||||
// Track inserted/updated global defaults for backfill
|
||||
const globalDefaultIds = new Map();
|
||||
|
||||
// For each element type: insert if not exists, update sort_order if wrong
|
||||
for (const elementType of DEFAULT_ELEMENT_TYPES) {
|
||||
const [existing] = await queryInterface.sequelize.query(
|
||||
`SELECT id, sort_order FROM element_type_defaults
|
||||
WHERE element_type = :element_type AND "deletedAt" IS NULL`,
|
||||
{
|
||||
replacements: { element_type: elementType.element_type },
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!existing) {
|
||||
// Insert new element type
|
||||
const newId = uuidv4();
|
||||
await queryInterface.bulkInsert('element_type_defaults', [
|
||||
{
|
||||
id: newId,
|
||||
element_type: elementType.element_type,
|
||||
name: elementType.name,
|
||||
sort_order: elementType.sort_order,
|
||||
settings_json: JSON.stringify(elementType.default_settings_json),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
]);
|
||||
globalDefaultIds.set(elementType.element_type, newId);
|
||||
console.log(
|
||||
`Inserted: ${elementType.element_type} (sort_order: ${elementType.sort_order})`,
|
||||
);
|
||||
} else {
|
||||
globalDefaultIds.set(elementType.element_type, existing.id);
|
||||
// Update sort_order if different
|
||||
if (existing.sort_order !== elementType.sort_order) {
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE element_type_defaults
|
||||
SET sort_order = :sort_order, "updatedAt" = :now
|
||||
WHERE id = :id`,
|
||||
{
|
||||
replacements: {
|
||||
sort_order: elementType.sort_order,
|
||||
now,
|
||||
id: existing.id,
|
||||
},
|
||||
},
|
||||
);
|
||||
console.log(
|
||||
`Updated sort_order for ${elementType.element_type}: ${existing.sort_order} -> ${elementType.sort_order}`,
|
||||
);
|
||||
} else {
|
||||
console.log(
|
||||
`Already exists: ${elementType.element_type} (sort_order: ${elementType.sort_order})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get all projects
|
||||
const projects = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM projects WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
console.log(
|
||||
`Backfilling missing project_element_defaults for ${projects.length} projects...`,
|
||||
);
|
||||
|
||||
// Get all global defaults for backfill
|
||||
const globalDefaults = await queryInterface.sequelize.query(
|
||||
`SELECT id, element_type, name, sort_order, settings_json
|
||||
FROM element_type_defaults
|
||||
WHERE "deletedAt" IS NULL`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
let backfillCount = 0;
|
||||
|
||||
// Backfill project_element_defaults for each project
|
||||
for (const project of projects) {
|
||||
for (const globalDefault of globalDefaults) {
|
||||
// Check if project already has this element type
|
||||
const [existing] = await queryInterface.sequelize.query(
|
||||
`SELECT id FROM project_element_defaults
|
||||
WHERE "projectId" = :projectId AND element_type = :element_type AND "deletedAt" IS NULL`,
|
||||
{
|
||||
replacements: {
|
||||
projectId: project.id,
|
||||
element_type: globalDefault.element_type,
|
||||
},
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (!existing) {
|
||||
await queryInterface.bulkInsert('project_element_defaults', [
|
||||
{
|
||||
id: uuidv4(),
|
||||
projectId: project.id,
|
||||
element_type: globalDefault.element_type,
|
||||
name: globalDefault.name,
|
||||
sort_order: globalDefault.sort_order,
|
||||
settings_json: globalDefault.settings_json,
|
||||
source_element_id: globalDefault.id,
|
||||
snapshot_version: 1,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
]);
|
||||
backfillCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Backfilled ${backfillCount} project element defaults.`);
|
||||
console.log('Sync complete.');
|
||||
},
|
||||
|
||||
async down(_queryInterface, _Sequelize) {
|
||||
// This migration is safe - it only adds missing data
|
||||
// No destructive down migration needed
|
||||
console.log(
|
||||
'No down migration needed - this migration only adds missing data.',
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
/** @type {import('sequelize-cli').Migration} */
|
||||
module.exports = {
|
||||
async up(queryInterface, _Sequelize) {
|
||||
await queryInterface.removeColumn('projects', 'theme_config_json');
|
||||
await queryInterface.removeColumn('projects', 'custom_css_json');
|
||||
await queryInterface.removeColumn('projects', 'cdn_base_url');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
await queryInterface.addColumn('projects', 'theme_config_json', {
|
||||
type: Sequelize.JSON,
|
||||
allowNull: true,
|
||||
});
|
||||
await queryInterface.addColumn('projects', 'custom_css_json', {
|
||||
type: Sequelize.JSON,
|
||||
allowNull: true,
|
||||
});
|
||||
await queryInterface.addColumn('projects', 'cdn_base_url', {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
});
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,67 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Remove duplicate element_type_defaults rows.
|
||||
* Keeps the oldest entry (by createdAt) for each element_type.
|
||||
* This fixes the unique constraint violation during project creation.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Find duplicate element_types
|
||||
const duplicates = await queryInterface.sequelize.query(
|
||||
`SELECT element_type, COUNT(*) as count
|
||||
FROM element_type_defaults
|
||||
WHERE "deletedAt" IS NULL
|
||||
GROUP BY element_type
|
||||
HAVING COUNT(*) > 1`,
|
||||
{ type: Sequelize.QueryTypes.SELECT },
|
||||
);
|
||||
|
||||
if (duplicates.length === 0) {
|
||||
console.log('No duplicate element_type_defaults found.');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Found ${duplicates.length} element_types with duplicates:`,
|
||||
duplicates.map((d) => d.element_type).join(', '),
|
||||
);
|
||||
|
||||
// For each duplicate element_type, keep oldest and delete others
|
||||
for (const dup of duplicates) {
|
||||
// Get all rows for this element_type, ordered by createdAt
|
||||
const rows = await queryInterface.sequelize.query(
|
||||
`SELECT id, "createdAt"
|
||||
FROM element_type_defaults
|
||||
WHERE element_type = :element_type AND "deletedAt" IS NULL
|
||||
ORDER BY "createdAt" ASC`,
|
||||
{
|
||||
replacements: { element_type: dup.element_type },
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
// Keep the first (oldest), delete the rest
|
||||
const idsToDelete = rows.slice(1).map((r) => r.id);
|
||||
|
||||
if (idsToDelete.length > 0) {
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM element_type_defaults WHERE id IN (:ids)`,
|
||||
{ replacements: { ids: idsToDelete } },
|
||||
);
|
||||
console.log(
|
||||
`Deleted ${idsToDelete.length} duplicate(s) for element_type: ${dup.element_type}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Duplicate removal complete.');
|
||||
},
|
||||
|
||||
async down(_queryInterface, _Sequelize) {
|
||||
// Cannot restore deleted duplicates
|
||||
console.log(
|
||||
'Down migration not applicable - duplicates cannot be restored.',
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,78 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Remove invalid element_type_defaults entries.
|
||||
* Only valid element types defined in DEFAULT_ROWS should exist.
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Valid element types as defined in element_type_defaults.js DEFAULT_ROWS
|
||||
const validTypes = [
|
||||
'navigation_next',
|
||||
'navigation_prev',
|
||||
'tooltip',
|
||||
'description',
|
||||
'gallery',
|
||||
'carousel',
|
||||
'video_player',
|
||||
'audio_player',
|
||||
'spot',
|
||||
'logo',
|
||||
'popup',
|
||||
];
|
||||
|
||||
// Find invalid entries
|
||||
const invalidEntries = await queryInterface.sequelize.query(
|
||||
`SELECT id, element_type, name
|
||||
FROM element_type_defaults
|
||||
WHERE element_type NOT IN (:validTypes)
|
||||
AND "deletedAt" IS NULL`,
|
||||
{
|
||||
replacements: { validTypes },
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
if (invalidEntries.length === 0) {
|
||||
console.log('No invalid element_type_defaults found.');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Found ${invalidEntries.length} invalid element_type_defaults:`,
|
||||
);
|
||||
invalidEntries.forEach((entry) => {
|
||||
console.log(` - ${entry.name} (${entry.element_type})`);
|
||||
});
|
||||
|
||||
// Delete invalid entries
|
||||
const idsToDelete = invalidEntries.map((e) => e.id);
|
||||
await queryInterface.sequelize.query(
|
||||
`DELETE FROM element_type_defaults WHERE id IN (:ids)`,
|
||||
{ replacements: { ids: idsToDelete } },
|
||||
);
|
||||
|
||||
// Also delete from project_element_defaults
|
||||
const deletedProjectDefaults = await queryInterface.sequelize.query(
|
||||
`DELETE FROM project_element_defaults
|
||||
WHERE element_type NOT IN (:validTypes)
|
||||
RETURNING id, element_type`,
|
||||
{
|
||||
replacements: { validTypes },
|
||||
type: Sequelize.QueryTypes.SELECT,
|
||||
},
|
||||
);
|
||||
|
||||
console.log(`Deleted ${idsToDelete.length} invalid element_type_defaults.`);
|
||||
console.log(
|
||||
`Deleted ${deletedProjectDefaults.length} invalid project_element_defaults.`,
|
||||
);
|
||||
},
|
||||
|
||||
async down(_queryInterface, _Sequelize) {
|
||||
// Cannot restore deleted invalid entries
|
||||
console.log(
|
||||
'Down migration not applicable - invalid entries cannot be restored.',
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,53 @@
|
||||
'use strict';
|
||||
|
||||
/** @type {import('sequelize-cli').Migration} */
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
await queryInterface.addColumn('tour_pages', 'background_video_autoplay', {
|
||||
type: Sequelize.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: true,
|
||||
});
|
||||
await queryInterface.addColumn('tour_pages', 'background_video_loop', {
|
||||
type: Sequelize.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: true,
|
||||
});
|
||||
await queryInterface.addColumn('tour_pages', 'background_video_muted', {
|
||||
type: Sequelize.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: true,
|
||||
});
|
||||
await queryInterface.addColumn(
|
||||
'tour_pages',
|
||||
'background_video_start_time',
|
||||
{
|
||||
type: Sequelize.DECIMAL(10, 1),
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
},
|
||||
);
|
||||
await queryInterface.addColumn('tour_pages', 'background_video_end_time', {
|
||||
type: Sequelize.DECIMAL(10, 1),
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
});
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
await queryInterface.removeColumn(
|
||||
'tour_pages',
|
||||
'background_video_autoplay',
|
||||
);
|
||||
await queryInterface.removeColumn('tour_pages', 'background_video_loop');
|
||||
await queryInterface.removeColumn('tour_pages', 'background_video_muted');
|
||||
await queryInterface.removeColumn(
|
||||
'tour_pages',
|
||||
'background_video_start_time',
|
||||
);
|
||||
await queryInterface.removeColumn(
|
||||
'tour_pages',
|
||||
'background_video_end_time',
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,29 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Add design canvas dimensions to projects
|
||||
*
|
||||
* Adds design_width and design_height columns to support
|
||||
* responsive canvas scaling with project-specific aspect ratios.
|
||||
*
|
||||
* @type {import('sequelize-cli').Migration}
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
await queryInterface.addColumn('projects', 'design_width', {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: 1920,
|
||||
});
|
||||
await queryInterface.addColumn('projects', 'design_height', {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: 1080,
|
||||
});
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
await queryInterface.removeColumn('projects', 'design_width');
|
||||
await queryInterface.removeColumn('projects', 'design_height');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,30 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Add design_width and design_height to tour_pages
|
||||
*
|
||||
* These fields store the canvas dimensions for presentations.
|
||||
* They are copied from the project's design dimensions when pages are saved/published.
|
||||
* This ensures presentations use the dimensions that were active at save time,
|
||||
* not the current project dimensions (safe migration pattern).
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
await queryInterface.addColumn('tour_pages', 'design_width', {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
});
|
||||
|
||||
await queryInterface.addColumn('tour_pages', 'design_height', {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
});
|
||||
},
|
||||
|
||||
async down(queryInterface) {
|
||||
await queryInterface.removeColumn('tour_pages', 'design_width');
|
||||
await queryInterface.removeColumn('tour_pages', 'design_height');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,33 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Add 'reversed' variant type to asset_variants
|
||||
*
|
||||
* This enables storing pre-reversed videos for back navigation transitions.
|
||||
* Also adds storage_key column to track the S3/local storage path.
|
||||
*/
|
||||
|
||||
/** @type {import('sequelize-cli').Migration} */
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Add 'reversed' to the enum_asset_variants_variant_type enum
|
||||
await queryInterface.sequelize.query(`
|
||||
ALTER TYPE "enum_asset_variants_variant_type"
|
||||
ADD VALUE IF NOT EXISTS 'reversed';
|
||||
`);
|
||||
|
||||
// Add storage_key column if it doesn't exist
|
||||
const tableInfo = await queryInterface.describeTable('asset_variants');
|
||||
if (!tableInfo.storage_key) {
|
||||
await queryInterface.addColumn('asset_variants', 'storage_key', {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
async down() {
|
||||
// PostgreSQL doesn't support removing enum values
|
||||
// storage_key column is safe to leave (no data loss)
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,103 @@
|
||||
'use strict';
|
||||
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
|
||||
/**
|
||||
* Migration: Add hierarchical transition settings
|
||||
*
|
||||
* Creates global_transition_defaults table for platform-wide transition settings
|
||||
* and adds transition_settings JSONB column to projects table for project-level overrides.
|
||||
*
|
||||
* Cascade: Element → Project → Global (fallback)
|
||||
*
|
||||
* @type {import('sequelize-cli').Migration}
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Create global_transition_defaults table (single-row pattern)
|
||||
await queryInterface.createTable('global_transition_defaults', {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
transition_type: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'fade',
|
||||
},
|
||||
duration_ms: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 700,
|
||||
},
|
||||
easing: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'ease-in-out',
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'),
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'),
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: true,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
onUpdate: 'CASCADE',
|
||||
onDelete: 'SET NULL',
|
||||
},
|
||||
});
|
||||
|
||||
// Seed the default row
|
||||
const now = new Date();
|
||||
await queryInterface.bulkInsert('global_transition_defaults', [
|
||||
{
|
||||
id: uuidv4(),
|
||||
transition_type: 'fade',
|
||||
duration_ms: 700,
|
||||
easing: 'ease-in-out',
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
]);
|
||||
|
||||
// Add transition_settings JSONB column to projects
|
||||
await queryInterface.addColumn('projects', 'transition_settings', {
|
||||
type: Sequelize.JSONB,
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
});
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
// Remove transition_settings from projects
|
||||
await queryInterface.removeColumn('projects', 'transition_settings');
|
||||
|
||||
// Drop global_transition_defaults table
|
||||
await queryInterface.dropTable('global_transition_defaults');
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,76 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Simplify transitions and add overlay color
|
||||
*
|
||||
* 1. Add overlay_color column to global_transition_defaults
|
||||
* 2. Update global_transition_defaults: change slide-left/slide-right/zoom to 'fade'
|
||||
* 3. Update projects.transition_settings JSONB where transitionType is slide/zoom
|
||||
*/
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// 1. Add overlay_color column to global_transition_defaults
|
||||
await queryInterface.addColumn(
|
||||
'global_transition_defaults',
|
||||
'overlay_color',
|
||||
{
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: '#000000',
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// 2. Update global_transition_defaults: change slide-left/slide-right/zoom to 'fade'
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE global_transition_defaults
|
||||
SET transition_type = 'fade'
|
||||
WHERE transition_type IN ('slide-left', 'slide-right', 'zoom')`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// 3. Update projects.transition_settings JSONB where transitionType is slide/zoom
|
||||
// Convert slide-left, slide-right, zoom to 'fade'
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE projects
|
||||
SET transition_settings = jsonb_set(
|
||||
COALESCE(transition_settings, '{}'::jsonb),
|
||||
'{transitionType}',
|
||||
'"fade"'
|
||||
)
|
||||
WHERE transition_settings IS NOT NULL
|
||||
AND transition_settings->>'transitionType' IN ('slide-left', 'slide-right', 'zoom')`,
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
async down(queryInterface, _Sequelize) {
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
|
||||
try {
|
||||
// Remove overlay_color column
|
||||
await queryInterface.removeColumn(
|
||||
'global_transition_defaults',
|
||||
'overlay_color',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
// Note: We cannot restore the original slide/zoom values as they are lost
|
||||
// The data migration is one-way
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
@ -0,0 +1,231 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Migration: Create project_transition_settings table
|
||||
*
|
||||
* Creates environment-aware project transition settings following the
|
||||
* project_audio_tracks pattern. This allows transition settings to be
|
||||
* isolated per environment and participate in the publishing workflow.
|
||||
*
|
||||
* Data migration:
|
||||
* - Existing projects.transition_settings values are copied to 'dev' environment records
|
||||
* - The column is dropped after migration to avoid dual storage
|
||||
*/
|
||||
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
|
||||
module.exports = {
|
||||
async up(queryInterface, Sequelize) {
|
||||
// Step 1: Create the project_transition_settings table
|
||||
await queryInterface.createTable('project_transition_settings', {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
projectId: {
|
||||
type: Sequelize.UUID,
|
||||
allowNull: false,
|
||||
references: {
|
||||
model: 'projects',
|
||||
key: 'id',
|
||||
},
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
},
|
||||
environment: {
|
||||
type: Sequelize.ENUM('dev', 'stage', 'production'),
|
||||
allowNull: false,
|
||||
},
|
||||
source_key: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
transition_type: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'fade',
|
||||
},
|
||||
duration_ms: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 700,
|
||||
},
|
||||
easing: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'ease-in-out',
|
||||
},
|
||||
overlay_color: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: '#000000',
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.UUID,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
allowNull: true,
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.UUID,
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
allowNull: true,
|
||||
},
|
||||
createdAt: {
|
||||
allowNull: false,
|
||||
type: Sequelize.DATE,
|
||||
},
|
||||
updatedAt: {
|
||||
allowNull: false,
|
||||
type: Sequelize.DATE,
|
||||
},
|
||||
deletedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
importHash: {
|
||||
type: Sequelize.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Add unique constraint on (projectId, environment)
|
||||
// Use IF NOT EXISTS to avoid errors if index already exists
|
||||
await queryInterface.sequelize.query(`
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS project_transition_settings_project_env_unique
|
||||
ON project_transition_settings ("projectId", environment)
|
||||
WHERE "deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
// Add index on deletedAt for soft delete queries
|
||||
await queryInterface.sequelize.query(`
|
||||
CREATE INDEX IF NOT EXISTS project_transition_settings_deleted_at
|
||||
ON project_transition_settings ("deletedAt")
|
||||
`);
|
||||
|
||||
// Step 2: Migrate existing project.transition_settings data to 'dev' records
|
||||
const [projects] = await queryInterface.sequelize.query(`
|
||||
SELECT id, transition_settings, "createdById", "updatedById"
|
||||
FROM projects
|
||||
WHERE transition_settings IS NOT NULL
|
||||
AND transition_settings != 'null'
|
||||
AND "deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
const now = new Date();
|
||||
const records = [];
|
||||
|
||||
for (const project of projects) {
|
||||
let settings = project.transition_settings;
|
||||
|
||||
// Parse JSONB if it's a string
|
||||
if (typeof settings === 'string') {
|
||||
try {
|
||||
settings = JSON.parse(settings);
|
||||
} catch (e) {
|
||||
console.warn(
|
||||
`Failed to parse transition_settings for project ${project.id}:`,
|
||||
e,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Skip if settings is null, empty object, or has no actual values
|
||||
if (
|
||||
!settings ||
|
||||
typeof settings !== 'object' ||
|
||||
Object.keys(settings).length === 0
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
records.push({
|
||||
id: uuidv4(),
|
||||
projectId: project.id,
|
||||
environment: 'dev',
|
||||
source_key: null,
|
||||
transition_type: settings.transitionType || 'fade',
|
||||
duration_ms: settings.durationMs || 700,
|
||||
easing: settings.easing || 'ease-in-out',
|
||||
overlay_color: settings.overlayColor || '#000000',
|
||||
createdById: project.createdById || null,
|
||||
updatedById: project.updatedById || null,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
deletedAt: null,
|
||||
importHash: null,
|
||||
});
|
||||
}
|
||||
|
||||
if (records.length > 0) {
|
||||
await queryInterface.bulkInsert('project_transition_settings', records);
|
||||
console.log(
|
||||
`Migrated ${records.length} project transition settings to 'dev' environment`,
|
||||
);
|
||||
}
|
||||
|
||||
// Step 3: Drop the transition_settings column from projects table
|
||||
await queryInterface.removeColumn('projects', 'transition_settings');
|
||||
console.log('Dropped transition_settings column from projects table');
|
||||
},
|
||||
|
||||
async down(queryInterface, Sequelize) {
|
||||
// Step 1: Re-add the transition_settings column to projects
|
||||
await queryInterface.addColumn('projects', 'transition_settings', {
|
||||
type: Sequelize.JSONB,
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
});
|
||||
|
||||
// Step 2: Migrate 'dev' records back to projects.transition_settings
|
||||
const [settings] = await queryInterface.sequelize.query(`
|
||||
SELECT "projectId", transition_type, duration_ms, easing, overlay_color
|
||||
FROM project_transition_settings
|
||||
WHERE environment = 'dev'
|
||||
AND "deletedAt" IS NULL
|
||||
`);
|
||||
|
||||
for (const setting of settings) {
|
||||
const jsonValue = JSON.stringify({
|
||||
transitionType: setting.transition_type,
|
||||
durationMs: setting.duration_ms,
|
||||
easing: setting.easing,
|
||||
overlayColor: setting.overlay_color,
|
||||
});
|
||||
|
||||
await queryInterface.sequelize.query(
|
||||
`
|
||||
UPDATE projects
|
||||
SET transition_settings = :settings::jsonb
|
||||
WHERE id = :projectId
|
||||
`,
|
||||
{
|
||||
replacements: {
|
||||
settings: jsonValue,
|
||||
projectId: setting.projectId,
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
// Step 3: Drop indexes and table
|
||||
await queryInterface.sequelize.query(`
|
||||
DROP INDEX IF EXISTS project_transition_settings_project_env_unique;
|
||||
DROP INDEX IF EXISTS project_transition_settings_deleted_at;
|
||||
`);
|
||||
await queryInterface.dropTable('project_transition_settings');
|
||||
|
||||
// Drop the ENUM type
|
||||
await queryInterface.sequelize.query(
|
||||
'DROP TYPE IF EXISTS "enum_project_transition_settings_environment";',
|
||||
);
|
||||
},
|
||||
};
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const access_logs = sequelize.define(
|
||||
'access_logs',
|
||||
{
|
||||
@ -8,50 +8,44 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
environment: {
|
||||
environment: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
|
||||
values: [
|
||||
|
||||
"admin",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
values: ['admin', 'stage', 'production'],
|
||||
},
|
||||
|
||||
path: {
|
||||
path: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: { args: [0, 2048], msg: 'Path must be at most 2048 characters' },
|
||||
},
|
||||
},
|
||||
|
||||
ip_address: {
|
||||
ip_address: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 45],
|
||||
msg: 'IP address must be at most 45 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
user_agent: {
|
||||
user_agent: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 1024],
|
||||
msg: 'User agent must be at most 1024 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
accessed_at: {
|
||||
accessed_at: {
|
||||
type: DataTypes.DATE,
|
||||
allowNull: false,
|
||||
defaultValue: DataTypes.NOW,
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -74,38 +68,18 @@ accessed_at: {
|
||||
);
|
||||
|
||||
access_logs.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.access_logs.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.access_logs.belongsTo(db.users, {
|
||||
@ -113,12 +87,11 @@ accessed_at: {
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.access_logs.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -128,8 +101,5 @@ accessed_at: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return access_logs;
|
||||
};
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const asset_variants = sequelize.define(
|
||||
'asset_variants',
|
||||
{
|
||||
@ -8,60 +8,65 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
variant_type: {
|
||||
variant_type: {
|
||||
type: DataTypes.ENUM,
|
||||
|
||||
|
||||
|
||||
values: [
|
||||
'thumbnail',
|
||||
|
||||
"thumbnail",
|
||||
'preview',
|
||||
|
||||
'webp',
|
||||
|
||||
"preview",
|
||||
'mp4_low',
|
||||
|
||||
'mp4_high',
|
||||
|
||||
"webp",
|
||||
|
||||
|
||||
"mp4_low",
|
||||
|
||||
|
||||
"mp4_high",
|
||||
|
||||
|
||||
"original"
|
||||
'original',
|
||||
|
||||
'reversed',
|
||||
],
|
||||
|
||||
},
|
||||
|
||||
cdn_url: {
|
||||
storage_key: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
allowNull: true,
|
||||
},
|
||||
|
||||
width_px: {
|
||||
cdn_url: {
|
||||
type: DataTypes.TEXT,
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 2048],
|
||||
msg: 'CDN URL must be at most 2048 characters',
|
||||
},
|
||||
isUrlOrEmpty(value) {
|
||||
if (value && value.length > 0 && !/^https?:\/\/.+/.test(value)) {
|
||||
throw new Error('CDN URL must be a valid URL');
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
width_px: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: { args: [0], msg: 'Width must be a non-negative integer' },
|
||||
},
|
||||
},
|
||||
|
||||
height_px: {
|
||||
height_px: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: { args: [0], msg: 'Height must be a non-negative integer' },
|
||||
},
|
||||
},
|
||||
|
||||
size_mb: {
|
||||
size_mb: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: { args: [0], msg: 'Size must be a non-negative number' },
|
||||
},
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -78,43 +83,20 @@ size_mb: {
|
||||
);
|
||||
|
||||
asset_variants.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.asset_variants.belongsTo(db.assets, {
|
||||
as: 'asset',
|
||||
foreignKey: {
|
||||
name: 'assetId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.asset_variants.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -124,9 +106,5 @@ size_mb: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return asset_variants;
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const assets = sequelize.define(
|
||||
'assets',
|
||||
{
|
||||
@ -8,115 +8,92 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
name: {
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 255],
|
||||
msg: 'Asset name must be at most 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
asset_type: {
|
||||
asset_type: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
|
||||
values: ['image', 'video', 'audio', 'file'],
|
||||
},
|
||||
|
||||
type: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'general',
|
||||
|
||||
values: [
|
||||
'icon',
|
||||
|
||||
"image",
|
||||
'background_image',
|
||||
|
||||
'audio',
|
||||
|
||||
"video",
|
||||
'video',
|
||||
|
||||
'transition',
|
||||
|
||||
"audio",
|
||||
'logo',
|
||||
|
||||
'favicon',
|
||||
|
||||
"file"
|
||||
'document',
|
||||
|
||||
'general',
|
||||
],
|
||||
|
||||
},
|
||||
|
||||
cdn_url: {
|
||||
cdn_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
storage_key: {
|
||||
storage_key: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
mime_type: {
|
||||
mime_type: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
is: {
|
||||
args: /^[a-z0-9]+\/[a-z0-9.+-]+$/i,
|
||||
msg: 'Invalid MIME type format',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
size_mb: {
|
||||
size_mb: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
width_px: {
|
||||
width_px: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
height_px: {
|
||||
height_px: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
duration_sec: {
|
||||
duration_sec: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
checksum: {
|
||||
checksum: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
is_public: {
|
||||
is_public: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
is_deleted: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
deleted_at_time: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -132,59 +109,38 @@ deleted_at_time: {
|
||||
indexes: [
|
||||
{ fields: ['projectId'] },
|
||||
{ fields: ['asset_type'] },
|
||||
{ fields: ['type'] },
|
||||
{ fields: ['is_public'] },
|
||||
{ fields: ['is_deleted'] },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
assets.associate = (db) => {
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
db.assets.hasMany(db.asset_variants, {
|
||||
as: 'asset_variants_asset',
|
||||
foreignKey: {
|
||||
name: 'assetId',
|
||||
name: 'assetId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.assets.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.assets.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -194,8 +150,5 @@ deleted_at_time: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return assets;
|
||||
};
|
||||
|
||||
|
||||
91
backend/src/db/models/element_type_defaults.js
Normal file
91
backend/src/db/models/element_type_defaults.js
Normal file
@ -0,0 +1,91 @@
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const element_type_defaults = sequelize.define(
|
||||
'element_type_defaults',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
element_type: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
unique: true,
|
||||
validate: {
|
||||
notEmpty: { msg: 'Element type is required' },
|
||||
len: {
|
||||
args: [1, 100],
|
||||
msg: 'Element type must be between 1 and 100 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
validate: {
|
||||
notEmpty: { msg: 'Name is required' },
|
||||
len: {
|
||||
args: [1, 255],
|
||||
msg: 'Name must be between 1 and 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
sort_order: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
},
|
||||
is_active: {
|
||||
type: DataTypes.VIRTUAL,
|
||||
get() {
|
||||
return true;
|
||||
},
|
||||
},
|
||||
default_settings_json: {
|
||||
type: DataTypes.TEXT,
|
||||
field: 'settings_json',
|
||||
allowNull: true,
|
||||
},
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{ fields: ['element_type'] },
|
||||
{ fields: ['sort_order'] },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
element_type_defaults.associate = (db) => {
|
||||
db.element_type_defaults.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.element_type_defaults.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
|
||||
// Add hasMany relationship to project_element_defaults
|
||||
if (db.project_element_defaults) {
|
||||
db.element_type_defaults.hasMany(db.project_element_defaults, {
|
||||
as: 'project_defaults',
|
||||
foreignKey: {
|
||||
name: 'source_element_id',
|
||||
},
|
||||
constraints: true,
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return element_type_defaults;
|
||||
};
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const file = sequelize.define(
|
||||
'file',
|
||||
{
|
||||
|
||||
73
backend/src/db/models/global_transition_defaults.js
Normal file
73
backend/src/db/models/global_transition_defaults.js
Normal file
@ -0,0 +1,73 @@
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const global_transition_defaults = sequelize.define(
|
||||
'global_transition_defaults',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
transition_type: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'fade',
|
||||
validate: {
|
||||
notEmpty: { msg: 'Transition type is required' },
|
||||
isIn: {
|
||||
args: [['fade', 'none']],
|
||||
msg: 'Invalid transition type',
|
||||
},
|
||||
},
|
||||
},
|
||||
overlay_color: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: '#000000',
|
||||
validate: {
|
||||
notEmpty: { msg: 'Overlay color is required' },
|
||||
},
|
||||
},
|
||||
duration_ms: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 700,
|
||||
validate: {
|
||||
isInt: { msg: 'Duration must be an integer' },
|
||||
min: {
|
||||
args: [0],
|
||||
msg: 'Duration must be at least 0ms',
|
||||
},
|
||||
},
|
||||
},
|
||||
easing: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'ease-in-out',
|
||||
validate: {
|
||||
notEmpty: { msg: 'Easing is required' },
|
||||
isIn: {
|
||||
args: [['ease-in-out', 'ease-in', 'ease-out', 'linear']],
|
||||
msg: 'Invalid easing function',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
},
|
||||
);
|
||||
|
||||
global_transition_defaults.associate = (db) => {
|
||||
db.global_transition_defaults.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.global_transition_defaults.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return global_transition_defaults;
|
||||
};
|
||||
@ -5,7 +5,7 @@ const path = require('path');
|
||||
const Sequelize = require('sequelize');
|
||||
const basename = path.basename(__filename);
|
||||
const env = process.env.NODE_ENV || 'development';
|
||||
const config = require("../db.config")[env];
|
||||
const config = require('../db.config')[env];
|
||||
const db = {};
|
||||
|
||||
let sequelize;
|
||||
@ -13,20 +13,29 @@ console.log(env);
|
||||
if (config.use_env_variable) {
|
||||
sequelize = new Sequelize(process.env[config.use_env_variable], config);
|
||||
} else {
|
||||
sequelize = new Sequelize(config.database, config.username, config.password, config);
|
||||
sequelize = new Sequelize(
|
||||
config.database,
|
||||
config.username,
|
||||
config.password,
|
||||
config,
|
||||
);
|
||||
}
|
||||
|
||||
fs
|
||||
.readdirSync(__dirname)
|
||||
.filter(file => {
|
||||
return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js');
|
||||
fs.readdirSync(__dirname)
|
||||
.filter((file) => {
|
||||
return (
|
||||
file.indexOf('.') !== 0 && file !== basename && file.slice(-3) === '.js'
|
||||
);
|
||||
})
|
||||
.forEach(file => {
|
||||
const model = require(path.join(__dirname, file))(sequelize, Sequelize.DataTypes)
|
||||
.forEach((file) => {
|
||||
const model = require(path.join(__dirname, file))(
|
||||
sequelize,
|
||||
Sequelize.DataTypes,
|
||||
);
|
||||
db[model.name] = model;
|
||||
});
|
||||
|
||||
Object.keys(db).forEach(modelName => {
|
||||
Object.keys(db).forEach((modelName) => {
|
||||
if (db[modelName].associate) {
|
||||
db[modelName].associate(db);
|
||||
}
|
||||
|
||||
@ -1,190 +0,0 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
const page_elements = sequelize.define(
|
||||
'page_elements',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
element_type: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
|
||||
values: [
|
||||
|
||||
"nav_button",
|
||||
|
||||
|
||||
"spot",
|
||||
|
||||
|
||||
"description",
|
||||
|
||||
|
||||
"tooltip",
|
||||
|
||||
|
||||
"gallery",
|
||||
|
||||
|
||||
"carousel",
|
||||
|
||||
|
||||
"logo",
|
||||
|
||||
|
||||
"video_player",
|
||||
|
||||
|
||||
"popup"
|
||||
|
||||
],
|
||||
|
||||
},
|
||||
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
sort_order: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
|
||||
},
|
||||
|
||||
is_visible: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
x_percent: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
y_percent: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
width_percent: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
height_percent: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
rotation_deg: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
style_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
content_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{ fields: ['pageId'] },
|
||||
{ fields: ['pageId', 'sort_order'] },
|
||||
{ fields: ['is_visible'] },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
page_elements.associate = (db) => {
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
|
||||
db.page_elements.belongsTo(db.tour_pages, {
|
||||
as: 'page',
|
||||
foreignKey: {
|
||||
name: 'pageId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.page_elements.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.page_elements.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return page_elements;
|
||||
};
|
||||
|
||||
@ -1,141 +0,0 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
const page_links = sequelize.define(
|
||||
'page_links',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
direction: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'forward',
|
||||
|
||||
values: [
|
||||
|
||||
"forward",
|
||||
|
||||
|
||||
"back",
|
||||
|
||||
|
||||
"external"
|
||||
|
||||
],
|
||||
|
||||
},
|
||||
|
||||
external_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
is_active: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
trigger_selector: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{ fields: ['from_pageId'] },
|
||||
{ fields: ['to_pageId'] },
|
||||
{ fields: ['transitionId'] },
|
||||
{ fields: ['is_active'] },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
page_links.associate = (db) => {
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
|
||||
db.page_links.belongsTo(db.tour_pages, {
|
||||
as: 'from_page',
|
||||
foreignKey: {
|
||||
name: 'from_pageId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.page_links.belongsTo(db.tour_pages, {
|
||||
as: 'to_page',
|
||||
foreignKey: {
|
||||
name: 'to_pageId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.page_links.belongsTo(db.transitions, {
|
||||
as: 'transition',
|
||||
foreignKey: {
|
||||
name: 'transitionId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.page_links.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.page_links.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return page_links;
|
||||
};
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const permissions = sequelize.define(
|
||||
'permissions',
|
||||
{
|
||||
@ -8,11 +8,17 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
name: {
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
allowNull: false,
|
||||
unique: true,
|
||||
validate: {
|
||||
notEmpty: { msg: 'Permission name is required' },
|
||||
len: {
|
||||
args: [1, 100],
|
||||
msg: 'Permission name must be between 1 and 100 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -29,34 +35,9 @@ name: {
|
||||
);
|
||||
|
||||
permissions.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.permissions.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
@ -67,9 +48,5 @@ name: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return permissions;
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const presigned_url_requests = sequelize.define(
|
||||
'presigned_url_requests',
|
||||
{
|
||||
@ -8,77 +8,63 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
purpose: {
|
||||
purpose: {
|
||||
type: DataTypes.ENUM,
|
||||
|
||||
|
||||
|
||||
values: [
|
||||
|
||||
"upload",
|
||||
|
||||
|
||||
"download"
|
||||
|
||||
],
|
||||
|
||||
values: ['upload', 'download'],
|
||||
},
|
||||
|
||||
asset_type: {
|
||||
asset_type: {
|
||||
type: DataTypes.ENUM,
|
||||
|
||||
|
||||
|
||||
values: [
|
||||
|
||||
"image",
|
||||
|
||||
|
||||
"video",
|
||||
|
||||
|
||||
"audio",
|
||||
|
||||
|
||||
"file"
|
||||
|
||||
],
|
||||
|
||||
values: ['image', 'video', 'audio', 'file'],
|
||||
},
|
||||
|
||||
requested_key: {
|
||||
requested_key: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 1024],
|
||||
msg: 'Requested key must be at most 1024 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
mime_type: {
|
||||
mime_type: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 255],
|
||||
msg: 'MIME type must be at most 255 characters',
|
||||
},
|
||||
isMimeTypeOrEmpty(value) {
|
||||
if (
|
||||
value &&
|
||||
value.length > 0 &&
|
||||
!/^[\w.-]+\/[\w.+-]+$/.test(value)
|
||||
) {
|
||||
throw new Error('MIME type must be in format type/subtype');
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
requested_size_mb: {
|
||||
requested_size_mb: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: {
|
||||
args: [0],
|
||||
msg: 'Requested size must be a non-negative number',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
expires_at: {
|
||||
expires_at: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
status: {
|
||||
status: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -95,38 +81,18 @@ status: {
|
||||
);
|
||||
|
||||
presigned_url_requests.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.presigned_url_requests.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.presigned_url_requests.belongsTo(db.users, {
|
||||
@ -134,12 +100,11 @@ status: {
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.presigned_url_requests.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -149,9 +114,5 @@ status: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return presigned_url_requests;
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const project_audio_tracks = sequelize.define(
|
||||
'project_audio_tracks',
|
||||
{
|
||||
@ -8,85 +8,58 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
environment: {
|
||||
environment: {
|
||||
type: DataTypes.ENUM,
|
||||
|
||||
|
||||
|
||||
values: [
|
||||
|
||||
"dev",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
values: ['dev', 'stage', 'production'],
|
||||
},
|
||||
|
||||
source_key: {
|
||||
source_key: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
name: {
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 255],
|
||||
msg: 'Audio track name must be at most 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
slug: {
|
||||
slug: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
url: {
|
||||
url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
loop: {
|
||||
loop: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
volume: {
|
||||
volume: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: { args: [0], msg: 'Volume must be at least 0' },
|
||||
max: { args: [1], msg: 'Volume must be at most 1' },
|
||||
},
|
||||
},
|
||||
|
||||
sort_order: {
|
||||
sort_order: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
is_enabled: {
|
||||
is_enabled: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -103,43 +76,20 @@ is_enabled: {
|
||||
);
|
||||
|
||||
project_audio_tracks.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.project_audio_tracks.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.project_audio_tracks.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -149,9 +99,5 @@ is_enabled: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return project_audio_tracks;
|
||||
};
|
||||
|
||||
|
||||
|
||||
101
backend/src/db/models/project_element_defaults.js
Normal file
101
backend/src/db/models/project_element_defaults.js
Normal file
@ -0,0 +1,101 @@
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const project_element_defaults = sequelize.define(
|
||||
'project_element_defaults',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
element_type: {
|
||||
// TEXT for flexibility - matches element_type_defaults and page_elements
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
validate: {
|
||||
notEmpty: { msg: 'Element type is required' },
|
||||
len: {
|
||||
args: [1, 100],
|
||||
msg: 'Element type must be between 1 and 100 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
validate: {
|
||||
len: { args: [0, 255], msg: 'Name must be at most 255 characters' },
|
||||
},
|
||||
},
|
||||
sort_order: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
},
|
||||
settings_json: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
source_element_id: {
|
||||
// Optional FK - tracks which global default this was snapshotted from
|
||||
// SET NULL on global delete to preserve project overrides
|
||||
type: DataTypes.UUID,
|
||||
allowNull: true,
|
||||
},
|
||||
snapshot_version: {
|
||||
// Increments when resetting from global - enables "check for updates" feature
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 1,
|
||||
},
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{ fields: ['projectId'] },
|
||||
{ fields: ['projectId', 'element_type'], unique: true },
|
||||
{ fields: ['element_type'] },
|
||||
{ fields: ['source_element_id'] },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
project_element_defaults.associate = (db) => {
|
||||
db.project_element_defaults.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
allowNull: false,
|
||||
},
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.project_element_defaults.belongsTo(db.element_type_defaults, {
|
||||
as: 'source_element',
|
||||
foreignKey: {
|
||||
name: 'source_element_id',
|
||||
},
|
||||
constraints: true,
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.project_element_defaults.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.project_element_defaults.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return project_element_defaults;
|
||||
};
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const project_memberships = sequelize.define(
|
||||
'project_memberships',
|
||||
{
|
||||
@ -8,50 +8,27 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
access_level: {
|
||||
access_level: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'viewer',
|
||||
|
||||
values: [
|
||||
|
||||
"owner",
|
||||
|
||||
|
||||
"editor",
|
||||
|
||||
|
||||
"reviewer",
|
||||
|
||||
|
||||
"viewer"
|
||||
|
||||
],
|
||||
|
||||
values: ['owner', 'editor', 'reviewer', 'viewer'],
|
||||
},
|
||||
|
||||
is_active: {
|
||||
is_active: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
invited_at: {
|
||||
invited_at: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
accepted_at: {
|
||||
accepted_at: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -75,38 +52,18 @@ accepted_at: {
|
||||
);
|
||||
|
||||
project_memberships.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.project_memberships.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.project_memberships.belongsTo(db.users, {
|
||||
@ -114,12 +71,11 @@ accepted_at: {
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.project_memberships.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -129,8 +85,5 @@ accepted_at: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return project_memberships;
|
||||
};
|
||||
|
||||
|
||||
103
backend/src/db/models/project_transition_settings.js
Normal file
103
backend/src/db/models/project_transition_settings.js
Normal file
@ -0,0 +1,103 @@
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const project_transition_settings = sequelize.define(
|
||||
'project_transition_settings',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
environment: {
|
||||
type: DataTypes.ENUM,
|
||||
values: ['dev', 'stage', 'production'],
|
||||
allowNull: false,
|
||||
},
|
||||
|
||||
source_key: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
|
||||
transition_type: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'fade',
|
||||
validate: {
|
||||
isIn: {
|
||||
args: [['fade', 'none', 'video']],
|
||||
msg: 'Transition type must be fade, none, or video',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
duration_ms: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 700,
|
||||
validate: {
|
||||
min: { args: [0], msg: 'Duration must be at least 0ms' },
|
||||
max: { args: [10000], msg: 'Duration must be at most 10000ms' },
|
||||
},
|
||||
},
|
||||
|
||||
easing: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: 'ease-in-out',
|
||||
validate: {
|
||||
isIn: {
|
||||
args: [['ease-in-out', 'ease-in', 'ease-out', 'linear']],
|
||||
msg: 'Easing must be ease-in-out, ease-in, ease-out, or linear',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
overlay_color: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: '#000000',
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{
|
||||
fields: ['projectId', 'environment'],
|
||||
unique: true,
|
||||
where: { deletedAt: null },
|
||||
},
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
project_transition_settings.associate = (db) => {
|
||||
db.project_transition_settings.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.project_transition_settings.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.project_transition_settings.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return project_transition_settings;
|
||||
};
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const projects = sequelize.define(
|
||||
'projects',
|
||||
{
|
||||
@ -8,110 +8,65 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
name: {
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
|
||||
validate: {
|
||||
notEmpty: { msg: 'Project name is required' },
|
||||
len: {
|
||||
args: [1, 255],
|
||||
msg: 'Project name must be between 1 and 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
slug: {
|
||||
slug: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
unique: true,
|
||||
|
||||
validate: {
|
||||
notEmpty: { msg: 'Slug is required' },
|
||||
is: {
|
||||
args: /^[a-z0-9_-]+$/i,
|
||||
msg: 'Slug can only contain letters, numbers, dashes, and underscores',
|
||||
},
|
||||
len: {
|
||||
args: [1, 255],
|
||||
msg: 'Slug must be between 1 and 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
description: {
|
||||
description: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
phase: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'dev',
|
||||
|
||||
values: [
|
||||
|
||||
"dev",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
},
|
||||
|
||||
logo_url: {
|
||||
logo_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
favicon_url: {
|
||||
favicon_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
og_image_url: {
|
||||
og_image_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
theme_config_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
design_width: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: 1920,
|
||||
},
|
||||
|
||||
custom_css_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
design_height: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: 1080,
|
||||
},
|
||||
|
||||
cdn_base_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
entry_page_slug: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
is_deleted: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
deleted_at_time: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
// Note: transition_settings moved to project_transition_settings table
|
||||
// for environment-aware storage (dev, stage, production)
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
@ -123,115 +78,114 @@ deleted_at_time: {
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{ fields: ['slug'], unique: true },
|
||||
{ fields: ['phase'] },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
indexes: [{ fields: ['slug'], unique: true }, { fields: ['deletedAt'] }],
|
||||
},
|
||||
);
|
||||
|
||||
projects.associate = (db) => {
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
db.projects.hasMany(db.project_memberships, {
|
||||
as: 'project_memberships_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
db.projects.hasMany(db.assets, {
|
||||
as: 'assets_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
db.projects.hasMany(db.presigned_url_requests, {
|
||||
as: 'presigned_url_requests_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
db.projects.hasMany(db.tour_pages, {
|
||||
as: 'tour_pages_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.projects.hasMany(db.transitions, {
|
||||
as: 'transitions_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
db.projects.hasMany(db.project_audio_tracks, {
|
||||
as: 'project_audio_tracks_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
db.projects.hasMany(db.publish_events, {
|
||||
as: 'publish_events_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
db.projects.hasMany(db.pwa_caches, {
|
||||
as: 'pwa_caches_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
db.projects.hasMany(db.access_logs, {
|
||||
as: 'access_logs_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.projects.hasMany(db.project_element_defaults, {
|
||||
as: 'project_element_defaults_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.projects.hasMany(db.project_transition_settings, {
|
||||
as: 'project_transition_settings_project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.projects.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
@ -242,8 +196,5 @@ deleted_at_time: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return projects;
|
||||
};
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const publish_events = sequelize.define(
|
||||
'publish_events',
|
||||
{
|
||||
@ -8,116 +8,87 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
title: {
|
||||
title: {
|
||||
type: DataTypes.STRING,
|
||||
allowNull: true,
|
||||
|
||||
validate: {
|
||||
len: { args: [0, 255], msg: 'Title must be at most 255 characters' },
|
||||
},
|
||||
},
|
||||
|
||||
description: {
|
||||
description: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 5000],
|
||||
msg: 'Description must be at most 5000 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
from_environment: {
|
||||
from_environment: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
|
||||
values: [
|
||||
|
||||
"dev",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
values: ['dev', 'stage', 'production'],
|
||||
},
|
||||
|
||||
to_environment: {
|
||||
to_environment: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
|
||||
values: [
|
||||
|
||||
"dev",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
values: ['dev', 'stage', 'production'],
|
||||
},
|
||||
|
||||
started_at: {
|
||||
started_at: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
finished_at: {
|
||||
finished_at: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
status: {
|
||||
status: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'queued',
|
||||
|
||||
values: [
|
||||
|
||||
"queued",
|
||||
|
||||
|
||||
"running",
|
||||
|
||||
|
||||
"success",
|
||||
|
||||
|
||||
"failed"
|
||||
|
||||
],
|
||||
|
||||
values: ['queued', 'running', 'success', 'failed'],
|
||||
},
|
||||
|
||||
error_message: {
|
||||
error_message: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
pages_copied: {
|
||||
pages_copied: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: {
|
||||
args: [0],
|
||||
msg: 'Pages copied must be a non-negative integer',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
transitions_copied: {
|
||||
transitions_copied: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: {
|
||||
args: [0],
|
||||
msg: 'Transitions copied must be a non-negative integer',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
audios_copied: {
|
||||
audios_copied: {
|
||||
type: DataTypes.INTEGER,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
min: {
|
||||
args: [0],
|
||||
msg: 'Audios copied must be a non-negative integer',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -140,38 +111,18 @@ audios_copied: {
|
||||
);
|
||||
|
||||
publish_events.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.publish_events.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
db.publish_events.belongsTo(db.users, {
|
||||
@ -179,12 +130,11 @@ audios_copied: {
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.publish_events.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -194,7 +144,5 @@ audios_copied: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return publish_events;
|
||||
};
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const pwa_caches = sequelize.define(
|
||||
'pwa_caches',
|
||||
{
|
||||
@ -8,58 +8,39 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
environment: {
|
||||
environment: {
|
||||
type: DataTypes.ENUM,
|
||||
|
||||
|
||||
|
||||
values: [
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
values: ['dev', 'stage', 'production'],
|
||||
},
|
||||
|
||||
cache_version: {
|
||||
cache_version: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
validate: {
|
||||
len: {
|
||||
args: [0, 255],
|
||||
msg: 'Cache version must be at most 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
manifest_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
manifest_json: {
|
||||
type: DataTypes.JSON,
|
||||
},
|
||||
|
||||
asset_list_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
asset_list_json: {
|
||||
type: DataTypes.JSON,
|
||||
},
|
||||
|
||||
generated_at: {
|
||||
generated_at: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
is_active: {
|
||||
is_active: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -76,43 +57,20 @@ is_active: {
|
||||
);
|
||||
|
||||
pwa_caches.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.pwa_caches.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.pwa_caches.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -122,9 +80,5 @@ is_active: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return pwa_caches;
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const roles = sequelize.define(
|
||||
'roles',
|
||||
{
|
||||
@ -8,18 +8,20 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
name: {
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
allowNull: false,
|
||||
validate: {
|
||||
notEmpty: { msg: 'Role name is required' },
|
||||
len: {
|
||||
args: [1, 100],
|
||||
msg: 'Role name must be between 1 and 100 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
role_customization: {
|
||||
role_customization: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -36,13 +38,13 @@ role_customization: {
|
||||
);
|
||||
|
||||
roles.associate = (db) => {
|
||||
|
||||
db.roles.belongsToMany(db.permissions, {
|
||||
as: 'permissions',
|
||||
foreignKey: {
|
||||
name: 'roles_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
through: 'rolesPermissionsPermissions',
|
||||
});
|
||||
|
||||
@ -51,45 +53,24 @@ role_customization: {
|
||||
foreignKey: {
|
||||
name: 'roles_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
through: 'rolesPermissionsPermissions',
|
||||
});
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
db.roles.hasMany(db.users, {
|
||||
as: 'users_app_role',
|
||||
foreignKey: {
|
||||
name: 'app_roleId',
|
||||
name: 'app_roleId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.roles.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
@ -100,9 +81,5 @@ role_customization: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return roles;
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const tour_pages = sequelize.define(
|
||||
'tour_pages',
|
||||
{
|
||||
@ -8,97 +8,122 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
environment: {
|
||||
environment: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'dev',
|
||||
|
||||
values: [
|
||||
|
||||
"dev",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
values: ['dev', 'stage', 'production'],
|
||||
},
|
||||
|
||||
source_key: {
|
||||
source_key: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
name: {
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
|
||||
validate: {
|
||||
notEmpty: { msg: 'Page name is required' },
|
||||
len: {
|
||||
args: [1, 255],
|
||||
msg: 'Page name must be between 1 and 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
slug: {
|
||||
slug: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
|
||||
validate: {
|
||||
notEmpty: { msg: 'Slug is required' },
|
||||
is: {
|
||||
args: /^[a-z0-9_-]+$/i,
|
||||
msg: 'Slug can only contain letters, numbers, dashes, and underscores',
|
||||
},
|
||||
len: {
|
||||
args: [1, 255],
|
||||
msg: 'Slug must be between 1 and 255 characters',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
sort_order: {
|
||||
sort_order: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
|
||||
},
|
||||
|
||||
background_image_url: {
|
||||
background_image_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
background_video_url: {
|
||||
background_video_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
background_audio_url: {
|
||||
background_audio_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
background_loop: {
|
||||
background_loop: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
requires_auth: {
|
||||
background_video_autoplay: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: true,
|
||||
},
|
||||
|
||||
background_video_loop: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: true,
|
||||
},
|
||||
|
||||
background_video_muted: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
allowNull: false,
|
||||
defaultValue: true,
|
||||
},
|
||||
|
||||
background_video_start_time: {
|
||||
type: DataTypes.DECIMAL(10, 1),
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
},
|
||||
|
||||
background_video_end_time: {
|
||||
type: DataTypes.DECIMAL(10, 1),
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
},
|
||||
|
||||
design_width: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
},
|
||||
|
||||
design_height: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
defaultValue: null,
|
||||
},
|
||||
|
||||
requires_auth: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
ui_schema_json: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
ui_schema_json: {
|
||||
type: DataTypes.JSON,
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -121,67 +146,20 @@ ui_schema_json: {
|
||||
);
|
||||
|
||||
tour_pages.associate = (db) => {
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
db.tour_pages.hasMany(db.page_elements, {
|
||||
as: 'page_elements_page',
|
||||
foreignKey: {
|
||||
name: 'pageId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
db.tour_pages.hasMany(db.page_links, {
|
||||
as: 'page_links_from_page',
|
||||
foreignKey: {
|
||||
name: 'from_pageId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.tour_pages.hasMany(db.page_links, {
|
||||
as: 'page_links_to_page',
|
||||
foreignKey: {
|
||||
name: 'to_pageId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.tour_pages.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.tour_pages.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -191,8 +169,5 @@ ui_schema_json: {
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return tour_pages;
|
||||
};
|
||||
|
||||
|
||||
@ -1,157 +0,0 @@
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
const transitions = sequelize.define(
|
||||
'transitions',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
environment: {
|
||||
type: DataTypes.ENUM,
|
||||
allowNull: false,
|
||||
defaultValue: 'dev',
|
||||
|
||||
values: [
|
||||
|
||||
"dev",
|
||||
|
||||
|
||||
"stage",
|
||||
|
||||
|
||||
"production"
|
||||
|
||||
],
|
||||
|
||||
},
|
||||
|
||||
source_key: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
|
||||
},
|
||||
|
||||
slug: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
|
||||
},
|
||||
|
||||
video_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
audio_url: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
supports_reverse: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
duration_sec: {
|
||||
type: DataTypes.DECIMAL,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [
|
||||
{ fields: ['projectId'] },
|
||||
{ fields: ['projectId', 'environment', 'slug'], unique: true },
|
||||
{ fields: ['deletedAt'] },
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
transitions.associate = (db) => {
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
db.transitions.hasMany(db.page_links, {
|
||||
as: 'page_links_transition',
|
||||
foreignKey: {
|
||||
name: 'transitionId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
|
||||
db.transitions.belongsTo(db.projects, {
|
||||
as: 'project',
|
||||
foreignKey: {
|
||||
name: 'projectId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.transitions.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.transitions.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
return transitions;
|
||||
};
|
||||
|
||||
@ -1,50 +0,0 @@
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const ui_elements = sequelize.define(
|
||||
'ui_elements',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
element_type: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
},
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
settings_json: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
sort_order: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: false,
|
||||
defaultValue: 0,
|
||||
},
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
indexes: [{ fields: ['element_type'] }, { fields: ['sort_order'] }, { fields: ['deletedAt'] }],
|
||||
},
|
||||
);
|
||||
|
||||
ui_elements.associate = (db) => {
|
||||
db.ui_elements.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.ui_elements.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return ui_elements;
|
||||
};
|
||||
@ -3,7 +3,7 @@ const providers = config.providers;
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcrypt');
|
||||
|
||||
module.exports = function(sequelize, DataTypes) {
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const users = sequelize.define(
|
||||
'users',
|
||||
{
|
||||
@ -13,93 +13,67 @@ module.exports = function(sequelize, DataTypes) {
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
firstName: {
|
||||
firstName: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
lastName: {
|
||||
lastName: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
phoneNumber: {
|
||||
phoneNumber: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
email: {
|
||||
email: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
unique: true,
|
||||
|
||||
validate: {
|
||||
isEmail: { msg: 'Must be a valid email address' },
|
||||
notEmpty: { msg: 'Email is required' },
|
||||
},
|
||||
},
|
||||
|
||||
disabled: {
|
||||
disabled: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
password: {
|
||||
password: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
|
||||
},
|
||||
|
||||
emailVerified: {
|
||||
emailVerified: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
emailVerificationToken: {
|
||||
emailVerificationToken: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
emailVerificationTokenExpiresAt: {
|
||||
emailVerificationTokenExpiresAt: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
passwordResetToken: {
|
||||
passwordResetToken: {
|
||||
type: DataTypes.TEXT,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
passwordResetTokenExpiresAt: {
|
||||
passwordResetTokenExpiresAt: {
|
||||
type: DataTypes.DATE,
|
||||
|
||||
|
||||
|
||||
},
|
||||
|
||||
provider: {
|
||||
provider: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
defaultValue: providers.LOCAL,
|
||||
|
||||
},
|
||||
|
||||
importHash: {
|
||||
@ -121,13 +95,13 @@ provider: {
|
||||
);
|
||||
|
||||
users.associate = (db) => {
|
||||
|
||||
db.users.belongsToMany(db.permissions, {
|
||||
as: 'custom_permissions',
|
||||
foreignKey: {
|
||||
name: 'users_custom_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
through: 'usersCustom_permissionsPermissions',
|
||||
});
|
||||
|
||||
@ -136,88 +110,77 @@ provider: {
|
||||
foreignKey: {
|
||||
name: 'users_custom_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
through: 'usersCustom_permissionsPermissions',
|
||||
});
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
|
||||
|
||||
db.users.hasMany(db.project_memberships, {
|
||||
as: 'project_memberships_user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
db.users.hasMany(db.presigned_url_requests, {
|
||||
as: 'presigned_url_requests_user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
db.users.hasMany(db.publish_events, {
|
||||
as: 'publish_events_user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
db.users.hasMany(db.access_logs, {
|
||||
as: 'access_logs_user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
|
||||
//end loop
|
||||
|
||||
db.users.belongsTo(db.roles, {
|
||||
as: 'app_role',
|
||||
foreignKey: {
|
||||
name: 'app_roleId',
|
||||
},
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'SET NULL',
|
||||
onUpdate: 'CASCADE',
|
||||
});
|
||||
|
||||
|
||||
|
||||
db.users.hasMany(db.file, {
|
||||
as: 'avatar',
|
||||
foreignKey: 'belongsToId',
|
||||
constraints: false,
|
||||
constraints: true,
|
||||
onDelete: 'CASCADE',
|
||||
onUpdate: 'CASCADE',
|
||||
scope: {
|
||||
belongsTo: db.users.getTableName(),
|
||||
belongsToColumn: 'avatar',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
db.users.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
@ -227,47 +190,41 @@ provider: {
|
||||
});
|
||||
};
|
||||
|
||||
users.beforeCreate((users) => {
|
||||
users = trimStringFields(users);
|
||||
|
||||
users.beforeCreate((users) => {
|
||||
users = trimStringFields(users);
|
||||
if (
|
||||
users.provider !== providers.LOCAL &&
|
||||
Object.values(providers).indexOf(users.provider) > -1
|
||||
) {
|
||||
users.emailVerified = true;
|
||||
|
||||
if (users.provider !== providers.LOCAL && Object.values(providers).indexOf(users.provider) > -1) {
|
||||
users.emailVerified = true;
|
||||
if (!users.password) {
|
||||
const password = crypto.randomBytes(20).toString('hex');
|
||||
|
||||
if (!users.password) {
|
||||
const password = crypto
|
||||
.randomBytes(20)
|
||||
.toString('hex');
|
||||
|
||||
const hashedPassword = bcrypt.hashSync(
|
||||
password,
|
||||
config.bcrypt.saltRounds,
|
||||
const hashedPassword = bcrypt.hashSync(
|
||||
password,
|
||||
config.bcrypt.saltRounds,
|
||||
);
|
||||
|
||||
users.password = hashedPassword
|
||||
}
|
||||
}
|
||||
});
|
||||
users.password = hashedPassword;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
users.beforeUpdate((users) => {
|
||||
trimStringFields(users);
|
||||
});
|
||||
|
||||
|
||||
return users;
|
||||
};
|
||||
|
||||
|
||||
function trimStringFields(users) {
|
||||
users.email = users.email.trim();
|
||||
|
||||
users.firstName = users.firstName
|
||||
? users.firstName.trim()
|
||||
: null;
|
||||
users.firstName = users.firstName ? users.firstName.trim() : null;
|
||||
|
||||
users.lastName = users.lastName
|
||||
? users.lastName.trim()
|
||||
: null;
|
||||
users.lastName = users.lastName ? users.lastName.trim() : null;
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user