Initial version

commit 9c39ef3bbc
.dockerignore (new file, 3 lines)
@@ -0,0 +1,3 @@
backend/node_modules
frontend/node_modules
frontend/build
.gitignore (new file, vendored, 3 lines)
@@ -0,0 +1,3 @@
node_modules/
*/node_modules/
*/build/
Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
FROM node:20.15.1-alpine AS builder
RUN apk add --no-cache git
WORKDIR /app
COPY frontend/package.json frontend/yarn.lock ./
RUN yarn install --pure-lockfile
COPY frontend .
RUN yarn build

FROM node:20.15.1-alpine
WORKDIR /app
COPY backend/package.json backend/yarn.lock ./
RUN yarn install --pure-lockfile
COPY backend .

COPY --from=builder /app/build /app/public
CMD ["yarn", "start"]
Dockerfile.dev (new file, 64 lines)
@@ -0,0 +1,64 @@
# Base image for Node.js dependencies
FROM node:20.15.1-alpine AS frontend-deps
RUN apk add --no-cache git
WORKDIR /app/frontend
COPY frontend/package.json frontend/yarn.lock ./
RUN yarn install --pure-lockfile

FROM node:20.15.1-alpine AS backend-deps
RUN apk add --no-cache git
WORKDIR /app/backend
COPY backend/package.json backend/yarn.lock ./
RUN yarn install --pure-lockfile

FROM node:20.15.1-alpine AS app-shell-deps
RUN apk add --no-cache git
WORKDIR /app/app-shell
COPY app-shell/package.json app-shell/yarn.lock ./
RUN yarn install --pure-lockfile

# Nginx setup and application build
FROM node:20.15.1-alpine AS build
RUN apk add --no-cache git nginx
RUN yarn global add concurrently

# Make sure to add yarn global bin to PATH
ENV PATH /root/.yarn/bin:/root/.config/yarn/global/node_modules/.bin:$PATH

# Copy dependencies
WORKDIR /app
COPY --from=frontend-deps /app/frontend /app/frontend
COPY --from=backend-deps /app/backend /app/backend
COPY --from=app-shell-deps /app/app-shell /app/app-shell

COPY frontend /app/frontend
COPY backend /app/backend
COPY app-shell /app/app-shell
COPY docker /app/docker

# Copy Nginx configuration
COPY nginx.conf /etc/nginx/nginx.conf

# Copy all files from root to /app
COPY . /app

# Expose the port the app runs on
EXPOSE 8080
ENV NODE_ENV=dev_stage
ENV FRONT_PORT=3001
ENV APP_SHELL_PORT=4000

# Start frontend, backend and nginx, then launch app_shell once the frontend is up
CMD ["sh", "-c", "\
concurrently --kill-others-on-fail \
\"yarn --cwd /app/frontend dev\" \
\"yarn --cwd /app/backend start\" \
\"sleep 10 && nginx -g 'daemon off;'\" & \
CONC_PID=$! && \
echo \"Waiting for frontend (port ${FRONT_PORT}) to be available...\" && \
while ! nc -z localhost ${FRONT_PORT}; do \
sleep 2; \
done && \
echo 'Frontend is up. Starting app_shell for Git check...' && \
yarn --cwd /app/app-shell start && \
wait $CONC_PID"]
README.md (new file, 198 lines)
@@ -0,0 +1,198 @@

# i want to create a website for pdf & image convert

## This project was generated by [Flatlogic Platform](https://flatlogic.com).

- Frontend: [React.js](https://flatlogic.com/templates?framework%5B%5D=react&sort=default)
- Backend: [NodeJS](https://flatlogic.com/templates?backend%5B%5D=nodejs&sort=default)

<details><summary>Backend Folder Structure</summary>

The generated application has the following backend folder structure:

The `src` folder contains your working files that will later be used to create the build. It contains the following folders:

- `auth` - configuration of the library used for authentication and authorization;
- `db` - contains the following folders:
  - `api` - documentation that is automatically generated by jsdoc or other tools;
  - `migrations` - the skeleton of the database, i.e. all the actions applied to the database schema;
  - `models` - the models that represent the database for the backend;
  - `seeders` - the entities that create the data for the database.
- `routes` - contains all the routes created with Express Router; what they do is exported from a controller file;
- `services` - contains such folders as `emails` and `notifications`.
</details>

- Database: PostgreSQL

-----------------------
### We offer two ways to start the project locally: by running the frontend and backend manually, or with Docker.
-----------------------

## To start the project:

### Backend:

> Please change the current folder: `cd backend`

#### Install local dependencies:
`yarn install`

------------

#### Adjust the local db:
##### 1. Install Postgres:

macOS:

`brew install postgres`

> If you don't have `brew`, please install it (https://brew.sh) and repeat the step `brew install postgres`.

Ubuntu:

`sudo apt update`

`sudo apt install postgresql postgresql-contrib`

##### 2. Create the db and an admin user:
Before running and testing the connection, make sure you have created the database as described in this configuration. You can use the `psql` command to create a user and database.

`psql postgres -U postgres`

Next, type these commands to create a new user with a password and give it permission to create databases.

`postgres=# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`

`postgres=# ALTER ROLE admin CREATEDB;`

Quit `psql`, then log in again using the newly created user.

`postgres=# \q`

`psql postgres -U admin`

Type this command to create a new database.

`postgres=> CREATE DATABASE db_{your_project_name};`

Then grant the new user privileges on the new database and quit `psql`.

`postgres=> GRANT ALL PRIVILEGES ON DATABASE db_{your_project_name} TO admin;`

`postgres=> \q`

------------

#### Create database:
`yarn db:create`

#### Start the production build:
`yarn start`
### Frontend:

> Please change the current folder: `cd frontend`

## To start the project with Docker:
### Description:

The project contains the **docker folder** and the `Dockerfile`.

The `Dockerfile` is used to deploy the project to Google Cloud.

The **docker folder** contains a couple of helper scripts:

- `docker-compose.yml` (describes all of our services: web, backend, db)
- `start-backend.sh` (starts the backend, but only after the database is up)
- `wait-for-it.sh` (imported from https://github.com/vishnubob/wait-for-it)

> To avoid breaking the application, we recommend that you don't edit anything inside the **docker folder** or the `Dockerfile`.

## Run services:

1. Install Docker Compose (https://docs.docker.com/compose/install/)

2. Move to the `docker` folder. All subsequent steps should be run from this folder.

``` cd docker ```

3. Make `wait-for-it.sh` and `start-backend.sh` executable:

``` chmod +x start-backend.sh && chmod +x wait-for-it.sh ```

4. Download the dependent projects for the services.

5. Review the `docker-compose.yml` file. Make sure that all services have Dockerfiles. Only the db service doesn't require a Dockerfile.

6. Make sure the ports listed under `ports` are available on your local machine.

7. Start the services:

7.1. With an empty database: `rm -rf data && docker-compose up`

7.2. With database data stored from previous runs: `docker-compose up`

8. Check http://localhost:3000

9. Stop the services:

9.1. Just press `Ctrl+C`

## Most common errors:

1. `connection refused`

There could be many reasons, but the most common are:

- The port is not open on the destination machine.

- The port is open on the destination machine, but its backlog of pending connections is full.

- A firewall between the client and server is blocking access (also check local firewalls).

After checking for firewalls and confirming that the port is open, use telnet to connect to the IP/port to test connectivity. This removes any potential issues caused by your application.

***macOS:***

If you suspect that your SSH service might be down, you can run this command to find out:

`sudo service ssh status`

If the command returns a status of down, then you've likely found the reason behind your connectivity error.

***Ubuntu:***

Sometimes a connection refused error can also indicate that there is an IP address conflict on your network. You can search for possible IP conflicts by running:

`arp-scan -I eth0 -l | grep <ipaddress>`

and

`arping <ipaddress>`

2. `yarn db:create` creates the database with already assembled tables (on macOS with a Postgres database)

The workaround: run the following commands in your Postgres terminal:

`DROP SCHEMA public CASCADE;`

`CREATE SCHEMA public;`

`GRANT ALL ON SCHEMA public TO postgres;`

`GRANT ALL ON SCHEMA public TO public;`

Afterwards, continue starting your project from the backend directory by running:

`yarn start`
app-shell/.eslintrc.cjs (new file, 26 lines)
@@ -0,0 +1,26 @@
const globals = require('globals');

module.exports = [
  {
    files: ['**/*.js', '**/*.ts', '**/*.tsx'],
    languageOptions: {
      ecmaVersion: 2021,
      sourceType: 'module',
      globals: {
        ...globals.browser,
        ...globals.node,
      },
      parser: '@typescript-eslint/parser',
    },
    plugins: ['@typescript-eslint'],
    rules: {
      'no-unused-vars': 'warn',
      'no-console': 'off',
      'indent': ['error', 2],
      'quotes': ['error', 'single'],
      'semi': ['error', 'always'],

      '@typescript-eslint/no-unused-vars': 'warn',
    },
  },
];
app-shell/.prettierrc (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "singleQuote": true,
  "tabWidth": 2,
  "printWidth": 80,
  "trailingComma": "all",
  "quoteProps": "as-needed",
  "jsxSingleQuote": true,
  "bracketSpacing": true,
  "bracketSameLine": false,
  "arrowParens": "always"
}
app-shell/.sequelizerc (new file, 7 lines)
@@ -0,0 +1,7 @@
const path = require('path');
module.exports = {
  "config": path.resolve("src", "db", "db.config.js"),
  "models-path": path.resolve("src", "db", "models"),
  "seeders-path": path.resolve("src", "db", "seeders"),
  "migrations-path": path.resolve("src", "db", "migrations")
};
app-shell/Dockerfile (new file, 23 lines)
@@ -0,0 +1,23 @@
FROM node:20.15.1-alpine

RUN apk update && apk add bash
# Create app directory
WORKDIR /usr/src/app

# Install app dependencies
# A wildcard is used to ensure both package.json AND package-lock.json are copied
# where available (npm@5+)
COPY package*.json ./

RUN yarn install
# If you are building your code for production
# RUN npm ci --only=production


# Bundle app source
COPY . .


EXPOSE 4000

CMD [ "yarn", "start" ]
app-shell/README.md (new file, 13 lines)
@@ -0,0 +1,13 @@
# test - template backend

#### Run the app on a local machine:

##### Install local dependencies:

- `yarn install`

---

##### Start build:

- `yarn start`
app-shell/package.json (new file, 42 lines)
@@ -0,0 +1,42 @@
{
  "name": "app-shell",
  "description": "app-shell",
  "scripts": {
    "start": "nodemon ./src/index.js --delay 1000"
  },
  "dependencies": {
    "@babel/parser": "^7.26.7",
    "adm-zip": "^0.5.16",
    "axios": "^1.6.7",
    "bcrypt": "5.1.1",
    "cors": "2.8.5",
    "eslint": "^9.13.0",
    "express": "4.18.2",
    "formidable": "1.2.2",
    "helmet": "4.1.1",
    "json2csv": "^5.0.7",
    "jsonwebtoken": "8.5.1",
    "lodash": "4.17.21",
    "moment": "2.30.1",
    "multer": "^1.4.4",
    "passport": "^0.7.0",
    "passport-google-oauth2": "^0.2.0",
    "passport-jwt": "^4.0.1",
    "passport-microsoft": "^0.1.0",
    "postcss": "^8.5.1",
    "sequelize-json-schema": "^2.1.1",
    "pg": "^8.13.3"
  },
  "engines": {
    "node": ">=18"
  },
  "private": true,
  "devDependencies": {
    "@typescript-eslint/eslint-plugin": "^8.12.2",
    "@typescript-eslint/parser": "^8.12.2",
    "cross-env": "7.0.3",
    "mocha": "8.1.3",
    "nodemon": "^3.1.7",
    "sequelize-cli": "6.6.2"
  }
}
app-shell/src/config.js (new file, 15 lines)
@@ -0,0 +1,15 @@

const config = {

  project_uuid: '0fdecac5-300d-420e-8b10-78ac65cdc9f4',
  flHost: process.env.NODE_ENV === 'production' ? 'https://flatlogic.com/projects' : 'http://localhost:3000/projects',

  gitea_domain: process.env.GITEA_DOMAIN || 'gitea.flatlogic.app',
  gitea_username: process.env.GITEA_USERNAME || 'admin',
  gitea_api_token: process.env.GITEA_API_TOKEN || null,
  github_repo_url: process.env.GITHUB_REPO_URL || null,
  github_token: process.env.GITHUB_TOKEN || null,
};

module.exports = config;
app-shell/src/helpers.js (new file, 23 lines)
@@ -0,0 +1,23 @@
const jwt = require('jsonwebtoken');
const config = require('./config');

module.exports = class Helpers {
  static wrapAsync(fn) {
    return function (req, res, next) {
      fn(req, res, next).catch(next);
    };
  }

  static commonErrorHandler(error, req, res, next) {
    if ([400, 403, 404].includes(error.code)) {
      return res.status(error.code).send(error.message);
    }

    console.error(error);
    return res.status(500).send(error.message);
  }

  static jwtSign(data) {
    // NOTE: expects config.secret_key, which is not defined in src/config.js in this commit
    return jwt.sign(data, config.secret_key, { expiresIn: '6h' });
  }
};
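For context, `wrapAsync` is the pattern the route files below rely on: it forwards a rejected promise from an async handler to Express's error middleware, here `commonErrorHandler`. A minimal sketch of how the pieces fit together; the route path and the placeholder async work are illustrative, not part of this commit:

```js
const express = require('express');
const { wrapAsync, commonErrorHandler } = require('./helpers');

const app = express();

// Any rejection inside the async handler is passed to next(),
// which hands it to commonErrorHandler instead of leaving the request hanging.
app.get('/demo', wrapAsync(async (req, res) => {
  const data = await Promise.resolve({ ok: true }); // placeholder async work
  res.status(200).send(data);
}));

// The error handler is registered last, exactly as the routers below do it.
app.use(commonErrorHandler);
```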
app-shell/src/index.js (new file, 54 lines)
@@ -0,0 +1,54 @@
const express = require('express');
const cors = require('cors');
const app = express();
const bodyParser = require('body-parser');
const checkPermissions = require('./middlewares/check-permissions');
const modifyPath = require('./middlewares/modify-path');
const VCS = require('./services/vcs');

const executorRoutes = require('./routes/executor');
const vcsRoutes = require('./routes/vcs');

// Function to initialize the Git repository
function initRepo() {
  const projectId = '29929';
  return VCS.initRepo(projectId);
}

// Start the Express app on port 4000 (APP_SHELL_PORT)
function startServer() {
  const PORT = 4000;
  app.listen(PORT, () => {
    console.log(`Listening on port ${PORT}`);
  });
}

// Run Git check after the server is up
function runGitCheck() {
  initRepo()
    .then(result => {
      console.log(result.message);
      // Here you can add additional logic if needed
    })
    .catch(err => {
      console.error('Error during repo initialization:', err);
      // Exit the process: the Git check is treated as critical
      process.exit(1);
    });
}

app.use(cors({ origin: true }));
app.use(bodyParser.json());
app.use(checkPermissions);
app.use(modifyPath);

app.use('/executor', executorRoutes);
app.use('/vcs', vcsRoutes);

// Start the app_shell server
startServer();

// Now perform Git check
runGitCheck();

module.exports = app;
app-shell/src/middlewares/check-permissions.js (new file, 17 lines)
@@ -0,0 +1,17 @@
const config = require('../config');

function checkPermissions(req, res, next) {
  const project_uuid = config.project_uuid;
  const requiredHeader = 'X-Project-UUID';
  const headerValue = req.headers[requiredHeader.toLowerCase()];
  // Logging whatever request we're getting
  console.log('Request:', req.url, req.method, req.body, req.headers);

  if (headerValue && headerValue === project_uuid) {
    next();
  } else {
    res.status(403).send({ error: 'Stop right there, criminal scum! Your project UUID is invalid or missing.' });
  }
}

module.exports = checkPermissions;
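Every request to the app-shell must carry the project UUID from `src/config.js` in the `X-Project-UUID` header, otherwise this middleware responds with 403. A hedged sketch of a client call using `axios` (already a dependency); the target endpoint and payload are just an example:

```js
const axios = require('axios');

// Assumptions: the app-shell listens on port 4000 (see src/index.js)
// and the UUID below matches config.project_uuid.
const APP_SHELL_URL = 'http://localhost:4000';
const PROJECT_UUID = '0fdecac5-300d-420e-8b10-78ac65cdc9f4';

async function countLines() {
  const response = await axios.post(
    `${APP_SHELL_URL}/executor/count_file_lines`,
    { path: 'frontend/package.json' }, // rebased by the modify-path middleware below
    { headers: { 'X-Project-UUID': PROJECT_UUID } }
  );
  console.log(response.data); // e.g. { success: true, lineCount: 42 }
}

countLines().catch(err => console.error(err.response ? err.response.status : err));
```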
app-shell/src/middlewares/modify-path.js (new file, 8 lines)
@@ -0,0 +1,8 @@
function modifyPath(req, res, next) {
  if (req.body && req.body.path) {
    req.body.path = '../../../' + req.body.path;
  }
  next();
}

module.exports = modifyPath;
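In effect, clients pass paths relative to the project root, and this middleware rebases them so that the executor service, which resolves paths against `app-shell/src/services`, lands back at the repository root. A small illustration, assuming the `/app/...` container layout from Dockerfile.dev:

```js
const path = require('path');

// What the middleware does to req.body.path:
const clientPath = 'backend/src/index.js';
const rebased = '../../../' + clientPath; // -> '../../../backend/src/index.js'

// What the executor service then resolves it to:
const servicesDir = '/app/app-shell/src/services'; // __dirname inside the container (assumed)
console.log(path.join(servicesDir, rebased));      // -> '/app/backend/src/index.js'
```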
app-shell/src/routes/executor.js (new file, 288 lines)
@@ -0,0 +1,288 @@
const express = require('express');
const multer = require('multer');
const upload = multer({ dest: 'uploads/' });
const fs = require('fs');

const ExecutorService = require('../services/executor');

const wrapAsync = require('../helpers').wrapAsync;

const router = express.Router();

router.post(
  '/read_project_tree',
  wrapAsync(async (req, res) => {
    const { path } = req.body;
    const tree = await ExecutorService.readProjectTree(path);
    res.status(200).send(tree);
  }),
);

router.post(
  '/read_file',
  wrapAsync(async (req, res) => {
    const { path, showLines } = req.body;
    const content = await ExecutorService.readFileContents(path, showLines);
    res.status(200).send(content);
  }),
);

router.post(
  '/count_file_lines',
  wrapAsync(async (req, res) => {
    const { path } = req.body;
    const content = await ExecutorService.countFileLines(path);
    res.status(200).send(content);
  }),
);

// router.post(
//   '/read_file_header',
//   wrapAsync(async (req, res) => {
//     const { path, N } = req.body;
//     try {
//       const header = await ExecutorService.readFileHeader(path, N);
//       res.status(200).send(header);
//     } catch (error) {
//       res.status(500).send({
//         error: true,
//         message: error.message,
//         details: error.details || error.stack,
//         validation: error.validation
//       });
//     }
//   }),
// );

router.post(
  '/read_file_line_context',
  wrapAsync(async (req, res) => {
    const { path, lineNumber, windowSize, showLines } = req.body;
    try {
      const context = await ExecutorService.readFileLineContext(path, lineNumber, windowSize, showLines);
      res.status(200).send(context);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post(
  '/write_file',
  wrapAsync(async (req, res) => {
    const { path, fileContents, comment } = req.body;
    try {
      await ExecutorService.writeFile(path, fileContents, comment);
      res.status(200).send({ message: 'File written successfully' });
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post(
  '/insert_file_content',
  wrapAsync(async (req, res) => {
    const { path, lineNumber, newContent, message } = req.body;
    try {
      await ExecutorService.insertFileContent(path, lineNumber, newContent, message);
      res.status(200).send({ message: 'File written successfully' });
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post(
  '/replace_file_line',
  wrapAsync(async (req, res) => {
    const { path, lineNumber, newText } = req.body;
    try {
      const result = await ExecutorService.replaceFileLine(path, lineNumber, newText);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post(
  '/replace_file_chunk',
  wrapAsync(async (req, res) => {
    const { path, startLine, endLine, newCode } = req.body;
    try {
      const result = await ExecutorService.replaceFileChunk(path, startLine, endLine, newCode);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post(
  '/delete_file_lines',
  wrapAsync(async (req, res) => {
    const { path, startLine, endLine, message } = req.body;
    try {
      const result = await ExecutorService.deleteFileLines(path, startLine, endLine, message);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post(
  '/validate_file',
  wrapAsync(async (req, res) => {
    const { path } = req.body;
    try {
      const validationResult = await ExecutorService.validateFile(path);
      res.status(200).send({ validationResult });
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);


router.post(
  '/check_frontend_runtime_error',
  wrapAsync(async (req, res) => {
    try {
      const result = await ExecutorService.checkFrontendRuntimeLogs();
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({ error: error });
    }
  }),
);


router.post(
  '/replace_code_block',
  wrapAsync(async (req, res) => {
    const { path, oldCode, newCode, message } = req.body;
    try {
      const response = await ExecutorService.replaceCodeBlock(path, oldCode, newCode, message);
      res.status(200).send(response);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

router.post('/update_project_files_from_scheme',
  upload.single('file'), // 'file' - name of the field in the form
  async (req, res) => {
    console.log('Request received');
    console.log('Headers:', req.headers);
    if (!req.file) {
      return res.status(400).json({ error: 'No file uploaded' });
    }

    console.log('File info:', {
      originalname: req.file.originalname,
      path: req.file.path,
      size: req.file.size,
      mimetype: req.file.mimetype
    });

    try {
      console.log('Starting update process...');
      const result = await ExecutorService.updateProjectFilesFromScheme(req.file.path);
      console.log('Update completed, result:', result);

      console.log('Removing temp file...');
      fs.unlinkSync(req.file.path);
      console.log('Temp file removed');

      console.log('Sending response...');
      return res.json(result);
    } catch (error) {
      console.error('Error in route handler:', error);
      if (req.file) {
        try {
          fs.unlinkSync(req.file.path);
          console.log('Temp file removed after error');
        } catch (unlinkError) {
          console.error('Error removing temp file:', unlinkError);
        }
      }
      console.error('Update project files error:', error);
      return res.status(500).json({
        error: error.message,
        stack: process.env.NODE_ENV === 'development' ? error.stack : undefined
      });
    }
  }
);

router.post(
  '/get_db_schema',
  wrapAsync(async (req, res) => {
    try {
      const jsonSchema = await ExecutorService.getDBSchema();
      res.status(200).send({ jsonSchema });
    } catch (error) {
      res.status(500).send({ error: error });
    }
  }),
);

router.post(
  '/execute_sql',
  wrapAsync(async (req, res) => {
    try {
      const { query } = req.body;
      const result = await ExecutorService.executeSQL(query);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({ error: error });
    }
  }),
);

router.use('/', require('../helpers').commonErrorHandler);

module.exports = router;
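As a usage sketch, the read endpoints return either the raw file text or, with `showLines: true`, an object keyed by 1-based line numbers (see `readFileContents` in the service further down). A hypothetical call, reusing the header convention from the middleware above; the path and the commented response are illustrative:

```js
const axios = require('axios');

async function readWithLineNumbers() {
  const { data } = await axios.post(
    'http://localhost:4000/executor/read_file',
    { path: 'frontend/package.json', showLines: true },
    { headers: { 'X-Project-UUID': '0fdecac5-300d-420e-8b10-78ac65cdc9f4' } }
  );
  // data looks roughly like: { "1": "{", "2": "  \"name\": \"frontend\",", ... }
  console.log(Object.keys(data).length, 'lines');
}

readWithLineNumbers().catch(console.error);
```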
app-shell/src/routes/vcs.js (new file, 40 lines)
@@ -0,0 +1,40 @@
const express = require('express');
const wrapAsync = require('../helpers').wrapAsync; // async-route wrapper from helpers
const VCS = require('../services/vcs');
const router = express.Router();

router.post('/init', wrapAsync(async (req, res) => {
  const result = await VCS.initRepo();
  res.status(200).send(result);
}));

router.post('/commit', wrapAsync(async (req, res) => {
  const { message, files } = req.body;
  const result = await VCS.commitChanges(message, files);
  res.status(200).send(result);
}));

router.post('/log', wrapAsync(async (req, res) => {
  const result = await VCS.getLog();
  res.status(200).send(result);
}));

router.post('/rollback', wrapAsync(async (req, res) => {
  const { ref } = req.body;
  // const result = await VCS.checkout(ref);
  const result = await VCS.revert(ref);
  res.status(200).send(result);
}));

router.post('/sync-to-stable', wrapAsync(async (req, res) => {
  const result = await VCS.mergeDevIntoMaster();
  res.status(200).send(result);
}));

router.post('/reset-dev', wrapAsync(async (req, res) => {
  const result = await VCS.resetDevBranch();
  res.status(200).send(result);
}));

router.use('/', require('../helpers').commonErrorHandler);
module.exports = router;
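These routes are thin wrappers over the `services/vcs` module, which is referenced but not shown in this section. A hedged example of committing through the API; the commit message and file list are placeholders:

```js
const axios = require('axios');

async function commitViaAppShell() {
  const { data } = await axios.post(
    'http://localhost:4000/vcs/commit',
    { message: 'Describe the change', files: ['frontend/src/App.js'] }, // placeholder payload
    { headers: { 'X-Project-UUID': '0fdecac5-300d-420e-8b10-78ac65cdc9f4' } }
  );
  console.log(data); // whatever VCS.commitChanges resolves to
}

commitViaAppShell().catch(console.error);
```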
app-shell/src/services/database.js (new file, 88 lines)
@@ -0,0 +1,88 @@
// Database.js
const { Client } = require('pg');
const config = require('../../../backend/src/db/db.config');

const env = process.env.NODE_ENV || 'development';
const dbConfig = config[env];

class Database {
  constructor() {
    this.client = new Client({
      user: dbConfig.username,
      password: dbConfig.password,
      database: dbConfig.database,
      host: dbConfig.host,
      port: dbConfig.port
    });

    // Connect once, reuse the client
    this.client.connect().catch(err => {
      console.error('Error connecting to the database:', err);
      throw err;
    });
  }

  async executeSQL(query) {
    try {
      const result = await this.client.query(query);
      return {
        success: true,
        rows: result.rows
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }

  // Method to fetch simple table/column info from 'information_schema'
  // (You can expand this to handle constraints, indexes, etc.)
  async getDBSchema(schemaName = 'public') {
    try {
      const tableQuery = `
        SELECT table_name
        FROM information_schema.tables
        WHERE table_schema = $1
          AND table_type = 'BASE TABLE'
        ORDER BY table_name
      `;

      const columnQuery = `
        SELECT table_name, column_name, data_type, is_nullable
        FROM information_schema.columns
        WHERE table_schema = $1
        ORDER BY table_name, ordinal_position
      `;

      const [tablesResult, columnsResult] = await Promise.all([
        this.client.query(tableQuery, [schemaName]),
        this.client.query(columnQuery, [schemaName]),
      ]);

      // Build a simple schema object:
      const tables = tablesResult.rows.map(row => row.table_name);
      const columnsByTable = {};

      columnsResult.rows.forEach(row => {
        const { table_name, column_name, data_type, is_nullable } = row;
        if (!columnsByTable[table_name]) columnsByTable[table_name] = [];
        columnsByTable[table_name].push({ column_name, data_type, is_nullable });
      });

      // Combine tables with their columns
      return tables.map(table => ({
        table,
        columns: columnsByTable[table] || [],
      }));
    } catch (error) {
      console.error('Error fetching schema:', error);
      throw error;
    }
  }

  async close() {
    await this.client.end();
  }
}

module.exports = new Database();
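A short sketch of how the exported singleton is used, mirroring `ExecutorService.executeSQL` below; the table name in the query is purely illustrative:

```js
const database = require('./database');

async function listUsers() {
  // executeSQL resolves to { success: true, rows: [...] } or throws on error
  const result = await database.executeSQL('SELECT id, email FROM users LIMIT 5;'); // hypothetical table
  console.table(result.rows);
}

listUsers()
  .catch(err => console.error('Query failed:', err.message))
  .finally(() => database.close());
```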
app-shell/src/services/executor.js (new file, 999 lines)
@@ -0,0 +1,999 @@
const fs = require('fs').promises;
const os = require('os');
const path = require('path');
const AdmZip = require('adm-zip');
const { exec } = require('child_process');
const util = require('util');
// Babel Parser for JS/TS/TSX
const babelParser = require('@babel/parser');
const babelParse = babelParser.parse;

// Local App DB Connection
const database = require('./database');

// PostCSS for CSS
const postcss = require('postcss');

const execAsync = util.promisify(exec);

module.exports = class ExecutorService {
  static async readProjectTree(directoryPath) {
    const paths = {
      frontend: '../../../frontend',
      backend: '../../../backend',
      default: '../../../'
    };

    try {
      const publicDir = path.join(__dirname, paths[directoryPath] || directoryPath || paths.default);
      return await getDirectoryTree(publicDir);
    } catch (error) {
      console.error('Error reading directory:', error);
      throw error;
    }
  }

  static async readFileContents(filePath, showLines) {
    try {
      const fullPath = path.join(__dirname, filePath);
      const content = await fs.readFile(fullPath, 'utf8');

      if (showLines) {
        const lines = content.split('\n');

        const lineObject = {};
        lines.forEach((line, index) => {
          lineObject[index + 1] = line;
        });

        return lineObject;
      } else {
        return content;
      }
    } catch (error) {
      console.error('Error reading file:', error);
      throw error;
    }
  }

  static async countFileLines(filePath) {
    try {
      const fullPath = path.join(__dirname, filePath);

      // Check file exists
      await fs.access(fullPath);

      // Read file content
      const content = await fs.readFile(fullPath, 'utf8');

      // Split by newline and count
      const lines = content.split('\n');

      return {
        success: true,
        lineCount: lines.length
      };
    } catch (error) {
      console.error('Error counting file lines:', error);
      return {
        success: false,
        message: error.message
      };
    }
  }

  // static async readFileHeader(filePath, N = 30) {
  //   try {
  //     const fullPath = path.join(__dirname, filePath);
  //     const content = await fs.readFile(fullPath, 'utf8');
  //     const lines = content.split('\n');
  //
  //     if (lines.length < N) {
  //       return { error: `File has less than ${N} lines` };
  //     }
  //
  //     const headerLines = lines.slice(0, Math.min(50, lines.length));
  //
  //     const lineObject = {};
  //     headerLines.forEach((line, index) => {
  //       lineObject[index + 1] = line;
  //     });
  //
  //     return lineObject;
  //   } catch (error) {
  //     console.error('Error reading file header:', error);
  //     throw error;
  //   }
  // }

  static async readFileLineContext(filePath, lineNumber, windowSize, showLines) {
    try {
      const fullPath = path.join(__dirname, filePath);
      const content = await fs.readFile(fullPath, 'utf8');
      const lines = content.split('\n');

      const start = Math.max(0, lineNumber - windowSize);
      const end = Math.min(lines.length, lineNumber + windowSize + 1);

      const contextLines = lines.slice(start, end);

      if (showLines) {
        const lineObject = {};
        contextLines.forEach((line, index) => {
          lineObject[start + index + 1] = line;
        });

        return lineObject;
      } else {
        return contextLines.join('\n');
      }
    } catch (error) {
      console.error('Error reading file line context:', error);
      throw error;
    }
  }

  static async validateFile(filePath) {
    console.log('Validating file:', filePath);
    // Read file content
    let content;
    try {
      content = await fs.readFile(filePath, 'utf8');
    } catch (err) {
      throw new Error(`Could not read file: ${filePath}\n${err.message}`);
    }

    // Determine file extension
    let ext = path.extname(filePath).toLowerCase();
    if (ext === '.temp') {
      ext = path.extname(filePath.slice(0, -5)).toLowerCase();
    }

    try {
      switch (ext) {
        case '.js':
        case '.ts':
        case '.tsx': {
          // Parse JS/TS/TSX with Babel
          babelParse(content, {
            sourceType: 'module',
            // plugins array covers JS, TS, TSX, and optional JS flavors
            plugins: ['jsx', 'typescript']
          });
          break;
        }

        case '.css': {
          // Parse CSS with PostCSS
          postcss.parse(content);
          break;
        }

        default: {
          // If the extension isn't recognized, assume it's "valid"
          // or you could throw an error to force a known extension
          console.warn(`No validation implemented for extension "${ext}". Skipping syntax check.`);
        }
      }

      // If parsing succeeded, return true
      return true;

    } catch (parseError) {
      // Rethrow parse errors with a friendlier message
      throw parseError;
    }
  }

  static async checkFrontendRuntimeLogs() {
    const frontendLogPath = '../frontend/json/runtimeError.json';

    try {
      // Check if file exists
      try {
        console.log('Accessing frontend logs:', frontendLogPath);
        await fs.access(frontendLogPath);
      } catch (error) {
        console.log('Frontend logs not found:', error);
        // File doesn't exist - return empty object
        return { runtime_error: {} };
      }

      // File exists, try to read it
      try {
        // Read the entire file instead of using tail
        const fileContent = await fs.readFile(frontendLogPath, 'utf8');
        console.log('Reading frontend logs:', fileContent);

        // Handle empty file
        if (!fileContent || fileContent.trim() === '') {
          return { runtime_error: {} };
        }

        // Parse JSON content
        const runtime_error = JSON.parse(fileContent);

        console.log('Parsed frontend logs:', runtime_error);
        return { runtime_error };
      } catch (error) {
        // Error reading or parsing file
        console.error('Error reading frontend runtime logs:', error);
        return { runtime_error: {} };
      }
    } catch (error) {
      // Unexpected error
      console.log('Error checking frontend logs:', error);
      return { runtime_error: {} };
    }
  }

  static async writeFile(filePath, fileContents, comment) {
    try {
      console.log(comment);
      const fullPath = path.join(__dirname, filePath);

      // Write to a temp file first
      const tempPath = `${fullPath}.temp`;
      await fs.writeFile(tempPath, fileContents, 'utf8');

      // Validate the temp file
      await this.validateFile(tempPath);

      // Rename temp file to original path
      await fs.rename(tempPath, fullPath);

      return true;
    } catch (error) {
      console.error('Error writing file:', error);
      throw error;
    }
  }

  static async insertFileContent(filePath, lineNumber, newContent, message) {
    try {
      const fullPath = path.join(__dirname, filePath);

      // Check file exists
      await fs.access(fullPath);

      // Read and split by line
      const content = await fs.readFile(fullPath, 'utf8');
      const lines = content.split('\n');

      // Ensure lineNumber is within [1 ... lines.length + 1]
      // 1 means "insert at the very first line"
      // lines.length + 1 means "append at the end"
      if (lineNumber < 1) {
        lineNumber = 1;
      }
      if (lineNumber > lines.length + 1) {
        lineNumber = lines.length + 1;
      }

      // Convert to 0-based index
      const insertIndex = lineNumber - 1;

      // Prepare preview
      const preview = {
        insertionLine: lineNumber,
        insertedLines: newContent.split('\n')
      };

      // Insert newContent lines at the specified index
      lines.splice(insertIndex, 0, ...newContent.split('\n'));

      // Write changes to a temp file first
      const updatedContent = lines.join('\n');
      const tempPath = `${fullPath}.temp`;
      await fs.writeFile(tempPath, updatedContent, 'utf8');

      await this.validateFile(tempPath);

      // Rename temp file to original path
      await fs.rename(tempPath, fullPath);

      return {
        success: true
      };

    } catch (error) {
      console.error('Error inserting file content:', error);
      throw error;
    }
  }

  static async replaceFileLine(filePath, lineNumber, newText, message = null) {
    const fullPath = path.join(__dirname, filePath);
    try {

      try {
        await fs.access(fullPath);
      } catch (error) {
        throw new Error(`File not found: ${filePath}`);
      }

      const content = await fs.readFile(fullPath, 'utf8');
      const lines = content.split('\n');

      if (lineNumber < 1 || lineNumber > lines.length) {
        throw new Error(`Invalid line number: ${lineNumber}. File has ${lines.length} lines`);
      }

      if (typeof newText !== 'string') {
        throw new Error('New text must be a string');
      }

      const preview = {
        oldLine: lines[lineNumber - 1],
        newLine: newText,
        lineNumber: lineNumber
      };

      lines[lineNumber - 1] = newText;
      const newContent = lines.join('\n');
      const tempPath = `${fullPath}.temp`;
      await fs.writeFile(tempPath, newContent, 'utf8');

      await this.validateFile(tempPath);

      await fs.rename(tempPath, fullPath);

      return {
        success: true
      };

    } catch (error) {
      console.error('Error updating file line:', error);

      try {
        await fs.unlink(`${fullPath}.temp`);
      } catch {}

      throw {
        error: error,
        message: error.message,
        details: error.stack
      };
    }
  }

  static async replaceFileChunk(filePath, startLine, endLine, newCode) {
    // Resolve the path up front so the cleanup code in the catch block can see it
    const fullPath = path.join(__dirname, filePath);
    try {
      // Check if this is a single-line change
      const newCodeLines = newCode.split('\n');
      if (newCodeLines.length === 1 && endLine === startLine) {
        // Redirect to replace_file_line
        return await this.replaceFileLine(filePath, startLine, newCode);
      }

      // Check if file exists
      try {
        await fs.access(fullPath);
      } catch (error) {
        throw new Error(`File not found: ${filePath}`);
      }

      const content = await fs.readFile(fullPath, 'utf8');
      const lines = content.split('\n');

      // Adjust line numbers to array indices (subtract 1)
      const startIndex = startLine - 1;
      const endIndex = endLine - 1;

      // Validate input parameters
      if (startIndex < 0 || endIndex >= lines.length || startIndex > endIndex) {
        throw new Error(`Invalid line range: ${startLine}-${endLine}. File has ${lines.length} lines`);
      }

      // Check type of new code
      if (typeof newCode !== 'string') {
        throw new Error('New code must be a string');
      }

      // Create changes preview
      const preview = {
        oldLines: lines.slice(startIndex, endIndex + 1),
        newLines: newCode.split('\n'),
        startLine,
        endLine
      };

      // Apply changes to temp file first
      lines.splice(startIndex, endIndex - startIndex + 1, ...newCode.split('\n'));
      const newContent = lines.join(os.EOL);
      const tempPath = `${fullPath}.temp`;
      await fs.writeFile(tempPath, newContent, 'utf8');
      await this.validateFile(tempPath);
      // Apply changes if all validations passed
      await fs.rename(tempPath, fullPath);

      return {
        success: true
      };

    } catch (error) {
      console.error('Error updating file slice:', error);

      // Clean up temp file if exists
      try {
        await fs.unlink(`${fullPath}.temp`);
      } catch {}

      throw {
        error: error,
        message: error.message,
        details: error.details || error.stack
      };
    }
  }

  static async replaceCodeBlock(filePath, oldCode, newCode, message) {
    try {
      console.log(message);
      const fullPath = path.join(__dirname, filePath);

      // Check file exists
      await fs.access(fullPath);

      // Read file content
      let content = await fs.readFile(fullPath, 'utf8');

      // A small helper to unify line breaks to just `\n`
      const unifyLineBreaks = (str) => str.replace(/\r\n/g, '\n');

      // Normalize line breaks in file content, oldCode, and newCode
      content = unifyLineBreaks(content);
      oldCode = unifyLineBreaks(oldCode);
      newCode = unifyLineBreaks(newCode);

      // Optional: Trim trailing spaces or handle other whitespace normalization if needed
      // oldCode = oldCode.trim();
      // newCode = newCode.trim();

      // Check if oldCode actually exists in the content
      const index = content.indexOf(oldCode);
      if (index === -1) {
        return {
          success: false,
          message: 'Old code not found in file.'
        };
      }

      // Create a preview before replacing
      const preview = {
        oldCodeSnippet: oldCode,
        newCodeSnippet: newCode
      };

      // Perform replacement (single occurrence). For multiple, use replaceAll or a loop.
      // If you want a global replacement, consider:
      // content = content.split(oldCode).join(newCode);
      content = content.replace(oldCode, newCode);

      // Write to a temp file first
      const tempPath = `${fullPath}.temp`;
      await fs.writeFile(tempPath, content, 'utf8');

      await this.validateFile(tempPath);
      // Rename temp file to original
      await fs.rename(tempPath, fullPath);

      return {
        success: true
      };

    } catch (error) {
      console.error('Error replacing code:', error);
      return {
        error: error,
        message: error.message,
        details: error.details || error.stack
      };
    }
  }


  // todo: add validation
  static async deleteFileLines(filePath, startLine, endLine, veryShortDescription) {
    try {
      const fullPath = path.join(__dirname, filePath);

      // Check if file exists
      await fs.access(fullPath);

      // Read file content
      const content = await fs.readFile(fullPath, 'utf8');
      const lines = content.split('\n');

      // Convert to zero-based indices
      const startIndex = startLine - 1;
      const endIndex = endLine - 1;

      // Validate range
      if (startIndex < 0 || endIndex >= lines.length || startIndex > endIndex) {
        throw new Error(
          `Invalid line range: ${startLine}-${endLine}. File has ${lines.length} lines`
        );
      }

      // Prepare a preview of the lines being deleted
      const preview = {
        deletedLines: lines.slice(startIndex, endIndex + 1),
        startLine,
        endLine
      };

      // Remove lines
      lines.splice(startIndex, endIndex - startIndex + 1);

      // Join remaining lines and write to a temporary file
      const newContent = lines.join('\n');
      const tempPath = `${fullPath}.temp`;
      await fs.writeFile(tempPath, newContent, 'utf8');

      await this.validateFile(tempPath);
      // Rename temp file to original
      await fs.rename(tempPath, fullPath);

      return {
        success: true
      };

    } catch (error) {
      console.error('Error deleting file lines:', error);
      return {
        error: error,
        message: error.message,
        details: error.details || error.stack
      };
    }
  }

  static async validateTypeScript(filePath, content = null) {
    try {
      // Basic validation of JSX syntax
      const jsxErrors = [];

      if (content !== null) {
        // Check for matching braces
        if ((content.match(/{/g) || []).length !== (content.match(/}/g) || []).length) {
          jsxErrors.push("Unmatched curly braces");
        }

        // Check for invalid syntax in JSX attributes
        if (content.includes('label={')) {
          if (!content.match(/label={[^}]+}/)) {
            jsxErrors.push("Invalid label attribute syntax");
          }
        }

        if (jsxErrors.length > 0) {
          return {
            valid: false,
            errors: jsxErrors.map(error => ({
              code: 'JSX_SYNTAX_ERROR',
              severity: 'error',
              location: '',
              message: error
            }))
          };
        }
      }

      return {
        valid: true,
        errors: [],
        errorCount: 0,
        warningCount: 0
      };

    } catch (error) {
      console.error('TypeScript validation error:', error);
      return {
        valid: false,
        errors: [{
          code: 'VALIDATION_FAILED',
          severity: 'error',
          location: '',
          message: `TypeScript validation error: ${error.message}`
        }],
        errorCount: 1,
        warningCount: 0
      };
    }
  }

  static async validateBackendFiles(backendPath) {
    try {
      // Check for syntax errors
      await execAsync(`node --check ${backendPath}/src/index.js`);

      // Try to run the code in a test environment
      const testProcess = exec(
        'NODE_ENV=test node -e "try { require(\'./src/index.js\') } catch(e) { console.error(e); process.exit(1) }"',
        { cwd: backendPath }
      );

      return new Promise((resolve) => {
        let output = '';
        let error = '';

        testProcess.stdout.on('data', (data) => {
          output += data;
        });

        testProcess.stderr.on('data', (data) => {
          error += data;
        });

        testProcess.on('close', (code) => {
          if (code === 0) {
            resolve({ valid: true });
          } else {
            resolve({
              valid: false,
              error: error || output
            });
          }
        });

        // Timeout on validation
        setTimeout(() => {
          testProcess.kill();
          resolve({
            valid: true,
            warning: 'Validation timeout, but no immediate errors found'
          });
        }, 5000);
      });
    } catch (error) {
      return {
        valid: false,
        error: error.message
      };
    }
  }

  static async createBackup(ROOT_PATH) {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const backupDir = path.join(ROOT_PATH, 'backups', timestamp);

    try {
      await fs.mkdir(path.join(ROOT_PATH, 'backups'), { recursive: true });

      const dirsToBackup = ['frontend', 'backend'];

      for (const dir of dirsToBackup) {
        const sourceDir = path.join(ROOT_PATH, dir);
        const targetDir = path.join(backupDir, dir);

        await fs.mkdir(targetDir, { recursive: true });

        await execAsync(
          `cd "${sourceDir}" && ` +
          `find . -type f -not -path "*/node_modules/*" -not -path "*/\\.*" | ` +
          `while read file; do ` +
          `mkdir -p "${targetDir}/$(dirname "$file")" && ` +
          `cp "$file" "${targetDir}/$file"; ` +
          `done`
        );
      }

      console.log('Backup created at:', backupDir);
      return backupDir;
    } catch (error) {
      console.error('Error creating backup:', error);
      throw error;
    }
  }

  static async restoreFromBackup(backupDir, ROOT_PATH) {
    try {
      console.log('Restoring from backup:', backupDir);
      await execAsync(`rm -rf ${ROOT_PATH}/backend/*`);
      await execAsync(`cp -r ${backupDir}/* ${ROOT_PATH}/backend/`);
      return true;
    } catch (error) {
      console.error('Error restoring from backup:', error);
      throw error;
    }
  }

  static async updateProjectFilesFromScheme(zipFilePath) {
    const MAX_FILE_SIZE = 10 * 1024 * 1024;
    const ROOT_PATH = path.join(__dirname, '../../../');

    try {
      console.log('Checking file access...');
      await fs.access(zipFilePath);

      console.log('Getting file stats...');
      const stats = await fs.stat(zipFilePath);
      console.log('File size:', stats.size);

      if (stats.size > MAX_FILE_SIZE) {
        console.log('File size exceeds limit');
        return { success: false, error: 'File size exceeds limit' };
      }

      // Copying zip file to /tmp
      const tempZipPath = path.join('/tmp', path.basename(zipFilePath));
      await fs.copyFile(zipFilePath, tempZipPath);

      // Launching background update process
      const servicesUpdate = (async () => {
        try {
          console.log('Stopping services...');
          await stopServices();

          console.log('Creating zip instance...');
          const zip = new AdmZip(tempZipPath);

          console.log('Extracting files to:', ROOT_PATH);
          zip.extractAllTo(ROOT_PATH, true);
          console.log('Files extracted');

          const removedFilesPath = path.join(ROOT_PATH, 'removed_files.json');
          try {
            await fs.access(removedFilesPath);
            const removedFilesContent = await fs.readFile(removedFilesPath, 'utf8');
            const filesToRemove = JSON.parse(removedFilesContent);
            await removeFiles(filesToRemove, ROOT_PATH);

            await fs.unlink(removedFilesPath);
          } catch (error) {
            console.log('No removed files to process or error accessing removed_files.json:', error);
          }

          // Remove temp zip file
          await fs.unlink(tempZipPath);

          // Start services after a delay
          setTimeout(() => {
            startServices()
              .then(() => console.log('Services started successfully'))
              .catch(e => console.error('Failed to start services:', e));
          }, 1000);
        } catch (error) {
          console.error('Error in service update process:', error);
        }
      })();

      servicesUpdate.catch(error => {
        console.error('Background update process failed:', error);
      });

      console.log('Returning immediate response');

      return {
        success: true,
        message: 'Update process initiated'
      };

    } catch (error) {
      console.error('Critical error in updateProjectFilesFromScheme:', error);
      return {
        success: false,
        error: error.message
      };
    }
  }

  static async getDBSchema() {
    try {
      return await database.getDBSchema();
    } catch (error) {
      console.error('Error reading schema:', error);
      throw {
        error: error,
        message: error.message,
        details: error.details || error.stack
      };
    }
  }

  static async executeSQL(query) {
    try {
      return await database.executeSQL(query);
    } catch (error) {
      console.error('Error executing query:', error);
      throw {
        error: error,
        message: error.message,
        details: error.details || error.stack
      };
    }
  }
};

async function getDirectoryTree(dirPath) {
  const entries = await fs.readdir(dirPath, { withFileTypes: true });
  const result = {};

  for (const entry of entries) {
    const fullPath = path.join(dirPath, entry.name);

    if (entry.isDirectory() && (
      entry.name === 'node_modules' ||
      entry.name === 'app-shell' ||
      entry.name === '.git' ||
      entry.name === '.idea'
    )) {
      continue;
    }

    const relativePath = fullPath.replace('/app', '');

    if (entry.isDirectory()) {
      const subTree = await getDirectoryTree(fullPath);
      Object.keys(subTree).forEach(key => {
        result[key.replace('/app', '')] = subTree[key];
      });
    } else {
      const fileContent = await fs.readFile(fullPath, 'utf8');
      const lineCount = fileContent.split('\n').length;
      result[relativePath] = lineCount;
    }
  }

  return result;
}
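For reference, `readProjectTree` / `getDirectoryTree` resolve to a flat object mapping file paths (with the `/app` prefix stripped) to line counts, not a nested tree. A hypothetical result for a small project, with illustrative paths and counts:

```js
// Shape of the value resolved by ExecutorService.readProjectTree('frontend')
const exampleTree = {
  '/frontend/package.json': 38,
  '/frontend/src/index.js': 12,
  '/frontend/src/App.js': 97,
  '/frontend/public/index.html': 25,
};
```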
|
||||
async function stopServices() {
  try {
    console.log('Finding service processes...');

    // Frontend stopping
    const { stdout: frontendProcess } = await execAsync("ps -o pid,cmd | grep '[n]ext-server' | awk '{print $1}'");
    if (frontendProcess.trim()) {
      console.log('Stopping frontend, pid:', frontendProcess.trim());
      await execAsync(`kill -15 ${frontendProcess.trim()}`);
    }

    // Backend stopping
    const { stdout: backendProcess } = await execAsync("ps -o pid,cmd | grep '[n]ode ./src/index.js' | grep -v app-shell | awk '{print $1}'");
    if (backendProcess.trim()) {
      console.log('Stopping backend, pid:', backendProcess.trim());
      await execAsync(`kill -15 ${backendProcess.trim()}`);
    }

    await new Promise(resolve => setTimeout(resolve, 2000));

    return { success: true };
  } catch (error) {
    console.error('Error stopping services:', error);
    return { success: false, error: error.message };
  }
}

async function startServices() {
  try {
    console.log('Starting services...');

    await execAsync('yarn --cwd /app/frontend dev &');
    await execAsync('yarn --cwd /app/backend start &');

    return { success: true };
  } catch (error) {
    console.error('Error starting services:', error);
    return { success: false, error: error.message };
  }
}
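// Added sketch (not part of the generated source): stopServices() and startServices() are
// called independently by the update flow above; a hypothetical helper that chains them
// could look like this:
//
//   async function restartServices() {
//     const stopped = await stopServices();
//     if (!stopped.success) throw new Error(stopped.error);
//     return startServices();
//   }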
async function checkStatus() {
  try {
    const { stdout } = await execAsync('ps aux');
    return {
      success: true,
      frontendRunning: stdout.includes('next-server'),
      backendRunning: stdout.includes('nodemon') && stdout.includes('/app/backend'),
      nginxRunning: stdout.includes('nginx: master process')
    };
  } catch (error) {
    return {
      success: false,
      error: error.message
    };
  }
}
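// Added note (not part of the generated source): checkStatus() resolves to a plain status
// object, e.g. (values are illustrative):
//
//   { success: true, frontendRunning: true, backendRunning: true, nginxRunning: false }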
async function validateJSXSyntax(code) {
  // Define validation rules for JSX
  const rules = [
    {
      // JSX attribute with expression
      pattern: /^[a-zA-Z][a-zA-Z0-9]*={.*}$/,
      message: 'Invalid JSX attribute syntax'
    },
    {
      // Invalid sequences
      pattern: /,{2,}/,
      message: 'Invalid character sequence detected',
      shouldNotMatch: true
    },
    {
      // Ternary expressions
      pattern: /^[a-zA-Z][a-zA-Z0-9]*={[\w\s]+\?[^}]+:[^}]+}$/,
      message: 'Invalid ternary expression in JSX'
    }
  ];

  // Validate each line
  const lines = code.split('\n');
  for (const line of lines) {
    const trimmedLine = line.trim();

    // Skip empty lines
    if (!trimmedLine) continue;

    // Check each rule
    for (const rule of rules) {
      if (rule.shouldNotMatch) {
        // For patterns that should not be present
        if (rule.pattern.test(trimmedLine)) {
          return {
            valid: false,
            errors: [{
              code: 'JSX_SYNTAX_ERROR',
              severity: 'error',
              location: '',
              message: rule.message
            }]
          };
        }
      } else {
        // For patterns that should match
        if (trimmedLine.includes('=') && !rule.pattern.test(trimmedLine)) {
          return {
            valid: false,
            errors: [{
              code: 'JSX_SYNTAX_ERROR',
              severity: 'error',
              location: '',
              message: rule.message
            }]
          };
        }
      }
    }

    // Additional JSX-specific checks
    if ((trimmedLine.match(/{/g) || []).length !== (trimmedLine.match(/}/g) || []).length) {
      return {
        valid: false,
        errors: [{
          code: 'JSX_SYNTAX_ERROR',
          severity: 'error',
          location: '',
          message: 'Unmatched curly braces in JSX'
        }]
      };
    }
  }

  // If all checks pass
  return {
    valid: true,
    errors: []
  };
}
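// Added note (not part of the generated source): validateJSXSyntax() is a line-by-line
// heuristic, not a full parser, and resolves to { valid, errors }. Illustrative calls:
//
//   await validateJSXSyntax('<div>hello</div>');
//   // -> { valid: true, errors: [] }
//
//   await validateJSXSyntax('{count');
//   // -> { valid: false, errors: [{ code: 'JSX_SYNTAX_ERROR', ..., message: 'Unmatched curly braces in JSX' }] }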
async function removeFiles(files, rootPath) {
  try {
    for (const file of files) {
      const fullPath = path.join(rootPath, file);
      try {
        await fs.unlink(fullPath);
        console.log(`File removed: ${fullPath}`);
      } catch (error) {
        console.error(`Error when trying to delete a file ${fullPath}:`, error);
      }
    }
  } catch (error) {
    console.error('Error removing files:', error);
    throw error;
  }
}
16
app-shell/src/services/notifications/errors/forbidden.js
Normal file
@ -0,0 +1,16 @@
const { getNotification, isNotification } = require('../helpers');

module.exports = class ForbiddenError extends Error {
  constructor(messageCode) {
    let message;

    if (messageCode && isNotification(messageCode)) {
      message = getNotification(messageCode);
    }

    message = message || getNotification('errors.forbidden.message');

    super(message);
    this.code = 403;
  }
};
16
app-shell/src/services/notifications/errors/validation.js
Normal file
@ -0,0 +1,16 @@
const { getNotification, isNotification } = require('../helpers');

module.exports = class ValidationError extends Error {
  constructor(messageCode) {
    let message;

    if (messageCode && isNotification(messageCode)) {
      message = getNotification(messageCode);
    }

    message = message || getNotification('errors.validation.message');

    super(message);
    this.code = 400;
  }
};
30
app-shell/src/services/notifications/helpers.js
Normal file
@ -0,0 +1,30 @@
const _get = require('lodash/get');
const errors = require('./list');

function format(message, args) {
  if (!message) {
    return null;
  }

  return message.replace(/{(\d+)}/g, function (match, number) {
    return typeof args[number] != 'undefined' ? args[number] : match;
  });
}

const isNotification = (key) => {
  const message = _get(errors, key);
  return !!message;
};

const getNotification = (key, ...args) => {
  const message = _get(errors, key);

  if (!message) {
    return key;
  }

  return format(message, args);
};

exports.getNotification = getNotification;
exports.isNotification = isNotification;
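// Added usage sketch (not part of the generated source): keys are looked up in ./list and
// numbered placeholders are filled from the rest arguments, e.g.:
//
//   getNotification('auth.userNotFound');                  // "Sorry, we don't recognize your credentials"
//   getNotification('emails.invitation.subject', 'MyApp'); // "You've been invited to MyApp"
//   isNotification('no.such.key');                          // false (getNotification would return the key itself)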
100
app-shell/src/services/notifications/list.js
Normal file
@ -0,0 +1,100 @@
const errors = {
  app: {
    title: 'test',
  },

  auth: {
    userDisabled: 'Your account is disabled',
    forbidden: 'Forbidden',
    unauthorized: 'Unauthorized',
    userNotFound: `Sorry, we don't recognize your credentials`,
    wrongPassword: `Sorry, we don't recognize your credentials`,
    weakPassword: 'This password is too weak',
    emailAlreadyInUse: 'Email is already in use',
    invalidEmail: 'Please provide a valid email',
    passwordReset: {
      invalidToken: 'Password reset link is invalid or has expired',
      error: `Email not recognized`,
    },
    passwordUpdate: {
      samePassword: `You can't use the same password. Please create new password`,
    },
    userNotVerified: `Sorry, your email has not been verified yet`,
    emailAddressVerificationEmail: {
      invalidToken: 'Email verification link is invalid or has expired',
      error: `Email not recognized`,
    },
  },

  iam: {
    errors: {
      userAlreadyExists: 'User with this email already exists',
      userNotFound: 'User not found',
      disablingHimself: `You can't disable yourself`,
      revokingOwnPermission: `You can't revoke your own owner permission`,
      deletingHimself: `You can't delete yourself`,
      emailRequired: 'Email is required',
    },
  },

  importer: {
    errors: {
      invalidFileEmpty: 'The file is empty',
      invalidFileExcel: 'Only excel (.xlsx) files are allowed',
      invalidFileUpload:
        'Invalid file. Make sure you are using the last version of the template.',
      importHashRequired: 'Import hash is required',
      importHashExistent: 'Data has already been imported',
      userEmailMissing: 'Some items in the CSV do not have an email',
    },
  },

  errors: {
    forbidden: {
      message: 'Forbidden',
    },
    validation: {
      message: 'An error occurred',
    },
    searchQueryRequired: {
      message: 'Search query is required',
    },
  },

  emails: {
    invitation: {
      subject: `You've been invited to {0}`,
      body: `
        <p>Hello,</p>
        <p>You've been invited to {0} set password for your {1} account.</p>
        <p><a href='{2}'>{2}</a></p>
        <p>Thanks,</p>
        <p>Your {0} team</p>
      `,
    },
    emailAddressVerification: {
      subject: `Verify your email for {0}`,
      body: `
        <p>Hello,</p>
        <p>Follow this link to verify your email address.</p>
        <p><a href='{0}'>{0}</a></p>
        <p>If you didn't ask to verify this address, you can ignore this email.</p>
        <p>Thanks,</p>
        <p>Your {1} team</p>
      `,
    },
    passwordReset: {
      subject: `Reset your password for {0}`,
      body: `
        <p>Hello,</p>
        <p>Follow this link to reset your {0} password for your {1} account.</p>
        <p><a href='{2}'>{2}</a></p>
        <p>If you didn't ask to reset your password, you can ignore this email.</p>
        <p>Thanks,</p>
        <p>Your {0} team</p>
      `,
    },
  },
};

module.exports = errors;
1002
app-shell/src/services/vcs.js
Normal file
File diff suppressed because it is too large
3044
app-shell/yarn.lock
Normal file
File diff suppressed because it is too large
11
backend/.prettierrc
Normal file
@ -0,0 +1,11 @@
{
  "singleQuote": true,
  "tabWidth": 2,
  "printWidth": 80,
  "trailingComma": "all",
  "quoteProps": "as-needed",
  "jsxSingleQuote": true,
  "bracketSpacing": true,
  "bracketSameLine": false,
  "arrowParens": "always"
}
7
backend/.sequelizerc
Normal file
@ -0,0 +1,7 @@
const path = require('path');
module.exports = {
  "config": path.resolve("src", "db", "db.config.js"),
  "models-path": path.resolve("src", "db", "models"),
  "seeders-path": path.resolve("src", "db", "seeders"),
  "migrations-path": path.resolve("src", "db", "migrations")
};
23
backend/Dockerfile
Normal file
@ -0,0 +1,23 @@
FROM node:20.15.1-alpine

RUN apk update && apk add bash
# Create app directory
WORKDIR /usr/src/app

# Install app dependencies
# A wildcard is used to ensure both package.json AND package-lock.json are copied
# where available (npm@5+)
COPY package*.json ./

RUN yarn install
# If you are building your code for production
# RUN npm ci --only=production

# Bundle app source
COPY . .

EXPOSE 8080

CMD [ "yarn", "start" ]
67
backend/README.md
Normal file
@ -0,0 +1,67 @@
# i want to create a website for pdf & image convert - template backend

#### Run App on local machine:

##### Install local dependencies:

- `yarn install`

---

##### Adjust local db:

###### 1. Install postgres:

- MacOS:

  - `brew install postgres`

- Ubuntu:
  - `sudo apt update`
  - `sudo apt install postgresql postgresql-contrib`

###### 2. Create db and admin user:

- Before running and testing the connection, make sure you have created the database described in the configuration above. You can use the `psql` command to create a user and a database.

- `psql postgres -U postgres`

- Next, run these commands to create a new user with a password and allow it to create databases.

  - `postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
  - `postgres-# ALTER ROLE admin CREATEDB;`

- Quit `psql`, then log in again using the new user you just created.

  - `postgres-# \q`
  - `psql postgres -U admin`

- Run this command to create a new database.

  - `postgres=> CREATE DATABASE db_i_want_to_create_a_website_for_pdf___image_convert;`

- Then grant the new user privileges on the new database and quit `psql`.
  - `postgres=> GRANT ALL PRIVILEGES ON DATABASE db_i_want_to_create_a_website_for_pdf___image_convert TO admin;`
  - `postgres=> \q`

---

#### Api Documentation (Swagger)

http://localhost:8080/api-docs (local host)

http://host_name/api-docs

---

##### Setup database tables or update after schema change

- `yarn db:migrate`

##### Seed the initial data (admin accounts, relevant for the first setup):

- `yarn db:seed`

##### Start build:

- `yarn start`
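##### Optional: verify the database connection

A quick standalone check (a sketch only; the real connection settings live in `src/db/db.config.js`, and the credentials below are the ones created in step 2):

```js
// check-db.js — run with `node check-db.js` from the backend folder
const { Sequelize } = require('sequelize');

const sequelize = new Sequelize(
  'db_i_want_to_create_a_website_for_pdf___image_convert',
  'admin',
  'admin_pass',
  { host: 'localhost', dialect: 'postgres' },
);

sequelize
  .authenticate()
  .then(() => console.log('Database connection OK'))
  .catch((err) => console.error('Unable to connect:', err));
```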
51
backend/package.json
Normal file
@ -0,0 +1,51 @@
|
||||
{
|
||||
"name": "iwanttocreateawebsiteforpdfimageconvert",
|
||||
"description": "i want to create a website for pdf & image convert - template backend",
|
||||
"scripts": {
|
||||
"start": "npm run db:migrate && npm run db:seed && nodemon ./src/index.js",
|
||||
"db:migrate": "sequelize-cli db:migrate",
|
||||
"db:seed": "sequelize-cli db:seed:all",
|
||||
"db:drop": "sequelize-cli db:drop",
|
||||
"db:create": "sequelize-cli db:create"
|
||||
},
|
||||
"dependencies": {
|
||||
"@google-cloud/storage": "^5.18.2",
|
||||
"axios": "^1.6.7",
|
||||
"bcrypt": "5.1.1",
|
||||
"cors": "2.8.5",
|
||||
"csv-parser": "^3.0.0",
|
||||
"express": "4.18.2",
|
||||
"formidable": "1.2.2",
|
||||
"helmet": "4.1.1",
|
||||
"json2csv": "^5.0.7",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"lodash": "4.17.21",
|
||||
"moment": "2.30.1",
|
||||
"multer": "^1.4.4",
|
||||
"mysql2": "2.2.5",
|
||||
"nodemailer": "6.9.9",
|
||||
"passport": "^0.7.0",
|
||||
"passport-google-oauth2": "^0.2.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"passport-microsoft": "^0.1.0",
|
||||
"pg": "8.4.1",
|
||||
"pg-hstore": "2.3.4",
|
||||
"sequelize": "6.35.2",
|
||||
"sequelize-json-schema": "^2.1.1",
|
||||
"sqlite": "4.0.15",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.0",
|
||||
"tedious": "^18.2.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"private": true,
|
||||
"devDependencies": {
|
||||
"cross-env": "7.0.3",
|
||||
"mocha": "8.1.3",
|
||||
"node-mocks-http": "1.9.0",
|
||||
"nodemon": "2.0.5",
|
||||
"sequelize-cli": "6.6.2"
|
||||
}
|
||||
}
|
||||
79
backend/src/auth/auth.js
Normal file
@ -0,0 +1,79 @@
|
||||
const config = require('../config');
|
||||
const providers = config.providers;
|
||||
const helpers = require('../helpers');
|
||||
const db = require('../db/models');
|
||||
|
||||
const passport = require('passport');
|
||||
const JWTstrategy = require('passport-jwt').Strategy;
|
||||
const ExtractJWT = require('passport-jwt').ExtractJwt;
|
||||
const GoogleStrategy = require('passport-google-oauth2').Strategy;
|
||||
const MicrosoftStrategy = require('passport-microsoft').Strategy;
|
||||
const UsersDBApi = require('../db/api/users');
|
||||
|
||||
passport.use(
|
||||
new JWTstrategy(
|
||||
{
|
||||
passReqToCallback: true,
|
||||
secretOrKey: config.secret_key,
|
||||
jwtFromRequest: ExtractJWT.fromAuthHeaderAsBearerToken(),
|
||||
},
|
||||
async (req, token, done) => {
|
||||
try {
|
||||
const user = await UsersDBApi.findBy({ email: token.user.email });
|
||||
|
||||
if (user && user.disabled) {
|
||||
return done(new Error(`User '${user.email}' is disabled`));
|
||||
}
|
||||
|
||||
req.currentUser = user;
|
||||
|
||||
return done(null, user);
|
||||
} catch (error) {
|
||||
done(error);
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
passport.use(
|
||||
new GoogleStrategy(
|
||||
{
|
||||
clientID: config.google.clientId,
|
||||
clientSecret: config.google.clientSecret,
|
||||
callbackURL: config.apiUrl + '/auth/signin/google/callback',
|
||||
passReqToCallback: true,
|
||||
},
|
||||
function (request, accessToken, refreshToken, profile, done) {
|
||||
socialStrategy(profile.email, profile, providers.GOOGLE, done);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
passport.use(
|
||||
new MicrosoftStrategy(
|
||||
{
|
||||
clientID: config.microsoft.clientId,
|
||||
clientSecret: config.microsoft.clientSecret,
|
||||
callbackURL: config.apiUrl + '/auth/signin/microsoft/callback',
|
||||
passReqToCallback: true,
|
||||
},
|
||||
function (request, accessToken, refreshToken, profile, done) {
|
||||
const email = profile._json.mail || profile._json.userPrincipalName;
|
||||
socialStrategy(email, profile, providers.MICROSOFT, done);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
function socialStrategy(email, profile, provider, done) {
|
||||
db.users
|
||||
.findOrCreate({ where: { email, provider } })
|
||||
.then(([user, created]) => {
|
||||
const body = {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
name: profile.displayName,
|
||||
};
|
||||
const token = helpers.jwtSign({ user: body });
|
||||
return done(null, { token });
|
||||
});
|
||||
}
|
||||
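// Added usage sketch (not part of the generated source): the JWT strategy registered above
// is what route guards rely on; a hypothetical Express route protected with it could look
// like this (the actual route wiring lives elsewhere in the backend):
//
//   const passport = require('passport');
//   const router = require('express').Router();
//
//   router.get(
//     '/api/users/me',
//     passport.authenticate('jwt', { session: false }),
//     (req, res) => res.json(req.currentUser), // currentUser is set in the verify callback above
//   );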
71
backend/src/config.js
Normal file
@ -0,0 +1,71 @@
|
||||
const os = require('os');
|
||||
|
||||
const config = {
|
||||
gcloud: {
|
||||
bucket: 'fldemo-files',
|
||||
hash: '76d6b303a93c474b60ddd73dbaab06ad',
|
||||
},
|
||||
bcrypt: {
|
||||
saltRounds: 12,
|
||||
},
|
||||
admin_pass: 'password',
|
||||
admin_email: 'admin@flatlogic.com',
|
||||
providers: {
|
||||
LOCAL: 'local',
|
||||
GOOGLE: 'google',
|
||||
MICROSOFT: 'microsoft',
|
||||
},
|
||||
secret_key: 'HUEyqESqgQ1yTwzVlO6wprC9Kf1J1xuA',
|
||||
remote: '',
|
||||
port: process.env.NODE_ENV === 'production' ? '' : '8080',
|
||||
hostUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost',
|
||||
portUI: process.env.NODE_ENV === 'production' ? '' : '3000',
|
||||
|
||||
portUIProd: process.env.NODE_ENV === 'production' ? '' : ':3000',
|
||||
|
||||
swaggerUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost',
|
||||
swaggerPort: process.env.NODE_ENV === 'production' ? '' : ':8080',
|
||||
google: {
|
||||
clientId:
|
||||
'671001533244-kf1k1gmp6mnl0r030qmvdu6v36ghmim6.apps.googleusercontent.com',
|
||||
clientSecret: 'Yo4qbKZniqvojzUQ60iKlxqR',
|
||||
},
|
||||
microsoft: {
|
||||
clientId: '4696f457-31af-40de-897c-e00d7d4cff73',
|
||||
clientSecret: 'm8jzZ.5UpHF3=-dXzyxiZ4e[F8OF54@p',
|
||||
},
|
||||
uploadDir: os.tmpdir(),
|
||||
email: {
|
||||
from: 'i want to create a website for pdf & image convert <app@flatlogic.app>',
|
||||
host: 'email-smtp.us-east-1.amazonaws.com',
|
||||
port: 587,
|
||||
auth: {
|
||||
user: 'AKIAVEW7G4PQUBGM52OF',
|
||||
pass: process.env.EMAIL_PASS,
|
||||
},
|
||||
tls: {
|
||||
rejectUnauthorized: false,
|
||||
},
|
||||
},
|
||||
roles: {
|
||||
admin: 'Administrator',
|
||||
user: 'User',
|
||||
},
|
||||
|
||||
project_uuid: '0fdecac5-300d-420e-8b10-78ac65cdc9f4',
|
||||
flHost:
|
||||
process.env.NODE_ENV === 'production' ||
|
||||
process.env.NODE_ENV === 'dev_stage'
|
||||
? 'https://flatlogic.com/projects'
|
||||
: 'http://localhost:3000/projects',
|
||||
};
|
||||
config.pexelsKey = 'Vc99rnmOhHhJAbgGQoKLZtsaIVfkeownoQNbTj78VemUjKh08ZYRbf18';
|
||||
config.pexelsQuery = 'abstract digital transformation';
|
||||
config.host =
|
||||
process.env.NODE_ENV === 'production' ? config.remote : 'http://localhost';
|
||||
config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`;
|
||||
config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`;
|
||||
config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`;
|
||||
config.backUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}`;
|
||||
|
||||
module.exports = config;
|
||||
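// Added note (not part of the generated source): outside production the derived values
// above resolve to local addresses, e.g.:
//
//   config.apiUrl     // 'http://localhost:8080/api'
//   config.swaggerUrl // 'http://localhost:8080'
//   config.uiUrl      // 'http://localhost:3000/#'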
336
backend/src/db/api/conversions.js
Normal file
@ -0,0 +1,336 @@
|
||||
const db = require('../models');
|
||||
const FileDBApi = require('./file');
|
||||
const crypto = require('crypto');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class ConversionsDBApi {
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const conversions = await db.conversions.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
file_name: data.file_name || null,
|
||||
conversion_type: data.conversion_type || null,
|
||||
conversion_date: data.conversion_date || null,
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await conversions.setUser(data.user || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
return conversions;
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const conversionsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
file_name: item.file_name || null,
|
||||
conversion_type: item.conversion_type || null,
|
||||
conversion_date: item.conversion_date || null,
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const conversions = await db.conversions.bulkCreate(conversionsData, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
return conversions;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const conversions = await db.conversions.findByPk(id, {}, { transaction });
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.file_name !== undefined) updatePayload.file_name = data.file_name;
|
||||
|
||||
if (data.conversion_type !== undefined)
|
||||
updatePayload.conversion_type = data.conversion_type;
|
||||
|
||||
if (data.conversion_date !== undefined)
|
||||
updatePayload.conversion_date = data.conversion_date;
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await conversions.update(updatePayload, { transaction });
|
||||
|
||||
if (data.user !== undefined) {
|
||||
await conversions.setUser(
|
||||
data.user,
|
||||
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
return conversions;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const conversions = await db.conversions.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of conversions) {
|
||||
await record.update({ deletedBy: currentUser.id }, { transaction });
|
||||
}
|
||||
for (const record of conversions) {
|
||||
await record.destroy({ transaction });
|
||||
}
|
||||
});
|
||||
|
||||
return conversions;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const conversions = await db.conversions.findByPk(id, options);
|
||||
|
||||
await conversions.update(
|
||||
{
|
||||
deletedBy: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await conversions.destroy({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return conversions;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const conversions = await db.conversions.findOne(
|
||||
{ where },
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
if (!conversions) {
|
||||
return conversions;
|
||||
}
|
||||
|
||||
const output = conversions.get({ plain: true });
|
||||
|
||||
output.user = await conversions.getUser({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(filter, options) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
const orderBy = null;
|
||||
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
let include = [
|
||||
{
|
||||
model: db.users,
|
||||
as: 'user',
|
||||
|
||||
where: filter.user
|
||||
? {
|
||||
[Op.or]: [
|
||||
{
|
||||
id: {
|
||||
[Op.in]: filter.user
|
||||
.split('|')
|
||||
.map((term) => Utils.uuid(term)),
|
||||
},
|
||||
},
|
||||
{
|
||||
firstName: {
|
||||
[Op.or]: filter.user
|
||||
.split('|')
|
||||
.map((term) => ({ [Op.iLike]: `%${term}%` })),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: {},
|
||||
},
|
||||
];
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.file_name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('conversions', 'file_name', filter.file_name),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.conversion_dateRange) {
|
||||
const [start, end] = filter.conversion_dateRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
conversion_date: {
|
||||
...where.conversion_date,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
conversion_date: {
|
||||
...where.conversion_date,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true',
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.conversion_type) {
|
||||
where = {
|
||||
...where,
|
||||
conversion_type: filter.conversion_type,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log,
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.conversions.findAndCountAll(
|
||||
queryOptions,
|
||||
);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset) {
|
||||
let where = {};
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike('conversions', 'file_name', query),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.conversions.findAll({
|
||||
attributes: ['id', 'file_name'],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['file_name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.file_name,
|
||||
}));
|
||||
}
|
||||
};
|
||||
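// Added usage sketch (not part of the generated source): a hypothetical findAll() call
// using the filters handled above (filter values are illustrative):
//
//   const { rows, count } = await ConversionsDBApi.findAll({
//     file_name: 'report',
//     conversion_type: 'pdf_to_image',
//     page: 0,
//     limit: 10,
//   });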
73
backend/src/db/api/file.js
Normal file
@ -0,0 +1,73 @@
|
||||
const db = require('../models');
|
||||
const assert = require('assert');
|
||||
const services = require('../../services/file');
|
||||
|
||||
module.exports = class FileDBApi {
|
||||
static async replaceRelationFiles(relation, rawFiles, options) {
|
||||
assert(relation.belongsTo, 'belongsTo is required');
|
||||
assert(relation.belongsToColumn, 'belongsToColumn is required');
|
||||
assert(relation.belongsToId, 'belongsToId is required');
|
||||
|
||||
let files = [];
|
||||
|
||||
if (Array.isArray(rawFiles)) {
|
||||
files = rawFiles;
|
||||
} else {
|
||||
files = rawFiles ? [rawFiles] : [];
|
||||
}
|
||||
|
||||
await this._removeLegacyFiles(relation, files, options);
|
||||
await this._addFiles(relation, files, options);
|
||||
}
|
||||
|
||||
static async _addFiles(relation, files, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
|
||||
const inexistentFiles = files.filter((file) => !!file.new);
|
||||
|
||||
for (const file of inexistentFiles) {
|
||||
await db.file.create(
|
||||
{
|
||||
belongsTo: relation.belongsTo,
|
||||
belongsToColumn: relation.belongsToColumn,
|
||||
belongsToId: relation.belongsToId,
|
||||
name: file.name,
|
||||
sizeInBytes: file.sizeInBytes,
|
||||
privateUrl: file.privateUrl,
|
||||
publicUrl: file.publicUrl,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
static async _removeLegacyFiles(relation, files, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const filesToDelete = await db.file.findAll({
|
||||
where: {
|
||||
belongsTo: relation.belongsTo,
|
||||
belongsToId: relation.belongsToId,
|
||||
belongsToColumn: relation.belongsToColumn,
|
||||
id: {
|
||||
[db.Sequelize.Op.notIn]: files
|
||||
.filter((file) => !file.new)
|
||||
.map((file) => file.id),
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
for (let file of filesToDelete) {
|
||||
await services.deleteGCloud(file.privateUrl);
|
||||
await file.destroy({
|
||||
transaction,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
358
backend/src/db/api/files.js
Normal file
@ -0,0 +1,358 @@
|
||||
const db = require('../models');
|
||||
const FileDBApi = require('./file');
|
||||
const crypto = require('crypto');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class FilesDBApi {
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const files = await db.files.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
file_name: data.file_name || null,
|
||||
upload_date: data.upload_date || null,
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await files.setUser(data.user || null, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await FileDBApi.replaceRelationFiles(
|
||||
{
|
||||
belongsTo: db.files.getTableName(),
|
||||
belongsToColumn: 'file_data',
|
||||
belongsToId: files.id,
|
||||
},
|
||||
data.file_data,
|
||||
options,
|
||||
);
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const filesData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
file_name: item.file_name || null,
|
||||
upload_date: item.upload_date || null,
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const files = await db.files.bulkCreate(filesData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
await FileDBApi.replaceRelationFiles(
|
||||
{
|
||||
belongsTo: db.files.getTableName(),
|
||||
belongsToColumn: 'file_data',
|
||||
belongsToId: files[i].id,
|
||||
},
|
||||
data[i].file_data,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const files = await db.files.findByPk(id, {}, { transaction });
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.file_name !== undefined) updatePayload.file_name = data.file_name;
|
||||
|
||||
if (data.upload_date !== undefined)
|
||||
updatePayload.upload_date = data.upload_date;
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await files.update(updatePayload, { transaction });
|
||||
|
||||
if (data.user !== undefined) {
|
||||
await files.setUser(
|
||||
data.user,
|
||||
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
await FileDBApi.replaceRelationFiles(
|
||||
{
|
||||
belongsTo: db.files.getTableName(),
|
||||
belongsToColumn: 'file_data',
|
||||
belongsToId: files.id,
|
||||
},
|
||||
data.file_data,
|
||||
options,
|
||||
);
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const files = await db.files.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of files) {
|
||||
await record.update({ deletedBy: currentUser.id }, { transaction });
|
||||
}
|
||||
for (const record of files) {
|
||||
await record.destroy({ transaction });
|
||||
}
|
||||
});
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const files = await db.files.findByPk(id, options);
|
||||
|
||||
await files.update(
|
||||
{
|
||||
deletedBy: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await files.destroy({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const files = await db.files.findOne({ where }, { transaction });
|
||||
|
||||
if (!files) {
|
||||
return files;
|
||||
}
|
||||
|
||||
const output = files.get({ plain: true });
|
||||
|
||||
output.file_data = await files.getFile_data({
|
||||
transaction,
|
||||
});
|
||||
|
||||
output.user = await files.getUser({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(filter, options) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
const orderBy = null;
|
||||
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
let include = [
|
||||
{
|
||||
model: db.users,
|
||||
as: 'user',
|
||||
|
||||
where: filter.user
|
||||
? {
|
||||
[Op.or]: [
|
||||
{
|
||||
id: {
|
||||
[Op.in]: filter.user
|
||||
.split('|')
|
||||
.map((term) => Utils.uuid(term)),
|
||||
},
|
||||
},
|
||||
{
|
||||
firstName: {
|
||||
[Op.or]: filter.user
|
||||
.split('|')
|
||||
.map((term) => ({ [Op.iLike]: `%${term}%` })),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: {},
|
||||
},
|
||||
|
||||
{
|
||||
model: db.file,
|
||||
as: 'file_data',
|
||||
},
|
||||
];
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.file_name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('files', 'file_name', filter.file_name),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.upload_dateRange) {
|
||||
const [start, end] = filter.upload_dateRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
upload_date: {
|
||||
...where.upload_date,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
upload_date: {
|
||||
...where.upload_date,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true',
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log,
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.files.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset) {
|
||||
let where = {};
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike('files', 'file_name', query),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.files.findAll({
|
||||
attributes: ['id', 'file_name'],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['file_name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.file_name,
|
||||
}));
|
||||
}
|
||||
};
|
||||
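// Added usage sketch (not part of the generated source): FilesDBApi.create() also attaches
// uploaded file records via FileDBApi.replaceRelationFiles(); a hypothetical payload
// (names, sizes and URLs are illustrative):
//
//   await FilesDBApi.create(
//     {
//       file_name: 'invoice.pdf',
//       upload_date: new Date(),
//       file_data: [
//         { new: true, name: 'invoice.pdf', sizeInBytes: 12345, privateUrl: 'files/file_data/abc', publicUrl: '/files/abc' },
//       ],
//     },
//     { currentUser },
//   );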
253
backend/src/db/api/permissions.js
Normal file
@ -0,0 +1,253 @@
|
||||
const db = require('../models');
|
||||
const FileDBApi = require('./file');
|
||||
const crypto = require('crypto');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class PermissionsDBApi {
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
name: data.name || null,
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const permissionsData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
name: item.name || null,
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const permissions = await db.permissions.bulkCreate(permissionsData, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findByPk(id, {}, { transaction });
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await permissions.update(updatePayload, { transaction });
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of permissions) {
|
||||
await record.update({ deletedBy: currentUser.id }, { transaction });
|
||||
}
|
||||
for (const record of permissions) {
|
||||
await record.destroy({ transaction });
|
||||
}
|
||||
});
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findByPk(id, options);
|
||||
|
||||
await permissions.update(
|
||||
{
|
||||
deletedBy: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await permissions.destroy({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return permissions;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const permissions = await db.permissions.findOne(
|
||||
{ where },
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
if (!permissions) {
|
||||
return permissions;
|
||||
}
|
||||
|
||||
const output = permissions.get({ plain: true });
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(filter, options) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
const orderBy = null;
|
||||
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
let include = [];
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('permissions', 'name', filter.name),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true',
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log,
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.permissions.findAndCountAll(
|
||||
queryOptions,
|
||||
);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset) {
|
||||
let where = {};
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike('permissions', 'name', query),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.permissions.findAll({
|
||||
attributes: ['id', 'name'],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
};
|
||||
315
backend/src/db/api/roles.js
Normal file
@ -0,0 +1,315 @@
|
||||
const db = require('../models');
|
||||
const FileDBApi = require('./file');
|
||||
const crypto = require('crypto');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class RolesDBApi {
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.create(
|
||||
{
|
||||
id: data.id || undefined,
|
||||
|
||||
name: data.name || null,
|
||||
role_customization: data.role_customization || null,
|
||||
importHash: data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await roles.setPermissions(data.permissions || [], {
|
||||
transaction,
|
||||
});
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data - wrapping individual data transformations in a map() method
|
||||
const rolesData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
name: item.name || null,
|
||||
role_customization: item.role_customization || null,
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const roles = await db.roles.bulkCreate(rolesData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findByPk(id, {}, { transaction });
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.name !== undefined) updatePayload.name = data.name;
|
||||
|
||||
if (data.role_customization !== undefined)
|
||||
updatePayload.role_customization = data.role_customization;
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await roles.update(updatePayload, { transaction });
|
||||
|
||||
if (data.permissions !== undefined) {
|
||||
await roles.setPermissions(data.permissions, { transaction });
|
||||
}
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of roles) {
|
||||
await record.update({ deletedBy: currentUser.id }, { transaction });
|
||||
}
|
||||
for (const record of roles) {
|
||||
await record.destroy({ transaction });
|
||||
}
|
||||
});
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findByPk(id, options);
|
||||
|
||||
await roles.update(
|
||||
{
|
||||
deletedBy: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await roles.destroy({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const roles = await db.roles.findOne({ where }, { transaction });
|
||||
|
||||
if (!roles) {
|
||||
return roles;
|
||||
}
|
||||
|
||||
const output = roles.get({ plain: true });
|
||||
|
||||
output.users_app_role = await roles.getUsers_app_role({
|
||||
transaction,
|
||||
});
|
||||
|
||||
output.permissions = await roles.getPermissions({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(filter, options) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
offset = currentPage * limit;
|
||||
|
||||
const orderBy = null;
|
||||
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
let include = [
|
||||
{
|
||||
model: db.permissions,
|
||||
as: 'permissions',
|
||||
},
|
||||
];
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('roles', 'name', filter.name),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.role_customization) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'roles',
|
||||
'role_customization',
|
||||
filter.role_customization,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true',
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.permissions) {
|
||||
const searchTerms = filter.permissions.split('|');
|
||||
|
||||
include = [
|
||||
{
|
||||
model: db.permissions,
|
||||
as: 'permissions_filter',
|
||||
required: searchTerms.length > 0,
|
||||
where:
|
||||
searchTerms.length > 0
|
||||
? {
|
||||
[Op.or]: [
|
||||
{
|
||||
id: {
|
||||
[Op.in]: searchTerms.map((term) => Utils.uuid(term)),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
[Op.or]: searchTerms.map((term) => ({
|
||||
[Op.iLike]: `%${term}%`,
|
||||
})),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: undefined,
|
||||
},
|
||||
...include,
|
||||
];
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log,
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.roles.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset) {
|
||||
let where = {};
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike('roles', 'name', query),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.roles.findAll({
|
||||
attributes: ['id', 'name'],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
orderBy: [['name', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.name,
|
||||
}));
|
||||
}
|
||||
};
|
||||
750
backend/src/db/api/users.js
Normal file
@ -0,0 +1,750 @@
|
||||
const db = require('../models');
|
||||
const FileDBApi = require('./file');
|
||||
const crypto = require('crypto');
|
||||
const Utils = require('../utils');
|
||||
|
||||
const bcrypt = require('bcrypt');
|
||||
const config = require('../../config');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
module.exports = class UsersDBApi {
|
||||
static async create(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.create(
|
||||
{
|
||||
id: data.data.id || undefined,
|
||||
|
||||
firstName: data.data.firstName || null,
|
||||
lastName: data.data.lastName || null,
|
||||
phoneNumber: data.data.phoneNumber || null,
|
||||
email: data.data.email || null,
|
||||
disabled: data.data.disabled || false,
|
||||
|
||||
password: data.data.password || null,
|
||||
emailVerified: data.data.emailVerified ?? true, // default new accounts to verified unless explicitly set to false
|
||||
|
||||
emailVerificationToken: data.data.emailVerificationToken || null,
|
||||
emailVerificationTokenExpiresAt:
|
||||
data.data.emailVerificationTokenExpiresAt || null,
|
||||
passwordResetToken: data.data.passwordResetToken || null,
|
||||
passwordResetTokenExpiresAt:
|
||||
data.data.passwordResetTokenExpiresAt || null,
|
||||
provider: data.data.provider || null,
|
||||
importHash: data.data.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
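// Default new accounts to the 'User' role when no app_role is supplied (a no-op if that role does not exist).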
if (!data.data.app_role) {
|
||||
const role = await db.roles.findOne({
|
||||
where: { name: 'User' },
|
||||
});
|
||||
if (role) {
|
||||
await users.setApp_role(role, {
|
||||
transaction,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
await users.setApp_role(data.data.app_role || null, {
|
||||
transaction,
|
||||
});
|
||||
}
|
||||
|
||||
await users.setCustom_permissions(data.data.custom_permissions || [], {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await FileDBApi.replaceRelationFiles(
|
||||
{
|
||||
belongsTo: db.users.getTableName(),
|
||||
belongsToColumn: 'avatar',
|
||||
belongsToId: users.id,
|
||||
},
|
||||
data.data.avatar,
|
||||
options,
|
||||
);
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
static async bulkImport(data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
// Prepare data: map each imported item onto a plain row payload.
|
||||
const usersData = data.map((item, index) => ({
|
||||
id: item.id || undefined,
|
||||
|
||||
firstName: item.firstName || null,
|
||||
lastName: item.lastName || null,
|
||||
phoneNumber: item.phoneNumber || null,
|
||||
email: item.email || null,
|
||||
disabled: item.disabled || false,
|
||||
|
||||
password: item.password || null,
|
||||
emailVerified: item.emailVerified || false,
|
||||
|
||||
emailVerificationToken: item.emailVerificationToken || null,
|
||||
emailVerificationTokenExpiresAt:
|
||||
item.emailVerificationTokenExpiresAt || null,
|
||||
passwordResetToken: item.passwordResetToken || null,
|
||||
passwordResetTokenExpiresAt: item.passwordResetTokenExpiresAt || null,
|
||||
provider: item.provider || null,
|
||||
importHash: item.importHash || null,
|
||||
createdById: currentUser.id,
|
||||
updatedById: currentUser.id,
|
||||
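// Stagger createdAt by one second per row so imported records keep their original order when sorted by creation date.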
createdAt: new Date(Date.now() + index * 1000),
|
||||
}));
|
||||
|
||||
// Bulk create items
|
||||
const users = await db.users.bulkCreate(usersData, { transaction });
|
||||
|
||||
// For each item created, replace relation files
|
||||
|
||||
for (let i = 0; i < users.length; i++) {
|
||||
await FileDBApi.replaceRelationFiles(
|
||||
{
|
||||
belongsTo: db.users.getTableName(),
|
||||
belongsToColumn: 'avatar',
|
||||
belongsToId: users[i].id,
|
||||
},
|
||||
data[i].avatar,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
static async update(id, data, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.findByPk(id, { transaction });
|
||||
|
||||
if (!data?.app_role) {
|
||||
data.app_role = users?.app_role?.id;
|
||||
}
|
||||
if (!data?.custom_permissions) {
|
||||
data.custom_permissions = users?.custom_permissions?.map(
|
||||
(item) => item.id,
|
||||
);
|
||||
}
|
||||
|
||||
if (data.password) {
|
||||
data.password = bcrypt.hashSync(data.password, config.bcrypt.saltRounds);
|
||||
} else {
|
||||
data.password = users.password;
|
||||
}
|
||||
|
||||
const updatePayload = {};
|
||||
|
||||
if (data.firstName !== undefined) updatePayload.firstName = data.firstName;
|
||||
|
||||
if (data.lastName !== undefined) updatePayload.lastName = data.lastName;
|
||||
|
||||
if (data.phoneNumber !== undefined)
|
||||
updatePayload.phoneNumber = data.phoneNumber;
|
||||
|
||||
if (data.email !== undefined) updatePayload.email = data.email;
|
||||
|
||||
if (data.disabled !== undefined) updatePayload.disabled = data.disabled;
|
||||
|
||||
if (data.password !== undefined) updatePayload.password = data.password;
|
||||
|
||||
if (data.emailVerified !== undefined)
|
||||
updatePayload.emailVerified = data.emailVerified;
|
||||
else updatePayload.emailVerified = true;
|
||||
|
||||
if (data.emailVerificationToken !== undefined)
|
||||
updatePayload.emailVerificationToken = data.emailVerificationToken;
|
||||
|
||||
if (data.emailVerificationTokenExpiresAt !== undefined)
|
||||
updatePayload.emailVerificationTokenExpiresAt =
|
||||
data.emailVerificationTokenExpiresAt;
|
||||
|
||||
if (data.passwordResetToken !== undefined)
|
||||
updatePayload.passwordResetToken = data.passwordResetToken;
|
||||
|
||||
if (data.passwordResetTokenExpiresAt !== undefined)
|
||||
updatePayload.passwordResetTokenExpiresAt =
|
||||
data.passwordResetTokenExpiresAt;
|
||||
|
||||
if (data.provider !== undefined) updatePayload.provider = data.provider;
|
||||
|
||||
updatePayload.updatedById = currentUser.id;
|
||||
|
||||
await users.update(updatePayload, { transaction });
|
||||
|
||||
if (data.app_role !== undefined) {
|
||||
await users.setApp_role(
|
||||
data.app_role,
|
||||
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
if (data.custom_permissions !== undefined) {
|
||||
await users.setCustom_permissions(data.custom_permissions, {
|
||||
transaction,
|
||||
});
|
||||
}
|
||||
|
||||
await FileDBApi.replaceRelationFiles(
|
||||
{
|
||||
belongsTo: db.users.getTableName(),
|
||||
belongsToColumn: 'avatar',
|
||||
belongsToId: users.id,
|
||||
},
|
||||
data.avatar,
|
||||
options,
|
||||
);
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.findAll({
|
||||
where: {
|
||||
id: {
|
||||
[Op.in]: ids,
|
||||
},
|
||||
},
|
||||
transaction,
|
||||
});
|
||||
|
||||
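// Soft delete: the users model is paranoid, so destroy() sets deletedAt instead of removing rows; deletedBy is recorded first.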
await db.sequelize.transaction(async (transaction) => {
|
||||
for (const record of users) {
|
||||
await record.update({ deletedBy: currentUser.id }, { transaction });
|
||||
}
|
||||
for (const record of users) {
|
||||
await record.destroy({ transaction });
|
||||
}
|
||||
});
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
static async remove(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.findByPk(id, options);
|
||||
|
||||
await users.update(
|
||||
{
|
||||
deletedBy: currentUser.id,
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await users.destroy({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
static async findBy(where, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.findOne({ where, transaction });
|
||||
|
||||
if (!users) {
|
||||
return users;
|
||||
}
|
||||
|
||||
const output = users.get({ plain: true });
|
||||
|
||||
output.conversions_user = await users.getConversions_user({
|
||||
transaction,
|
||||
});
|
||||
|
||||
output.files_user = await users.getFiles_user({
|
||||
transaction,
|
||||
});
|
||||
|
||||
output.avatar = await users.getAvatar({
|
||||
transaction,
|
||||
});
|
||||
|
||||
output.app_role = await users.getApp_role({
|
||||
transaction,
|
||||
});
|
||||
|
||||
if (output.app_role) {
|
||||
output.app_role_permissions = await output.app_role.getPermissions({
|
||||
transaction,
|
||||
});
|
||||
}
|
||||
|
||||
output.custom_permissions = await users.getCustom_permissions({
|
||||
transaction,
|
||||
});
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
static async findAll(filter, options) {
|
||||
const limit = filter.limit || 0;
|
||||
let offset = 0;
|
||||
let where = {};
|
||||
const currentPage = +filter.page;
|
||||
|
||||
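// filter.page is a zero-based page index; the row offset is page * page size.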
offset = currentPage * limit;
|
||||
|
||||
const orderBy = null;
|
||||
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
let include = [
|
||||
{
|
||||
model: db.roles,
|
||||
as: 'app_role',
|
||||
|
||||
where: filter.app_role
|
||||
? {
|
||||
[Op.or]: [
|
||||
{
|
||||
id: {
|
||||
[Op.in]: filter.app_role
|
||||
.split('|')
|
||||
.map((term) => Utils.uuid(term)),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
[Op.or]: filter.app_role
|
||||
.split('|')
|
||||
.map((term) => ({ [Op.iLike]: `%${term}%` })),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: {},
|
||||
},
|
||||
|
||||
{
|
||||
model: db.permissions,
|
||||
as: 'custom_permissions',
|
||||
},
|
||||
|
||||
{
|
||||
model: db.file,
|
||||
as: 'avatar',
|
||||
},
|
||||
];
|
||||
|
||||
if (filter) {
|
||||
if (filter.id) {
|
||||
where = {
|
||||
...where,
|
||||
['id']: Utils.uuid(filter.id),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.firstName) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('users', 'firstName', filter.firstName),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.lastName) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('users', 'lastName', filter.lastName),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.phoneNumber) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('users', 'phoneNumber', filter.phoneNumber),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.email) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('users', 'email', filter.email),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.password) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('users', 'password', filter.password),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.emailVerificationToken) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'users',
|
||||
'emailVerificationToken',
|
||||
filter.emailVerificationToken,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.passwordResetToken) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike(
|
||||
'users',
|
||||
'passwordResetToken',
|
||||
filter.passwordResetToken,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.provider) {
|
||||
where = {
|
||||
...where,
|
||||
[Op.and]: Utils.ilike('users', 'provider', filter.provider),
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.emailVerificationTokenExpiresAtRange) {
|
||||
const [start, end] = filter.emailVerificationTokenExpiresAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
emailVerificationTokenExpiresAt: {
|
||||
...where.emailVerificationTokenExpiresAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
emailVerificationTokenExpiresAt: {
|
||||
...where.emailVerificationTokenExpiresAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.passwordResetTokenExpiresAtRange) {
|
||||
const [start, end] = filter.passwordResetTokenExpiresAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
passwordResetTokenExpiresAt: {
|
||||
...where.passwordResetTokenExpiresAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
passwordResetTokenExpiresAt: {
|
||||
...where.passwordResetTokenExpiresAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.active !== undefined) {
|
||||
where = {
|
||||
...where,
|
||||
active: filter.active === true || filter.active === 'true',
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.disabled) {
|
||||
where = {
|
||||
...where,
|
||||
disabled: filter.disabled,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.emailVerified) {
|
||||
where = {
|
||||
...where,
|
||||
emailVerified: filter.emailVerified,
|
||||
};
|
||||
}
|
||||
|
||||
if (filter.custom_permissions) {
|
||||
const searchTerms = filter.custom_permissions.split('|');
|
||||
|
||||
include = [
|
||||
{
|
||||
model: db.permissions,
|
||||
as: 'custom_permissions_filter',
|
||||
required: searchTerms.length > 0,
|
||||
where:
|
||||
searchTerms.length > 0
|
||||
? {
|
||||
[Op.or]: [
|
||||
{
|
||||
id: {
|
||||
[Op.in]: searchTerms.map((term) => Utils.uuid(term)),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
[Op.or]: searchTerms.map((term) => ({
|
||||
[Op.iLike]: `%${term}%`,
|
||||
})),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
: undefined,
|
||||
},
|
||||
...include,
|
||||
];
|
||||
}
|
||||
|
||||
if (filter.createdAtRange) {
|
||||
const [start, end] = filter.createdAtRange;
|
||||
|
||||
if (start !== undefined && start !== null && start !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.gte]: start,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (end !== undefined && end !== null && end !== '') {
|
||||
where = {
|
||||
...where,
|
||||
['createdAt']: {
|
||||
...where.createdAt,
|
||||
[Op.lte]: end,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const queryOptions = {
|
||||
where,
|
||||
include,
|
||||
distinct: true,
|
||||
order:
|
||||
filter.field && filter.sort
|
||||
? [[filter.field, filter.sort]]
|
||||
: [['createdAt', 'desc']],
|
||||
transaction: options?.transaction,
|
||||
logging: console.log,
|
||||
};
|
||||
|
||||
if (!options?.countOnly) {
|
||||
queryOptions.limit = limit ? Number(limit) : undefined;
|
||||
queryOptions.offset = offset ? Number(offset) : undefined;
|
||||
}
|
||||
|
||||
try {
|
||||
const { rows, count } = await db.users.findAndCountAll(queryOptions);
|
||||
|
||||
return {
|
||||
rows: options?.countOnly ? [] : rows,
|
||||
count: count,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error executing query:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async findAllAutocomplete(query, limit, offset) {
|
||||
let where = {};
|
||||
|
||||
if (query) {
|
||||
where = {
|
||||
[Op.or]: [
|
||||
{ ['id']: Utils.uuid(query) },
|
||||
Utils.ilike('users', 'firstName', query),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
const records = await db.users.findAll({
|
||||
attributes: ['id', 'firstName'],
|
||||
where,
|
||||
limit: limit ? Number(limit) : undefined,
|
||||
offset: offset ? Number(offset) : undefined,
|
||||
order: [['firstName', 'ASC']],
|
||||
});
|
||||
|
||||
return records.map((record) => ({
|
||||
id: record.id,
|
||||
label: record.firstName,
|
||||
}));
|
||||
}
|
||||
|
||||
static async createFromAuth(data, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const users = await db.users.create(
|
||||
{
|
||||
email: data.email,
|
||||
firstName: data.firstName,
|
||||
authenticationUid: data.authenticationUid,
|
||||
password: data.password,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
const app_role = await db.roles.findOne({
|
||||
where: { name: 'User' },
|
||||
});
|
||||
if (app_role?.id) {
|
||||
await users.setApp_role(app_role?.id || null, {
|
||||
transaction,
|
||||
});
|
||||
}
|
||||
|
||||
await users.update(
|
||||
{
|
||||
authenticationUid: users.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
delete users.password;
|
||||
return users;
|
||||
}
|
||||
|
||||
static async updatePassword(id, password, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.findByPk(id, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await users.update(
|
||||
{
|
||||
password,
|
||||
authenticationUid: id,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
return users;
|
||||
}
|
||||
|
||||
static async generateEmailVerificationToken(email, options) {
|
||||
return this._generateToken(
|
||||
['emailVerificationToken', 'emailVerificationTokenExpiresAt'],
|
||||
email,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
static async generatePasswordResetToken(email, options) {
|
||||
return this._generateToken(
|
||||
['passwordResetToken', 'passwordResetTokenExpiresAt'],
|
||||
email,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
static async findByPasswordResetToken(token, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
return db.users.findOne(
|
||||
{
|
||||
where: {
|
||||
passwordResetToken: token,
|
||||
passwordResetTokenExpiresAt: {
|
||||
[db.Sequelize.Op.gt]: Date.now(),
|
||||
},
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
static async findByEmailVerificationToken(token, options) {
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
return db.users.findOne(
|
||||
{
|
||||
where: {
|
||||
emailVerificationToken: token,
|
||||
emailVerificationTokenExpiresAt: {
|
||||
[db.Sequelize.Op.gt]: Date.now(),
|
||||
},
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
static async markEmailVerified(id, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
|
||||
const users = await db.users.findByPk(id, {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await users.update(
|
||||
{
|
||||
emailVerified: true,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static async _generateToken(keyNames, email, options) {
|
||||
const currentUser = (options && options.currentUser) || { id: null };
|
||||
const transaction = (options && options.transaction) || undefined;
|
||||
const users = await db.users.findOne(
|
||||
{
|
||||
where: { email: email.toLowerCase() },
|
||||
},
|
||||
{
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
const token = crypto.randomBytes(20).toString('hex');
|
||||
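// 360000 ms = 6 minutes; the verification/reset token expires shortly after it is issued.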
const tokenExpiresAt = Date.now() + 360000;
|
||||
|
||||
if (users) {
|
||||
await users.update(
|
||||
{
|
||||
[keyNames[0]]: token,
|
||||
[keyNames[1]]: tokenExpiresAt,
|
||||
updatedById: currentUser.id,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
};
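// Usage sketch (hypothetical caller, not part of the generated module): fetch the first page of
// users whose email matches 'admin':
//   const { rows, count } = await UsersDBApi.findAll({ page: 0, limit: 10, email: 'admin' });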
|
||||
31
backend/src/db/db.config.js
Normal file
@ -0,0 +1,31 @@
|
||||
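// Per-environment connection settings; models/index.js selects the block matching NODE_ENV (default 'development').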
module.exports = {
|
||||
production: {
|
||||
dialect: 'postgres',
|
||||
username: process.env.DB_USER,
|
||||
password: process.env.DB_PASS,
|
||||
database: process.env.DB_NAME,
|
||||
host: process.env.DB_HOST,
|
||||
port: process.env.DB_PORT,
|
||||
logging: console.log,
|
||||
seederStorage: 'sequelize',
|
||||
},
|
||||
development: {
|
||||
username: 'postgres',
|
||||
dialect: 'postgres',
|
||||
password: '',
|
||||
database: 'db_i_want_to_create_a_website_for_pdf___image_convert',
|
||||
host: process.env.DB_HOST || 'localhost',
|
||||
logging: console.log,
|
||||
seederStorage: 'sequelize',
|
||||
},
|
||||
dev_stage: {
|
||||
dialect: 'postgres',
|
||||
username: process.env.DB_USER,
|
||||
password: process.env.DB_PASS,
|
||||
database: process.env.DB_NAME,
|
||||
host: process.env.DB_HOST,
|
||||
port: process.env.DB_PORT,
|
||||
logging: console.log,
|
||||
seederStorage: 'sequelize',
|
||||
},
|
||||
};
|
||||
525
backend/src/db/migrations/1742114441090.js
Normal file
@ -0,0 +1,525 @@
|
||||
module.exports = {
|
||||
/**
|
||||
* @param {QueryInterface} queryInterface
|
||||
* @param {Sequelize} Sequelize
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async up(queryInterface, Sequelize) {
|
||||
/**
|
||||
* @type {Transaction}
|
||||
*/
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
try {
|
||||
await queryInterface.createTable(
|
||||
'users',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
createdAt: { type: Sequelize.DataTypes.DATE },
|
||||
updatedAt: { type: Sequelize.DataTypes.DATE },
|
||||
deletedAt: { type: Sequelize.DataTypes.DATE },
|
||||
importHash: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.createTable(
|
||||
'conversions',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
createdAt: { type: Sequelize.DataTypes.DATE },
|
||||
updatedAt: { type: Sequelize.DataTypes.DATE },
|
||||
deletedAt: { type: Sequelize.DataTypes.DATE },
|
||||
importHash: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.createTable(
|
||||
'files',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
createdAt: { type: Sequelize.DataTypes.DATE },
|
||||
updatedAt: { type: Sequelize.DataTypes.DATE },
|
||||
deletedAt: { type: Sequelize.DataTypes.DATE },
|
||||
importHash: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.createTable(
|
||||
'roles',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
createdAt: { type: Sequelize.DataTypes.DATE },
|
||||
updatedAt: { type: Sequelize.DataTypes.DATE },
|
||||
deletedAt: { type: Sequelize.DataTypes.DATE },
|
||||
importHash: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.createTable(
|
||||
'permissions',
|
||||
{
|
||||
id: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
defaultValue: Sequelize.DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
createdById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
updatedById: {
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
references: {
|
||||
key: 'id',
|
||||
model: 'users',
|
||||
},
|
||||
},
|
||||
createdAt: { type: Sequelize.DataTypes.DATE },
|
||||
updatedAt: { type: Sequelize.DataTypes.DATE },
|
||||
deletedAt: { type: Sequelize.DataTypes.DATE },
|
||||
importHash: {
|
||||
type: Sequelize.DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'firstName',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'lastName',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'phoneNumber',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'email',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'disabled',
|
||||
{
|
||||
type: Sequelize.DataTypes.BOOLEAN,
|
||||
|
||||
defaultValue: false,
|
||||
allowNull: false,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'password',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'emailVerified',
|
||||
{
|
||||
type: Sequelize.DataTypes.BOOLEAN,
|
||||
|
||||
defaultValue: false,
|
||||
allowNull: false,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'emailVerificationToken',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'emailVerificationTokenExpiresAt',
|
||||
{
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'passwordResetToken',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'passwordResetTokenExpiresAt',
|
||||
{
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'provider',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'conversions',
|
||||
'file_name',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'conversions',
|
||||
'conversion_type',
|
||||
{
|
||||
type: Sequelize.DataTypes.ENUM,
|
||||
|
||||
values: [
|
||||
'pdf_to_word',
|
||||
'pdf_to_excel',
|
||||
'image_to_jpg',
|
||||
'image_to_png',
|
||||
],
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'conversions',
|
||||
'userId',
|
||||
{
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'conversions',
|
||||
'conversion_date',
|
||||
{
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'files',
|
||||
'file_name',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'files',
|
||||
'userId',
|
||||
{
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
|
||||
references: {
|
||||
model: 'users',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'files',
|
||||
'upload_date',
|
||||
{
|
||||
type: Sequelize.DataTypes.DATE,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'permissions',
|
||||
'name',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'roles',
|
||||
'name',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'roles',
|
||||
'role_customization',
|
||||
{
|
||||
type: Sequelize.DataTypes.TEXT,
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.addColumn(
|
||||
'users',
|
||||
'app_roleId',
|
||||
{
|
||||
type: Sequelize.DataTypes.UUID,
|
||||
|
||||
references: {
|
||||
model: 'roles',
|
||||
key: 'id',
|
||||
},
|
||||
},
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
/**
|
||||
* @param {QueryInterface} queryInterface
|
||||
* @param {Sequelize} Sequelize
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async down(queryInterface, Sequelize) {
|
||||
/**
|
||||
* @type {Transaction}
|
||||
*/
|
||||
const transaction = await queryInterface.sequelize.transaction();
|
||||
try {
|
||||
await queryInterface.removeColumn('users', 'app_roleId', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('roles', 'role_customization', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('roles', 'name', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('permissions', 'name', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('files', 'upload_date', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('files', 'userId', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('files', 'file_name', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('conversions', 'conversion_date', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('conversions', 'userId', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('conversions', 'conversion_type', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('conversions', 'file_name', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('users', 'provider', { transaction });
|
||||
|
||||
await queryInterface.removeColumn(
|
||||
'users',
|
||||
'passwordResetTokenExpiresAt',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.removeColumn('users', 'passwordResetToken', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn(
|
||||
'users',
|
||||
'emailVerificationTokenExpiresAt',
|
||||
{ transaction },
|
||||
);
|
||||
|
||||
await queryInterface.removeColumn('users', 'emailVerificationToken', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('users', 'emailVerified', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('users', 'password', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('users', 'disabled', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('users', 'email', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('users', 'phoneNumber', {
|
||||
transaction,
|
||||
});
|
||||
|
||||
await queryInterface.removeColumn('users', 'lastName', { transaction });
|
||||
|
||||
await queryInterface.removeColumn('users', 'firstName', { transaction });
|
||||
|
||||
await queryInterface.dropTable('permissions', { transaction });
|
||||
|
||||
await queryInterface.dropTable('roles', { transaction });
|
||||
|
||||
await queryInterface.dropTable('files', { transaction });
|
||||
|
||||
await queryInterface.dropTable('conversions', { transaction });
|
||||
|
||||
await queryInterface.dropTable('users', { transaction });
|
||||
|
||||
await transaction.commit();
|
||||
} catch (err) {
|
||||
await transaction.rollback();
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
};
|
||||
67
backend/src/db/models/conversions.js
Normal file
@ -0,0 +1,67 @@
|
||||
const config = require('../../config');
|
||||
const providers = config.providers;
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcrypt');
|
||||
const moment = require('moment');
|
||||
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const conversions = sequelize.define(
|
||||
'conversions',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
file_name: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
conversion_type: {
|
||||
type: DataTypes.ENUM,
|
||||
|
||||
values: ['pdf_to_word', 'pdf_to_excel', 'image_to_jpg', 'image_to_png'],
|
||||
},
|
||||
|
||||
conversion_date: {
|
||||
type: DataTypes.DATE,
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
},
|
||||
);
|
||||
|
||||
conversions.associate = (db) => {
|
||||
/// Loop through entities and their fields; when a field's ref matches this entity's name, create the corresponding hasMany relation on the parent entity.
|
||||
|
||||
//end loop
|
||||
|
||||
db.conversions.belongsTo(db.users, {
|
||||
as: 'user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.conversions.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.conversions.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return conversions;
|
||||
};
|
||||
53
backend/src/db/models/file.js
Normal file
@ -0,0 +1,53 @@
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const file = sequelize.define(
|
||||
'file',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
belongsTo: DataTypes.STRING(255),
|
||||
belongsToId: DataTypes.UUID,
|
||||
belongsToColumn: DataTypes.STRING(255),
|
||||
name: {
|
||||
type: DataTypes.STRING(2083),
|
||||
allowNull: false,
|
||||
validate: {
|
||||
notEmpty: true,
|
||||
},
|
||||
},
|
||||
sizeInBytes: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
privateUrl: {
|
||||
type: DataTypes.STRING(2083),
|
||||
allowNull: true,
|
||||
},
|
||||
publicUrl: {
|
||||
type: DataTypes.STRING(2083),
|
||||
allowNull: false,
|
||||
validate: {
|
||||
notEmpty: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
},
|
||||
);
|
||||
|
||||
file.associate = (db) => {
|
||||
db.file.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.file.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return file;
|
||||
};
|
||||
71
backend/src/db/models/files.js
Normal file
@ -0,0 +1,71 @@
|
||||
const config = require('../../config');
|
||||
const providers = config.providers;
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcrypt');
|
||||
const moment = require('moment');
|
||||
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const files = sequelize.define(
|
||||
'files',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
file_name: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
upload_date: {
|
||||
type: DataTypes.DATE,
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
},
|
||||
);
|
||||
|
||||
files.associate = (db) => {
|
||||
/// Loop through entities and their fields; when a field's ref matches this entity's name, create the corresponding hasMany relation on the parent entity.
|
||||
|
||||
//end loop
|
||||
|
||||
db.files.belongsTo(db.users, {
|
||||
as: 'user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.files.hasMany(db.file, {
|
||||
as: 'file_data',
|
||||
foreignKey: 'belongsToId',
|
||||
constraints: false,
|
||||
scope: {
|
||||
belongsTo: db.files.getTableName(),
|
||||
belongsToColumn: 'file_data',
|
||||
},
|
||||
});
|
||||
|
||||
db.files.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.files.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return files;
|
||||
};
|
||||
47
backend/src/db/models/index.js
Normal file
@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const Sequelize = require('sequelize');
|
||||
const basename = path.basename(__filename);
|
||||
const env = process.env.NODE_ENV || 'development';
|
||||
const config = require('../db.config')[env];
|
||||
const db = {};
|
||||
|
||||
let sequelize;
|
||||
console.log(env);
|
||||
if (config.use_env_variable) {
|
||||
sequelize = new Sequelize(process.env[config.use_env_variable], config);
|
||||
} else {
|
||||
sequelize = new Sequelize(
|
||||
config.database,
|
||||
config.username,
|
||||
config.password,
|
||||
config,
|
||||
);
|
||||
}
|
||||
|
||||
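// Load every model definition in this directory (except this index file), register it on db, then wire up associations below.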
fs.readdirSync(__dirname)
|
||||
.filter((file) => {
|
||||
return (
|
||||
file.indexOf('.') !== 0 && file !== basename && file.slice(-3) === '.js'
|
||||
);
|
||||
})
|
||||
.forEach((file) => {
|
||||
const model = require(path.join(__dirname, file))(
|
||||
sequelize,
|
||||
Sequelize.DataTypes,
|
||||
);
|
||||
db[model.name] = model;
|
||||
});
|
||||
|
||||
Object.keys(db).forEach((modelName) => {
|
||||
if (db[modelName].associate) {
|
||||
db[modelName].associate(db);
|
||||
}
|
||||
});
|
||||
|
||||
db.sequelize = sequelize;
|
||||
db.Sequelize = Sequelize;
|
||||
|
||||
module.exports = db;
|
||||
49
backend/src/db/models/permissions.js
Normal file
@ -0,0 +1,49 @@
|
||||
const config = require('../../config');
|
||||
const providers = config.providers;
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcrypt');
|
||||
const moment = require('moment');
|
||||
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const permissions = sequelize.define(
|
||||
'permissions',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
},
|
||||
);
|
||||
|
||||
permissions.associate = (db) => {
|
||||
/// Loop through entities and their fields; when a field's ref matches this entity's name, create the corresponding hasMany relation on the parent entity.
|
||||
|
||||
//end loop
|
||||
|
||||
db.permissions.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.permissions.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return permissions;
|
||||
};
|
||||
79
backend/src/db/models/roles.js
Normal file
@ -0,0 +1,79 @@
|
||||
const config = require('../../config');
|
||||
const providers = config.providers;
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcrypt');
|
||||
const moment = require('moment');
|
||||
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const roles = sequelize.define(
|
||||
'roles',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
name: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
role_customization: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
},
|
||||
);
|
||||
|
||||
roles.associate = (db) => {
|
||||
db.roles.belongsToMany(db.permissions, {
|
||||
as: 'permissions',
|
||||
foreignKey: {
|
||||
name: 'roles_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
through: 'rolesPermissionsPermissions',
|
||||
});
|
||||
|
||||
db.roles.belongsToMany(db.permissions, {
|
||||
as: 'permissions_filter',
|
||||
foreignKey: {
|
||||
name: 'roles_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
through: 'rolesPermissionsPermissions',
|
||||
});
|
||||
|
||||
/// Loop through entities and their fields; when a field's ref matches this entity's name, create the corresponding hasMany relation on the parent entity.
|
||||
|
||||
db.roles.hasMany(db.users, {
|
||||
as: 'users_app_role',
|
||||
foreignKey: {
|
||||
name: 'app_roleId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
//end loop
|
||||
|
||||
db.roles.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.roles.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
return roles;
|
||||
};
|
||||
187
backend/src/db/models/users.js
Normal file
@ -0,0 +1,187 @@
|
||||
const config = require('../../config');
|
||||
const providers = config.providers;
|
||||
const crypto = require('crypto');
|
||||
const bcrypt = require('bcrypt');
|
||||
const moment = require('moment');
|
||||
|
||||
module.exports = function (sequelize, DataTypes) {
|
||||
const users = sequelize.define(
|
||||
'users',
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
|
||||
firstName: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
lastName: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
phoneNumber: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
email: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
disabled: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
},
|
||||
|
||||
password: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
emailVerified: {
|
||||
type: DataTypes.BOOLEAN,
|
||||
|
||||
allowNull: false,
|
||||
defaultValue: false,
|
||||
},
|
||||
|
||||
emailVerificationToken: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
emailVerificationTokenExpiresAt: {
|
||||
type: DataTypes.DATE,
|
||||
},
|
||||
|
||||
passwordResetToken: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
passwordResetTokenExpiresAt: {
|
||||
type: DataTypes.DATE,
|
||||
},
|
||||
|
||||
provider: {
|
||||
type: DataTypes.TEXT,
|
||||
},
|
||||
|
||||
importHash: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
unique: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
paranoid: true,
|
||||
freezeTableName: true,
|
||||
},
|
||||
);
|
||||
|
||||
users.associate = (db) => {
|
||||
db.users.belongsToMany(db.permissions, {
|
||||
as: 'custom_permissions',
|
||||
foreignKey: {
|
||||
name: 'users_custom_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
through: 'usersCustom_permissionsPermissions',
|
||||
});
|
||||
|
||||
db.users.belongsToMany(db.permissions, {
|
||||
as: 'custom_permissions_filter',
|
||||
foreignKey: {
|
||||
name: 'users_custom_permissionsId',
|
||||
},
|
||||
constraints: false,
|
||||
through: 'usersCustom_permissionsPermissions',
|
||||
});
|
||||
|
||||
/// Loop through entities and their fields; when a field's ref matches this entity's name, create the corresponding hasMany relation on the parent entity.
|
||||
|
||||
db.users.hasMany(db.conversions, {
|
||||
as: 'conversions_user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.users.hasMany(db.files, {
|
||||
as: 'files_user',
|
||||
foreignKey: {
|
||||
name: 'userId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
//end loop
|
||||
|
||||
db.users.belongsTo(db.roles, {
|
||||
as: 'app_role',
|
||||
foreignKey: {
|
||||
name: 'app_roleId',
|
||||
},
|
||||
constraints: false,
|
||||
});
|
||||
|
||||
db.users.hasMany(db.file, {
|
||||
as: 'avatar',
|
||||
foreignKey: 'belongsToId',
|
||||
constraints: false,
|
||||
scope: {
|
||||
belongsTo: db.users.getTableName(),
|
||||
belongsToColumn: 'avatar',
|
||||
},
|
||||
});
|
||||
|
||||
db.users.belongsTo(db.users, {
|
||||
as: 'createdBy',
|
||||
});
|
||||
|
||||
db.users.belongsTo(db.users, {
|
||||
as: 'updatedBy',
|
||||
});
|
||||
};
|
||||
|
||||
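// For accounts created through a non-local auth provider, mark the email as verified and assign a random bcrypt-hashed password.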
users.beforeCreate((users, options) => {
|
||||
users = trimStringFields(users);
|
||||
|
||||
if (
|
||||
users.provider !== providers.LOCAL &&
|
||||
Object.values(providers).indexOf(users.provider) > -1
|
||||
) {
|
||||
users.emailVerified = true;
|
||||
|
||||
if (!users.password) {
|
||||
const password = crypto.randomBytes(20).toString('hex');
|
||||
|
||||
const hashedPassword = bcrypt.hashSync(
|
||||
password,
|
||||
config.bcrypt.saltRounds,
|
||||
);
|
||||
|
||||
users.password = hashedPassword;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
users.beforeUpdate((users, options) => {
|
||||
users = trimStringFields(users);
|
||||
});
|
||||
|
||||
return users;
|
||||
};
|
||||
|
||||
function trimStringFields(users) {
|
||||
users.email = users.email.trim();
|
||||
|
||||
users.firstName = users.firstName ? users.firstName.trim() : null;
|
||||
|
||||
users.lastName = users.lastName ? users.lastName.trim() : null;
|
||||
|
||||
return users;
|
||||
}
|
||||
16
backend/src/db/reset.js
Normal file
@ -0,0 +1,16 @@
|
||||
const db = require('./models');
|
||||
const { execSync } = require('child_process');
|
||||
|
||||
console.log('Resetting Database');
|
||||
|
||||
db.sequelize
|
||||
.sync({ force: true })
|
||||
.then(() => {
|
||||
execSync('sequelize db:seed:all');
|
||||
console.log('OK');
|
||||
process.exit();
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
69
backend/src/db/seeders/20200430130759-admin-user.js
Normal file
@ -0,0 +1,69 @@
|
||||
'use strict';
|
||||
const bcrypt = require('bcrypt');
|
||||
const config = require('../../config');
|
||||
|
||||
const ids = [
|
||||
'193bf4b5-9f07-4bd5-9a43-e7e41f3e96af',
|
||||
'af5a87be-8f9c-4630-902a-37a60b7005ba',
|
||||
'5bc531ab-611f-41f3-9373-b7cc5d09c93d',
|
||||
];
|
||||
|
||||
module.exports = {
|
||||
up: async (queryInterface, Sequelize) => {
|
||||
let hash = bcrypt.hashSync(config.admin_pass, config.bcrypt.saltRounds);
|
||||
|
||||
try {
|
||||
await queryInterface.bulkInsert('users', [
|
||||
{
|
||||
id: ids[0],
|
||||
firstName: 'Admin',
|
||||
email: config.admin_email,
|
||||
emailVerified: true,
|
||||
provider: config.providers.LOCAL,
|
||||
password: hash,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
{
|
||||
id: ids[1],
|
||||
firstName: 'John',
|
||||
email: 'john@doe.com',
|
||||
emailVerified: true,
|
||||
provider: config.providers.LOCAL,
|
||||
password: hash,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
{
|
||||
id: ids[2],
|
||||
firstName: 'Client',
|
||||
email: 'client@hello.com',
|
||||
emailVerified: true,
|
||||
provider: config.providers.LOCAL,
|
||||
password: hash,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
]);
|
||||
} catch (error) {
|
||||
console.error('Error during bulkInsert:', error);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
down: async (queryInterface, Sequelize) => {
|
||||
try {
|
||||
await queryInterface.bulkDelete(
|
||||
'users',
|
||||
{
|
||||
id: {
|
||||
[Sequelize.Op.in]: ids,
|
||||
},
|
||||
},
|
||||
{},
|
||||
);
|
||||
} catch (error) {
|
||||
console.error('Error during bulkDelete:', error);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
608
backend/src/db/seeders/20200430130760-user-roles.js
Normal file
@ -0,0 +1,608 @@
|
||||
const { v4: uuid } = require('uuid');
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
* @param {import("sequelize").QueryInterface} queryInterface
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async up(queryInterface) {
|
||||
const createdAt = new Date();
|
||||
const updatedAt = new Date();
|
||||
|
||||
/** @type {Map<string, string>} */
|
||||
const idMap = new Map();
|
||||
|
||||
/**
|
||||
* @param {string} key
|
||||
* @return {string}
|
||||
*/
|
||||
function getId(key) {
|
||||
if (idMap.has(key)) {
|
||||
return idMap.get(key);
|
||||
}
|
||||
const id = uuid();
|
||||
idMap.set(key, id);
|
||||
return id;
|
||||
}
|
||||
|
||||
await queryInterface.bulkInsert('roles', [
|
||||
{
|
||||
id: getId('Administrator'),
|
||||
name: 'Administrator',
|
||||
createdAt,
|
||||
updatedAt,
|
||||
},
|
||||
|
||||
{
|
||||
id: getId('SystemManager'),
|
||||
name: 'System Manager',
|
||||
createdAt,
|
||||
updatedAt,
|
||||
},
|
||||
|
||||
{
|
||||
id: getId('OperationsLead'),
|
||||
name: 'Operations Lead',
|
||||
createdAt,
|
||||
updatedAt,
|
||||
},
|
||||
|
||||
{
|
||||
id: getId('ConversionSpecialist'),
|
||||
name: 'Conversion Specialist',
|
||||
createdAt,
|
||||
updatedAt,
|
||||
},
|
||||
|
||||
{
|
||||
id: getId('SupportAgent'),
|
||||
name: 'Support Agent',
|
||||
createdAt,
|
||||
updatedAt,
|
||||
},
|
||||
|
||||
{ id: getId('Viewer'), name: 'Viewer', createdAt, updatedAt },
|
||||
]);
|
||||
|
||||
/**
|
||||
* @param {string} name
|
||||
*/
|
||||
function createPermissions(name) {
|
||||
return [
|
||||
{
|
||||
id: getId(`CREATE_${name.toUpperCase()}`),
|
||||
createdAt,
|
||||
updatedAt,
|
||||
name: `CREATE_${name.toUpperCase()}`,
|
||||
},
|
||||
{
|
||||
id: getId(`READ_${name.toUpperCase()}`),
|
||||
createdAt,
|
||||
updatedAt,
|
||||
name: `READ_${name.toUpperCase()}`,
|
||||
},
|
||||
{
|
||||
id: getId(`UPDATE_${name.toUpperCase()}`),
|
||||
createdAt,
|
||||
updatedAt,
|
||||
name: `UPDATE_${name.toUpperCase()}`,
|
||||
},
|
||||
{
|
||||
id: getId(`DELETE_${name.toUpperCase()}`),
|
||||
createdAt,
|
||||
updatedAt,
|
||||
name: `DELETE_${name.toUpperCase()}`,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
const entities = [
|
||||
'users',
|
||||
'conversions',
|
||||
'files',
|
||||
'roles',
|
||||
'permissions',
|
||||
|
||||
];
|
||||
await queryInterface.bulkInsert(
|
||||
'permissions',
|
||||
entities.flatMap(createPermissions),
|
||||
);
|
||||
await queryInterface.bulkInsert('permissions', [
|
||||
{
|
||||
id: getId(`READ_API_DOCS`),
|
||||
createdAt,
|
||||
updatedAt,
|
||||
name: `READ_API_DOCS`,
|
||||
},
|
||||
]);
|
||||
await queryInterface.bulkInsert('permissions', [
|
||||
{
|
||||
id: getId(`CREATE_SEARCH`),
|
||||
createdAt,
|
||||
updatedAt,
|
||||
name: `CREATE_SEARCH`,
|
||||
},
|
||||
]);
|
||||
|
||||
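// Join table backing the roles <-> permissions belongsToMany association ('rolesPermissionsPermissions' in models/roles.js).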
await queryInterface.sequelize
|
||||
.query(`create table "rolesPermissionsPermissions"
|
||||
(
|
||||
"createdAt" timestamp with time zone not null,
|
||||
"updatedAt" timestamp with time zone not null,
|
||||
"roles_permissionsId" uuid not null,
|
||||
"permissionId" uuid not null,
|
||||
primary key ("roles_permissionsId", "permissionId")
|
||||
);`);
|
||||
|
||||
await queryInterface.bulkInsert('rolesPermissionsPermissions', [
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('CREATE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('READ_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('UPDATE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('DELETE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('CREATE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('READ_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('CREATE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('READ_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('CREATE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('READ_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Viewer'),
|
||||
permissionId: getId('READ_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('CREATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('READ_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('UPDATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('DELETE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('CREATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('READ_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('UPDATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('DELETE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('CREATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('READ_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('UPDATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('CREATE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('READ_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Viewer'),
|
||||
permissionId: getId('READ_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('CREATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('READ_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('UPDATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('DELETE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('CREATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('READ_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('UPDATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('DELETE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('CREATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('READ_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('UPDATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('CREATE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('READ_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Viewer'),
|
||||
permissionId: getId('READ_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SystemManager'),
|
||||
permissionId: getId('CREATE_SEARCH'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('OperationsLead'),
|
||||
permissionId: getId('CREATE_SEARCH'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('ConversionSpecialist'),
|
||||
permissionId: getId('CREATE_SEARCH'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('SupportAgent'),
|
||||
permissionId: getId('CREATE_SEARCH'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Viewer'),
|
||||
permissionId: getId('CREATE_SEARCH'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('CREATE_USERS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('READ_USERS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('UPDATE_USERS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('DELETE_USERS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('CREATE_CONVERSIONS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('READ_CONVERSIONS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('UPDATE_CONVERSIONS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('DELETE_CONVERSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('CREATE_FILES'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('READ_FILES'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('UPDATE_FILES'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('DELETE_FILES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('CREATE_ROLES'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('READ_ROLES'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('UPDATE_ROLES'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('DELETE_ROLES'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('CREATE_PERMISSIONS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('READ_PERMISSIONS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('UPDATE_PERMISSIONS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('DELETE_PERMISSIONS'),
|
||||
},
|
||||
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('READ_API_DOCS'),
|
||||
},
|
||||
{
|
||||
createdAt,
|
||||
updatedAt,
|
||||
roles_permissionsId: getId('Administrator'),
|
||||
permissionId: getId('CREATE_SEARCH'),
|
||||
},
|
||||
]);
|
||||
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE "users" SET "app_roleId"='${getId(
|
||||
'SuperAdmin',
|
||||
)}' WHERE "email"='super_admin@flatlogic.com'`,
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE "users" SET "app_roleId"='${getId(
|
||||
'Administrator',
|
||||
)}' WHERE "email"='admin@flatlogic.com'`,
|
||||
);
|
||||
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE "users" SET "app_roleId"='${getId(
|
||||
'SystemManager',
|
||||
)}' WHERE "email"='client@hello.com'`,
|
||||
);
|
||||
await queryInterface.sequelize.query(
|
||||
`UPDATE "users" SET "app_roleId"='${getId(
|
||||
'OperationsLead',
|
||||
)}' WHERE "email"='john@doe.com'`,
|
||||
);
|
||||
},
|
||||
};
|
||||
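The entries above wire each seeded role to its permissions through getId. How getId resolves a name to an id is defined earlier in this seeder and is not part of this excerpt; a minimal sketch of the usual pattern (an assumption, not the generated code):

// Sketch (assumption): ids are remembered as roles and permissions are inserted,
// then looked up by name when the roles_permissions rows are built.
const ids = {};
const saveId = (name, id) => {
  ids[name] = id;
};
const getId = (name) => ids[name];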
164
backend/src/db/seeders/20231127130745-sample-data.js
Normal file
164
backend/src/db/seeders/20231127130745-sample-data.js
Normal file
@ -0,0 +1,164 @@
const db = require('../models');
const Users = db.users;

const Conversions = db.conversions;

const Files = db.files;

const ConversionsData = [
  {
    file_name: 'report.pdf',

    conversion_type: 'pdf_to_excel',

    // type code here for "relation_one" field

    conversion_date: new Date('2023-10-01T10:00:00Z'),
  },

  {
    file_name: 'presentation.pptx',

    conversion_type: 'image_to_jpg',

    // type code here for "relation_one" field

    conversion_date: new Date('2023-10-02T11:30:00Z'),
  },

  {
    file_name: 'image.jpg',

    conversion_type: 'pdf_to_word',

    // type code here for "relation_one" field

    conversion_date: new Date('2023-10-03T09:15:00Z'),
  },
];

const FilesData = [
  {
    file_name: 'contract.pdf',

    // type code here for "files" field

    // type code here for "relation_one" field

    upload_date: new Date('2023-10-01T10:00:00Z'),
  },

  {
    file_name: 'invoice.xlsx',

    // type code here for "files" field

    // type code here for "relation_one" field

    upload_date: new Date('2023-10-02T11:30:00Z'),
  },

  {
    file_name: 'profile.jpg',

    // type code here for "files" field

    // type code here for "relation_one" field

    upload_date: new Date('2023-10-03T09:15:00Z'),
  },
];

// Similar logic for "relation_many"

async function associateConversionWithUser() {
  const relatedUser0 = await Users.findOne({
    offset: Math.floor(Math.random() * (await Users.count())),
  });
  const Conversion0 = await Conversions.findOne({
    order: [['id', 'ASC']],
    offset: 0,
  });
  if (Conversion0?.setUser) {
    await Conversion0.setUser(relatedUser0);
  }

  const relatedUser1 = await Users.findOne({
    offset: Math.floor(Math.random() * (await Users.count())),
  });
  const Conversion1 = await Conversions.findOne({
    order: [['id', 'ASC']],
    offset: 1,
  });
  if (Conversion1?.setUser) {
    await Conversion1.setUser(relatedUser1);
  }

  const relatedUser2 = await Users.findOne({
    offset: Math.floor(Math.random() * (await Users.count())),
  });
  const Conversion2 = await Conversions.findOne({
    order: [['id', 'ASC']],
    offset: 2,
  });
  if (Conversion2?.setUser) {
    await Conversion2.setUser(relatedUser2);
  }
}

async function associateFileWithUser() {
  const relatedUser0 = await Users.findOne({
    offset: Math.floor(Math.random() * (await Users.count())),
  });
  const File0 = await Files.findOne({
    order: [['id', 'ASC']],
    offset: 0,
  });
  if (File0?.setUser) {
    await File0.setUser(relatedUser0);
  }

  const relatedUser1 = await Users.findOne({
    offset: Math.floor(Math.random() * (await Users.count())),
  });
  const File1 = await Files.findOne({
    order: [['id', 'ASC']],
    offset: 1,
  });
  if (File1?.setUser) {
    await File1.setUser(relatedUser1);
  }

  const relatedUser2 = await Users.findOne({
    offset: Math.floor(Math.random() * (await Users.count())),
  });
  const File2 = await Files.findOne({
    order: [['id', 'ASC']],
    offset: 2,
  });
  if (File2?.setUser) {
    await File2.setUser(relatedUser2);
  }
}

module.exports = {
  up: async (queryInterface, Sequelize) => {
    await Conversions.bulkCreate(ConversionsData);

    await Files.bulkCreate(FilesData);

    await Promise.all([
      // Similar logic for "relation_many"

      await associateConversionWithUser(),

      await associateFileWithUser(),
    ]);
  },

  down: async (queryInterface, Sequelize) => {
    await queryInterface.bulkDelete('conversions', null, {});

    await queryInterface.bulkDelete('files', null, {});
  },
};
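Note that in the up step above each associate helper is already awaited inside the Promise.all array, so the array only ever contains resolved values and the two helpers run one after the other. A sketch of the equivalent, more explicit form:

// Equivalent behavior: run the association helpers sequentially.
await associateConversionWithUser();
await associateFileWithUser();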
24
backend/src/db/utils.js
Normal file
24
backend/src/db/utils.js
Normal file
@ -0,0 +1,24 @@
const validator = require('validator');
const { v4: uuid } = require('uuid');
const Sequelize = require('./models').Sequelize;

module.exports = class Utils {
  static uuid(value) {
    let id = value;

    if (!validator.isUUID(id)) {
      id = uuid();
    }

    return id;
  }

  static ilike(model, column, value) {
    return Sequelize.where(
      Sequelize.fn('lower', Sequelize.col(`${model}.${column}`)),
      {
        [Sequelize.Op.like]: `%${value}%`.toLowerCase(),
      },
    );
  }
};
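A short usage sketch for Utils.ilike. The call site is hypothetical; the model and column names come from the seeders above.

// Hypothetical call site: case-insensitive substring match on file_name.
const Utils = require('./utils');
const db = require('./models');

async function searchFilesByName(query) {
  return db.files.findAll({
    where: Utils.ilike('files', 'file_name', query),
  });
}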
23
backend/src/helpers.js
Normal file
23
backend/src/helpers.js
Normal file
@ -0,0 +1,23 @@
const jwt = require('jsonwebtoken');
const config = require('./config');

module.exports = class Helpers {
  static wrapAsync(fn) {
    return function (req, res, next) {
      fn(req, res, next).catch(next);
    };
  }

  static commonErrorHandler(error, req, res, next) {
    if ([400, 403, 404].includes(error.code)) {
      return res.status(error.code).send(error.message);
    }

    console.error(error);
    return res.status(500).send(error.message);
  }

  static jwtSign(data) {
    return jwt.sign(data, config.secret_key, { expiresIn: '6h' });
  }
};
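wrapAsync and commonErrorHandler are used together throughout the routes below; a minimal sketch of the pattern with a hypothetical router:

// wrapAsync forwards a rejected promise to next(), and commonErrorHandler maps
// known error codes (400/403/404) to HTTP responses.
const express = require('express');
const { wrapAsync, commonErrorHandler } = require('./helpers');

const router = express.Router();

router.get(
  '/ping',
  wrapAsync(async (req, res) => {
    res.status(200).send('pong');
  }),
);

router.use('/', commonErrorHandler);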
153
backend/src/index.js
Normal file
153
backend/src/index.js
Normal file
@ -0,0 +1,153 @@
const express = require('express');
const cors = require('cors');
const app = express();
const passport = require('passport');
const path = require('path');
const fs = require('fs');
const bodyParser = require('body-parser');
const db = require('./db/models');
const config = require('./config');
const swaggerUI = require('swagger-ui-express');
const swaggerJsDoc = require('swagger-jsdoc');

const authRoutes = require('./routes/auth');
const fileRoutes = require('./routes/file');
const searchRoutes = require('./routes/search');
const pexelsRoutes = require('./routes/pexels');

const openaiRoutes = require('./routes/openai');

const contactFormRoutes = require('./routes/contactForm');

const usersRoutes = require('./routes/users');

const conversionsRoutes = require('./routes/conversions');

const filesRoutes = require('./routes/files');

const rolesRoutes = require('./routes/roles');

const permissionsRoutes = require('./routes/permissions');

const options = {
  definition: {
    openapi: '3.0.0',
    info: {
      version: '1.0.0',
      title: 'i want to create a website for pdf & image convert',
      description:
        'i want to create a website for pdf & image convert Online REST API for Testing and Prototyping application. You can perform all major operations with your entities - create, delete and etc.',
    },
    servers: [
      {
        url: config.swaggerUrl,
        description: 'Development server',
      },
    ],
    components: {
      securitySchemes: {
        bearerAuth: {
          type: 'http',
          scheme: 'bearer',
          bearerFormat: 'JWT',
        },
      },
      responses: {
        UnauthorizedError: {
          description: 'Access token is missing or invalid',
        },
      },
    },
    security: [
      {
        bearerAuth: [],
      },
    ],
  },
  apis: ['./src/routes/*.js'],
};

const specs = swaggerJsDoc(options);
app.use(
  '/api-docs',
  function (req, res, next) {
    swaggerUI.host = req.get('host');
    next();
  },
  swaggerUI.serve,
  swaggerUI.setup(specs),
);

app.use(cors({ origin: true }));
require('./auth/auth');

app.use(bodyParser.json());

app.use('/api/auth', authRoutes);
app.use('/api/file', fileRoutes);
app.use('/api/pexels', pexelsRoutes);
app.enable('trust proxy');

app.use(
  '/api/users',
  passport.authenticate('jwt', { session: false }),
  usersRoutes,
);

app.use(
  '/api/conversions',
  passport.authenticate('jwt', { session: false }),
  conversionsRoutes,
);

app.use(
  '/api/files',
  passport.authenticate('jwt', { session: false }),
  filesRoutes,
);

app.use(
  '/api/roles',
  passport.authenticate('jwt', { session: false }),
  rolesRoutes,
);

app.use(
  '/api/permissions',
  passport.authenticate('jwt', { session: false }),
  permissionsRoutes,
);

app.use(
  '/api/openai',
  passport.authenticate('jwt', { session: false }),
  openaiRoutes,
);

app.use('/api/contact-form', contactFormRoutes);

app.use(
  '/api/search',
  passport.authenticate('jwt', { session: false }),
  searchRoutes,
);

const publicDir = path.join(__dirname, '../public');

if (fs.existsSync(publicDir)) {
  app.use('/', express.static(publicDir));

  app.get('*', function (request, response) {
    response.sendFile(path.resolve(publicDir, 'index.html'));
  });
}

const PORT = process.env.NODE_ENV === 'dev_stage' ? 3000 : 8080;

db.sequelize.sync().then(function () {
  app.listen(PORT, () => {
    console.log(`Listening on port ${PORT}`);
  });
});

module.exports = app;
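A client-side sketch of how the API above is typically exercised. The base URL and the assumption that the signin payload exposes the JWT as a token field are not confirmed by this commit; the demo credentials are the defaults from the auth route's Swagger schema.

// Hypothetical client: sign in, then call a JWT-protected route.
const BASE = 'http://localhost:8080'; // assumption: the non-dev_stage port from index.js

async function listConversions() {
  const signinRes = await fetch(`${BASE}/api/auth/signin/local`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email: 'admin@flatlogic.com', password: 'password' }),
  });
  const payload = await signinRes.json();
  const token = payload.token; // assumption: where AuthService.signin puts the JWT

  const res = await fetch(`${BASE}/api/conversions`, {
    headers: { Authorization: `Bearer ${token}` },
  });
  return res.json();
}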
64
backend/src/middlewares/check-permissions.js
Normal file
64
backend/src/middlewares/check-permissions.js
Normal file
@ -0,0 +1,64 @@
const ValidationError = require('../services/notifications/errors/validation');

/**
 * @param {string} permission
 * @return {import("express").RequestHandler}
 */
function checkPermissions(permission) {
  return (req, res, next) => {
    const { currentUser } = req;
    if (currentUser) {
      if (currentUser.id === req.params.id || currentUser.id === req.body.id) {
        next();
        return;
      }
      const userPermission = currentUser.custom_permissions.find(
        (cp) => cp.name === permission,
      );

      if (userPermission) {
        next();
      } else {
        if (!currentUser.app_role) {
          return next(new ValidationError('auth.forbidden'));
        }
        currentUser.app_role
          .getPermissions()
          .then((permissions) => {
            if (permissions.find((p) => p.name === permission)) {
              next();
            } else {
              next(new ValidationError('auth.forbidden'));
            }
          })
          .catch((e) => next(e));
      }
    } else {
      next(new ValidationError('auth.unauthorized'));
    }
  };
}

const METHOD_MAP = {
  POST: 'CREATE',
  GET: 'READ',
  PUT: 'UPDATE',
  PATCH: 'UPDATE',
  DELETE: 'DELETE',
};

/**
 * @param {string} name
 * @return {import("express").RequestHandler}
 */
function checkCrudPermissions(name) {
  return (req, res, next) => {
    const permissionName = `${METHOD_MAP[req.method]}_${name.toUpperCase()}`;
    checkPermissions(permissionName)(req, res, next);
  };
}

module.exports = {
  checkPermissions,
  checkCrudPermissions,
};
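checkCrudPermissions derives the permission name from the HTTP method and the entity name, which is exactly the naming scheme used by the seeded permissions above. A small self-contained sketch:

// Derived permission names for the seeded entities:
//   GET    /api/files            -> READ_FILES
//   POST   /api/conversions      -> CREATE_CONVERSIONS
//   PUT    /api/users/:id        -> UPDATE_USERS
//   DELETE /api/roles/:id        -> DELETE_ROLES
const express = require('express');
const { checkCrudPermissions } = require('./check-permissions');

const router = express.Router();
router.use(checkCrudPermissions('files')); // every request on this router is gated, as in routes/files.js below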
11
backend/src/middlewares/upload.js
Normal file
11
backend/src/middlewares/upload.js
Normal file
@ -0,0 +1,11 @@
const util = require('util');
const Multer = require('multer');
const maxSize = 10 * 1024 * 1024;

let processFile = Multer({
  storage: Multer.memoryStorage(),
  limits: { fileSize: maxSize },
}).single('file');

let processFileMiddleware = util.promisify(processFile);
module.exports = processFileMiddleware;
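A usage sketch for the promisified upload middleware at a hypothetical call site; with memory storage the uploaded file arrives as a Buffer on req.file.

// Hypothetical handler: run the multer middleware, then inspect req.file.
const processFileMiddleware = require('./upload');

async function handleUpload(req, res) {
  await processFileMiddleware(req, res);

  if (!req.file) {
    return res.status(400).send({ message: 'Please upload a file!' });
  }

  res.status(200).send({
    name: req.file.originalname,
    size: req.file.size, // bytes, capped at 10 MB by the limits above
  });
}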
268
backend/src/routes/auth.js
Normal file
268
backend/src/routes/auth.js
Normal file
@ -0,0 +1,268 @@
|
||||
const express = require('express');
|
||||
const passport = require('passport');
|
||||
|
||||
const config = require('../config');
|
||||
const AuthService = require('../services/auth');
|
||||
const ForbiddenError = require('../services/notifications/errors/forbidden');
|
||||
const EmailSender = require('../services/email');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Auth:
|
||||
* type: object
|
||||
* required:
|
||||
* - email
|
||||
* - password
|
||||
* properties:
|
||||
* email:
|
||||
* type: string
|
||||
* default: admin@flatlogic.com
|
||||
* description: User email
|
||||
* password:
|
||||
* type: string
|
||||
* default: password
|
||||
* description: User password
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* tags:
|
||||
* name: Auth
|
||||
* description: Authorization operations
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/auth/signin/local:
|
||||
* post:
|
||||
* tags: [Auth]
|
||||
* summary: Logs user into the system
|
||||
* description: Logs user into the system
|
||||
* requestBody:
|
||||
* description: Set valid user email and password
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Auth"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Successful login
|
||||
* 400:
|
||||
* description: Invalid username/password supplied
|
||||
* x-codegen-request-body-name: body
|
||||
*/
|
||||
|
||||
router.post(
|
||||
'/signin/local',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await AuthService.signin(
|
||||
req.body.email,
|
||||
req.body.password,
|
||||
req,
|
||||
);
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/auth/me:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Auth]
|
||||
* summary: Get current authorized user info
|
||||
* description: Get current authorized user info
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Successful retrieval of current authorized user data
|
||||
* 400:
|
||||
* description: Invalid username/password supplied
|
||||
* x-codegen-request-body-name: body
|
||||
*/
|
||||
|
||||
router.get(
|
||||
'/me',
|
||||
passport.authenticate('jwt', { session: false }),
|
||||
(req, res) => {
|
||||
if (!req.currentUser || !req.currentUser.id) {
|
||||
throw new ForbiddenError();
|
||||
}
|
||||
|
||||
const payload = req.currentUser;
|
||||
delete payload.password;
|
||||
res.status(200).send(payload);
|
||||
},
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/password-reset',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await AuthService.passwordReset(
|
||||
req.body.token,
|
||||
req.body.password,
|
||||
req,
|
||||
);
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/password-update',
|
||||
passport.authenticate('jwt', { session: false }),
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await AuthService.passwordUpdate(
|
||||
req.body.currentPassword,
|
||||
req.body.newPassword,
|
||||
req,
|
||||
);
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/send-email-address-verification-email',
|
||||
passport.authenticate('jwt', { session: false }),
|
||||
wrapAsync(async (req, res) => {
|
||||
if (!req.currentUser) {
|
||||
throw new ForbiddenError();
|
||||
}
|
||||
|
||||
await AuthService.sendEmailAddressVerificationEmail(req.currentUser.email);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/send-password-reset-email',
|
||||
wrapAsync(async (req, res) => {
|
||||
const link = new URL(req.headers.referer);
|
||||
await AuthService.sendPasswordResetEmail(
|
||||
req.body.email,
|
||||
'register',
|
||||
link.host,
|
||||
);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/auth/signup:
|
||||
* post:
|
||||
* tags: [Auth]
|
||||
* summary: Register new user into the system
|
||||
* description: Register new user into the system
|
||||
* requestBody:
|
||||
* description: Set valid user email and password
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Auth"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: New user successfully signed up
|
||||
* 400:
|
||||
* description: Invalid username/password supplied
|
||||
* 500:
|
||||
* description: Some server error
|
||||
* x-codegen-request-body-name: body
|
||||
*/
|
||||
|
||||
router.post(
|
||||
'/signup',
|
||||
wrapAsync(async (req, res) => {
|
||||
const link = new URL(req.headers.referer);
|
||||
const payload = await AuthService.signup(
|
||||
req.body.email,
|
||||
req.body.password,
|
||||
|
||||
req,
|
||||
link.host,
|
||||
);
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/profile',
|
||||
passport.authenticate('jwt', { session: false }),
|
||||
wrapAsync(async (req, res) => {
|
||||
if (!req.currentUser || !req.currentUser.id) {
|
||||
throw new ForbiddenError();
|
||||
}
|
||||
|
||||
await AuthService.updateProfile(req.body.profile, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/verify-email',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await AuthService.verifyEmail(
|
||||
req.body.token,
|
||||
req,
|
||||
req.headers.referer,
|
||||
);
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.get('/email-configured', (req, res) => {
|
||||
const payload = EmailSender.isConfigured;
|
||||
res.status(200).send(payload);
|
||||
});
|
||||
|
||||
router.get('/signin/google', (req, res, next) => {
|
||||
passport.authenticate('google', {
|
||||
scope: ['profile', 'email'],
|
||||
state: req.query.app,
|
||||
})(req, res, next);
|
||||
});
|
||||
|
||||
router.get(
|
||||
'/signin/google/callback',
|
||||
passport.authenticate('google', {
|
||||
failureRedirect: '/login',
|
||||
session: false,
|
||||
}),
|
||||
|
||||
function (req, res) {
|
||||
socialRedirect(res, req.query.state, req.user.token, config);
|
||||
},
|
||||
);
|
||||
|
||||
router.get('/signin/microsoft', (req, res, next) => {
|
||||
passport.authenticate('microsoft', {
|
||||
scope: ['https://graph.microsoft.com/user.read openid'],
|
||||
state: req.query.app,
|
||||
})(req, res, next);
|
||||
});
|
||||
|
||||
router.get(
|
||||
'/signin/microsoft/callback',
|
||||
passport.authenticate('microsoft', {
|
||||
failureRedirect: '/login',
|
||||
session: false,
|
||||
}),
|
||||
function (req, res) {
|
||||
socialRedirect(res, req.query.state, req.user.token, config);
|
||||
},
|
||||
);
|
||||
|
||||
router.use('/', require('../helpers').commonErrorHandler);
|
||||
|
||||
function socialRedirect(res, state, token, config) {
|
||||
res.redirect(config.uiUrl + '/login?token=' + token);
|
||||
}
|
||||
|
||||
module.exports = router;
|
||||
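These routes authenticate with passport.authenticate('jwt', ...), so the require('./auth/auth') call in index.js must register a 'jwt' strategy. That file is not part of this excerpt; a rough sketch of such a strategy with passport-jwt follows (an assumption — the real module also has to populate req.currentUser, which the routes read, and the JWT payload shape depends on Helpers.jwtSign).

// Sketch (assumption): a passport-jwt strategy compatible with the routes above.
const passport = require('passport');
const { Strategy: JwtStrategy, ExtractJwt } = require('passport-jwt');
const config = require('../config');
const db = require('../db/models');

passport.use(
  new JwtStrategy(
    {
      jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
      secretOrKey: config.secret_key,
    },
    async (jwtPayload, done) => {
      try {
        // assumption: the signed token carries the user id as `id`
        const user = await db.users.findByPk(jwtPayload.id);
        return done(null, user || false);
      } catch (error) {
        return done(error, false);
      }
    },
  ),
);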
33
backend/src/routes/contactForm.js
Normal file
33
backend/src/routes/contactForm.js
Normal file
@ -0,0 +1,33 @@
const express = require('express');
const router = express.Router();
const EmailSender = require('../services/email');

router.post('/send', async (req, res) => {
  try {
    const { email, subject, message } = req.body;

    if (!email || !subject || !message) {
      return res.status(400).json({ error: 'All fields are required' });
    }

    const emailSender = new EmailSender({
      to: 'aslu@me.com',
      subject: subject,
      html: () => `
        <p><strong>From:</strong> ${email}</p>
        <p><strong>Subject:</strong> ${subject}</p>
        <p><strong>Message:</strong></p>
        <p>${message}</p>
      `,
      text: () => `From: ${email}\nSubject: ${subject}\nMessage:\n${message}`,
    });

    await emailSender.send();
    res.status(200).json({ message: 'Email sent successfully' });
  } catch (error) {
    console.error('Error sending email:', error);
    res.status(500).json({ error: 'Error sending email' });
  }
});

module.exports = router;
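The contact form router is mounted without JWT authentication in index.js, so a plain POST is enough. A small client sketch (origin and field values are placeholders):

// Hypothetical client call for the /send handler above.
async function sendContactMessage() {
  await fetch('http://localhost:8080/api/contact-form/send', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      email: 'visitor@example.com',
      subject: 'Conversion question',
      message: 'Can I convert a multi-page PDF into separate JPG files?',
    }),
  });
}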
443
backend/src/routes/conversions.js
Normal file
443
backend/src/routes/conversions.js
Normal file
@ -0,0 +1,443 @@
|
||||
const express = require('express');
|
||||
|
||||
const ConversionsService = require('../services/conversions');
|
||||
const ConversionsDBApi = require('../db/api/conversions');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const { parse } = require('json2csv');
|
||||
|
||||
const { checkCrudPermissions } = require('../middlewares/check-permissions');
|
||||
|
||||
router.use(checkCrudPermissions('conversions'));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Conversions:
|
||||
* type: object
|
||||
* properties:
|
||||
|
||||
* file_name:
|
||||
* type: string
|
||||
* default: file_name
|
||||
|
||||
*
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* tags:
|
||||
* name: Conversions
|
||||
* description: The Conversions managing API
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Add new item
|
||||
* description: Add new item
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully added
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await ConversionsService.create(
|
||||
req.body.data,
|
||||
req.currentUser,
|
||||
true,
|
||||
link.host,
|
||||
);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/bulk-import:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Bulk import items
|
||||
* description: Bulk import items
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
* description: Data of the updated items
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully imported
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*
|
||||
*/
|
||||
router.post(
|
||||
'/bulk-import',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await ConversionsService.bulkImport(req, res, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/{id}:
|
||||
* put:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Update the data of the selected item
|
||||
* description: Update the data of the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to update
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* requestBody:
|
||||
* description: Set new item data
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* id:
|
||||
* description: ID of the updated item
|
||||
* type: string
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* required:
|
||||
* - id
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item data was successfully updated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.put(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await ConversionsService.update(
|
||||
req.body.data,
|
||||
req.body.id,
|
||||
req.currentUser,
|
||||
);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/{id}:
|
||||
* delete:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Delete the selected item
|
||||
* description: Delete the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to delete
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.delete(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await ConversionsService.remove(req.params.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/deleteByIds:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Delete the selected item list
|
||||
* description: Delete the selected item list
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* ids:
|
||||
* description: IDs of the updated items
|
||||
* type: array
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Items not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/deleteByIds',
|
||||
wrapAsync(async (req, res) => {
|
||||
await ConversionsService.deleteByIds(req.body.data, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Get all conversions
|
||||
* description: Get all conversions
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Conversions list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const filetype = req.query.filetype;
|
||||
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await ConversionsDBApi.findAll(req.query, { currentUser });
|
||||
if (filetype && filetype === 'csv') {
|
||||
const fields = ['id', 'file_name', 'conversion_date'];
|
||||
const opts = { fields };
|
||||
try {
|
||||
const csv = parse(payload.rows, opts);
|
||||
res.status(200).attachment(csv);
|
||||
res.send(csv);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
}
|
||||
} else {
|
||||
res.status(200).send(payload);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/count:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Count all conversions
|
||||
* description: Count all conversions
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Conversions count successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/count',
|
||||
wrapAsync(async (req, res) => {
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await ConversionsDBApi.findAll(req.query, null, {
|
||||
countOnly: true,
|
||||
currentUser,
|
||||
});
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/autocomplete:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Find all conversions that match search criteria
|
||||
* description: Find all conversions that match search criteria
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Conversions list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get('/autocomplete', async (req, res) => {
|
||||
const payload = await ConversionsDBApi.findAllAutocomplete(
|
||||
req.query.query,
|
||||
req.query.limit,
|
||||
req.query.offset,
|
||||
);
|
||||
|
||||
res.status(200).send(payload);
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/conversions/{id}:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Conversions]
|
||||
* summary: Get selected item
|
||||
* description: Get selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: ID of item to get
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Selected item successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Conversions"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await ConversionsDBApi.findBy({ id: req.params.id });
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.use('/', require('../helpers').commonErrorHandler);
|
||||
|
||||
module.exports = router;
|
||||
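One detail worth flagging in the CSV branch of the list endpoint above: res.attachment() expects a filename, but it is being passed the whole CSV string, so the generated Content-Disposition header will not carry a sensible name. A sketch of the more conventional form (an adjustment, not the code as generated):

// Conventional CSV download: name the attachment, set the type, send the body.
const csv = parse(payload.rows, opts);
res.status(200).attachment('conversions.csv');
res.type('text/csv');
res.send(csv);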
40
backend/src/routes/file.js
Normal file
40
backend/src/routes/file.js
Normal file
@ -0,0 +1,40 @@
const express = require('express');
const config = require('../config');
const path = require('path');
const passport = require('passport');
const services = require('../services/file');
const router = express.Router();

router.get('/download', (req, res) => {
  if (
    process.env.NODE_ENV == 'production' ||
    process.env.NEXT_PUBLIC_BACK_API
  ) {
    services.downloadGCloud(req, res);
  } else {
    services.downloadLocal(req, res);
  }
});

router.post(
  '/upload/:table/:field',
  passport.authenticate('jwt', { session: false }),
  (req, res) => {
    const fileName = `${req.params.table}/${req.params.field}`;

    if (
      process.env.NODE_ENV == 'production' ||
      process.env.NEXT_PUBLIC_BACK_API
    ) {
      services.uploadGCloud(fileName, req, res);
    } else {
      services.uploadLocal(fileName, {
        entity: null,
        maxFileSize: 10 * 1024 * 1024,
        folderIncludesAuthenticationUid: false,
      })(req, res);
    }
  },
);

module.exports = router;
433
backend/src/routes/files.js
Normal file
433
backend/src/routes/files.js
Normal file
@ -0,0 +1,433 @@
|
||||
const express = require('express');
|
||||
|
||||
const FilesService = require('../services/files');
|
||||
const FilesDBApi = require('../db/api/files');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const { parse } = require('json2csv');
|
||||
|
||||
const { checkCrudPermissions } = require('../middlewares/check-permissions');
|
||||
|
||||
router.use(checkCrudPermissions('files'));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Files:
|
||||
* type: object
|
||||
* properties:
|
||||
|
||||
* file_name:
|
||||
* type: string
|
||||
* default: file_name
|
||||
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* tags:
|
||||
* name: Files
|
||||
* description: The Files managing API
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Add new item
|
||||
* description: Add new item
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully added
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await FilesService.create(req.body.data, req.currentUser, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/bulk-import:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Bulk import items
|
||||
* description: Bulk import items
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
* description: Data of the updated items
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully imported
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*
|
||||
*/
|
||||
router.post(
|
||||
'/bulk-import',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await FilesService.bulkImport(req, res, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/{id}:
|
||||
* put:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Update the data of the selected item
|
||||
* description: Update the data of the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to update
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* requestBody:
|
||||
* description: Set new item data
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* id:
|
||||
* description: ID of the updated item
|
||||
* type: string
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* required:
|
||||
* - id
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item data was successfully updated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.put(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await FilesService.update(req.body.data, req.body.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/{id}:
|
||||
* delete:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Delete the selected item
|
||||
* description: Delete the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to delete
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.delete(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await FilesService.remove(req.params.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/deleteByIds:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Delete the selected item list
|
||||
* description: Delete the selected item list
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* ids:
|
||||
* description: IDs of the updated items
|
||||
* type: array
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Items not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/deleteByIds',
|
||||
wrapAsync(async (req, res) => {
|
||||
await FilesService.deleteByIds(req.body.data, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Get all files
|
||||
* description: Get all files
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Files list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const filetype = req.query.filetype;
|
||||
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await FilesDBApi.findAll(req.query, { currentUser });
|
||||
if (filetype && filetype === 'csv') {
|
||||
const fields = ['id', 'file_name', 'upload_date'];
|
||||
const opts = { fields };
|
||||
try {
|
||||
const csv = parse(payload.rows, opts);
|
||||
res.status(200).attachment(csv);
|
||||
res.send(csv);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
}
|
||||
} else {
|
||||
res.status(200).send(payload);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/count:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Count all files
|
||||
* description: Count all files
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Files count successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/count',
|
||||
wrapAsync(async (req, res) => {
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await FilesDBApi.findAll(req.query, null, {
|
||||
countOnly: true,
|
||||
currentUser,
|
||||
});
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/autocomplete:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Find all files that match search criteria
|
||||
* description: Find all files that match search criteria
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Files list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get('/autocomplete', async (req, res) => {
|
||||
const payload = await FilesDBApi.findAllAutocomplete(
|
||||
req.query.query,
|
||||
req.query.limit,
|
||||
req.query.offset,
|
||||
);
|
||||
|
||||
res.status(200).send(payload);
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/files/{id}:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Files]
|
||||
* summary: Get selected item
|
||||
* description: Get selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: ID of item to get
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Selected item successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Files"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await FilesDBApi.findBy({ id: req.params.id });
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.use('/', require('../helpers').commonErrorHandler);
|
||||
|
||||
module.exports = router;
|
||||
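The files routes mirror the conversions routes. For completeness, a sketch of calling the autocomplete endpoint; the base URL and token are placeholders, not values from this commit.

// Hypothetical client call: query/limit/offset are read straight from req.query.
async function autocompleteFiles() {
  const BASE = 'http://localhost:8080'; // placeholder
  const token = process.env.API_TOKEN; // placeholder JWT from /api/auth/signin/local

  const res = await fetch(
    `${BASE}/api/files/autocomplete?query=report&limit=10&offset=0`,
    { headers: { Authorization: `Bearer ${token}` } },
  );
  return res.json();
}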
180
backend/src/routes/openai.js
Normal file
180
backend/src/routes/openai.js
Normal file
@ -0,0 +1,180 @@
|
||||
const express = require('express');
|
||||
const db = require('../db/models');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
const router = express.Router();
|
||||
const sjs = require('sequelize-json-schema');
|
||||
const { getWidget } = require('../services/openai');
|
||||
const RolesService = require('../services/roles');
|
||||
const RolesDBApi = require('../db/api/roles');
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/roles-info/{infoId}:
|
||||
* delete:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Remove role information by ID
|
||||
* description: Remove specific role information by ID
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: infoId
|
||||
* description: ID of role information to remove
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* - in: query
|
||||
* name: userId
|
||||
* description: ID of the user
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* - in: query
|
||||
* name: key
|
||||
* description: Key of the role information to remove
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Role information successfully removed
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* user:
|
||||
* type: string
|
||||
* description: The user information
|
||||
* 400:
|
||||
* description: Invalid ID or key supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Role not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
|
||||
router.delete(
|
||||
'/roles-info/:infoId',
|
||||
wrapAsync(async (req, res) => {
|
||||
const role = await RolesService.removeRoleInfoById(
|
||||
req.query.infoId,
|
||||
req.query.roleId,
|
||||
req.query.key,
|
||||
req.currentUser,
|
||||
);
|
||||
|
||||
res.status(200).send(role);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/role-info/{roleId}:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Get role information by key
|
||||
* description: Get specific role information by key
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: roleId
|
||||
* description: ID of role to get information for
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* - in: query
|
||||
* name: key
|
||||
* description: Key of the role information to retrieve
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Role information successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* info:
|
||||
* type: string
|
||||
* description: The role information
|
||||
* 400:
|
||||
* description: Invalid ID or key supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Role not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
|
||||
router.get(
|
||||
'/info-by-key',
|
||||
wrapAsync(async (req, res) => {
|
||||
const roleId = req.query.roleId;
|
||||
const key = req.query.key;
|
||||
const currentUser = req.currentUser;
|
||||
let info = await RolesService.getRoleInfoByKey(key, roleId, currentUser);
|
||||
const role = await RolesDBApi.findBy({ id: roleId });
|
||||
if (!role?.role_customization) {
|
||||
await Promise.all(
|
||||
['pie', 'bar'].map(async (e) => {
|
||||
const schema = await sjs.getSequelizeSchema(db.sequelize, {});
|
||||
const payload = {
|
||||
description: `Create some cool ${e} chart`,
|
||||
modelDefinition: schema.definitions,
|
||||
};
|
||||
const widgetId = await getWidget(payload, currentUser?.id, roleId);
|
||||
if (widgetId) {
|
||||
await RolesService.addRoleInfo(
|
||||
roleId,
|
||||
currentUser?.id,
|
||||
'widgets',
|
||||
widgetId,
|
||||
req.currentUser,
|
||||
);
|
||||
}
|
||||
}),
|
||||
);
|
||||
info = await RolesService.getRoleInfoByKey(key, roleId, currentUser);
|
||||
}
|
||||
res.status(200).send(info);
|
||||
}),
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/create_widget',
|
||||
wrapAsync(async (req, res) => {
|
||||
const { description, userId, roleId } = req.body;
|
||||
|
||||
const currentUser = req.currentUser;
|
||||
const schema = await sjs.getSequelizeSchema(db.sequelize, {});
|
||||
const payload = {
|
||||
description,
|
||||
modelDefinition: schema.definitions,
|
||||
};
|
||||
|
||||
const widgetId = await getWidget(payload, userId, roleId);
|
||||
|
||||
if (widgetId) {
|
||||
await RolesService.addRoleInfo(
|
||||
roleId,
|
||||
userId,
|
||||
'widgets',
|
||||
widgetId,
|
||||
currentUser,
|
||||
);
|
||||
|
||||
return res.status(200).send(widgetId);
|
||||
} else {
|
||||
return res.status(400).send(widgetId);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
module.exports = router;
|
||||
0
backend/src/routes/organizationLogin.js
Normal file
442
backend/src/routes/permissions.js
Normal file
@ -0,0 +1,442 @@
|
||||
const express = require('express');
|
||||
|
||||
const PermissionsService = require('../services/permissions');
|
||||
const PermissionsDBApi = require('../db/api/permissions');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const { parse } = require('json2csv');
|
||||
|
||||
const { checkCrudPermissions } = require('../middlewares/check-permissions');
|
||||
|
||||
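// Every route below requires CRUD permissions on the 'permissions' entity.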
router.use(checkCrudPermissions('permissions'));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Permissions:
|
||||
* type: object
|
||||
* properties:
|
||||
|
||||
* name:
|
||||
* type: string
|
||||
* default: name
|
||||
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* tags:
|
||||
* name: Permissions
|
||||
* description: The Permissions managing API
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Add new item
|
||||
* description: Add new item
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
 *              description: Data of the new item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully added
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
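    // Resolve the calling host from the Referer header (or the request's own URL) and pass it to the service.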
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await PermissionsService.create(
|
||||
req.body.data,
|
||||
req.currentUser,
|
||||
true,
|
||||
link.host,
|
||||
);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
 * /api/permissions/bulk-import:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Bulk import items
|
||||
* description: Bulk import items
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
 *              description: Data of the items to import
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully imported
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*
|
||||
*/
|
||||
router.post(
|
||||
'/bulk-import',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await PermissionsService.bulkImport(req, res, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions/{id}:
|
||||
* put:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Update the data of the selected item
|
||||
* description: Update the data of the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to update
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* requestBody:
|
||||
* description: Set new item data
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* id:
|
||||
* description: ID of the updated item
|
||||
* type: string
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* required:
|
||||
* - id
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item data was successfully updated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.put(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await PermissionsService.update(
|
||||
req.body.data,
|
||||
req.body.id,
|
||||
req.currentUser,
|
||||
);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions/{id}:
|
||||
* delete:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Delete the selected item
|
||||
* description: Delete the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to delete
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.delete(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await PermissionsService.remove(req.params.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions/deleteByIds:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Delete the selected item list
|
||||
* description: Delete the selected item list
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* ids:
|
||||
 *              description: IDs of the items to delete
|
||||
* type: array
|
||||
* responses:
|
||||
* 200:
|
||||
 *        description: The items were successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Items not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/deleteByIds',
|
||||
wrapAsync(async (req, res) => {
|
||||
await PermissionsService.deleteByIds(req.body.data, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Get all permissions
|
||||
* description: Get all permissions
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Permissions list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const filetype = req.query.filetype;
|
||||
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await PermissionsDBApi.findAll(req.query, { currentUser });
|
||||
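    // When ?filetype=csv is requested, export the listed fields as a CSV attachment instead of JSON.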
if (filetype && filetype === 'csv') {
|
||||
const fields = ['id', 'name'];
|
||||
const opts = { fields };
|
||||
try {
|
||||
const csv = parse(payload.rows, opts);
|
||||
        res.status(200).attachment('permissions.csv'); // attach with a filename, not the CSV body
|
||||
res.send(csv);
|
||||
} catch (err) {
|
||||
        console.error(err);
        res.status(500).send('Failed to export CSV');
|
||||
}
|
||||
} else {
|
||||
res.status(200).send(payload);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions/count:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Count all permissions
|
||||
* description: Count all permissions
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Permissions count successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/count',
|
||||
wrapAsync(async (req, res) => {
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await PermissionsDBApi.findAll(req.query, null, {
|
||||
countOnly: true,
|
||||
currentUser,
|
||||
});
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions/autocomplete:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Find all permissions that match search criteria
|
||||
* description: Find all permissions that match search criteria
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Permissions list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get('/autocomplete', async (req, res) => {
|
||||
const payload = await PermissionsDBApi.findAllAutocomplete(
|
||||
req.query.query,
|
||||
req.query.limit,
|
||||
req.query.offset,
|
||||
);
|
||||
|
||||
res.status(200).send(payload);
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/permissions/{id}:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Permissions]
|
||||
* summary: Get selected item
|
||||
* description: Get selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: ID of item to get
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Selected item successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Permissions"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await PermissionsDBApi.findBy({ id: req.params.id });
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.use('/', require('../helpers').commonErrorHandler);
|
||||
|
||||
module.exports = router;
|
||||
106
backend/src/routes/pexels.js
Normal file
@ -0,0 +1,106 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const { pexelsKey, pexelsQuery } = require('../config');
|
||||
const fetch = require('node-fetch');
|
||||
|
||||
const KEY = pexelsKey;
|
||||
|
||||
router.get('/image', async (req, res) => {
|
||||
const headers = {
|
||||
Authorization: `${KEY}`,
|
||||
};
|
||||
const query = pexelsQuery || 'nature';
|
||||
const orientation = 'portrait';
|
||||
const perPage = 1;
|
||||
const url = `https://api.pexels.com/v1/search?query=${query}&orientation=${orientation}&per_page=${perPage}&page=1`;
|
||||
|
||||
try {
|
||||
const response = await fetch(url, { headers });
|
||||
const data = await response.json();
|
||||
res.status(200).json(data.photos[0]);
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: 'Failed to fetch image' });
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/video', async (req, res) => {
|
||||
const headers = {
|
||||
Authorization: `${KEY}`,
|
||||
};
|
||||
const query = pexelsQuery || 'nature';
|
||||
const orientation = 'portrait';
|
||||
const perPage = 1;
|
||||
const url = `https://api.pexels.com/videos/search?query=${query}&orientation=${orientation}&per_page=${perPage}&page=1`;
|
||||
|
||||
try {
|
||||
const response = await fetch(url, { headers });
|
||||
const data = await response.json();
|
||||
res.status(200).json(data.videos[0]);
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: 'Failed to fetch video' });
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/multiple-images', async (req, res) => {
|
||||
const headers = {
|
||||
Authorization: `${KEY}`,
|
||||
};
|
||||
|
||||
const queries = req.query.queries
|
||||
? req.query.queries.split(',')
|
||||
: ['home', 'apple', 'pizza', 'mountains', 'cat'];
|
||||
const orientation = 'square';
|
||||
const perPage = 1;
|
||||
|
||||
const fallbackImage = {
|
||||
src: 'https://images.pexels.com/photos/8199252/pexels-photo-8199252.jpeg',
|
||||
photographer: 'Yan Krukau',
|
||||
photographer_url: 'https://www.pexels.com/@yankrukov',
|
||||
};
|
||||
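  // If a Pexels query fails or returns nothing, fall back to a random picsum.photos image,
  // and to a hard-coded photo if even that request fails.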
const fetchFallbackImage = async () => {
|
||||
try {
|
||||
const response = await fetch('https://picsum.photos/600');
|
||||
return {
|
||||
src: response.url,
|
||||
photographer: 'Random Picsum',
|
||||
photographer_url: 'https://picsum.photos/',
|
||||
};
|
||||
} catch (error) {
|
||||
return fallbackImage;
|
||||
}
|
||||
};
|
||||
const fetchImage = async (query) => {
|
||||
const url = `https://api.pexels.com/v1/search?query=${query}&orientation=${orientation}&per_page=${perPage}&page=1`;
|
||||
const response = await fetch(url, { headers });
|
||||
const data = await response.json();
|
||||
return data.photos[0] || null;
|
||||
};
|
||||
|
||||
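  // Fetch all queries in parallel; Promise.allSettled keeps one failed query from failing the whole response.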
const imagePromises = queries.map((query) => fetchImage(query));
|
||||
const imagesResults = await Promise.allSettled(imagePromises);
|
||||
|
||||
const formattedImages = await Promise.all(
|
||||
imagesResults.map(async (result) => {
|
||||
if (result.status === 'fulfilled' && result.value) {
|
||||
const image = result.value;
|
||||
return {
|
||||
src: image.src?.original || fallbackImage.src,
|
||||
photographer: image.photographer || fallbackImage.photographer,
|
||||
photographer_url:
|
||||
image.photographer_url || fallbackImage.photographer_url,
|
||||
};
|
||||
} else {
|
||||
const fallback = await fetchFallbackImage();
|
||||
return {
|
||||
src: fallback.src || '',
|
||||
photographer: fallback.photographer || 'Unknown',
|
||||
photographer_url: fallback.photographer_url || '',
|
||||
};
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
res.json(formattedImages);
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
433
backend/src/routes/roles.js
Normal file
@ -0,0 +1,433 @@
|
||||
const express = require('express');
|
||||
|
||||
const RolesService = require('../services/roles');
|
||||
const RolesDBApi = require('../db/api/roles');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const { parse } = require('json2csv');
|
||||
|
||||
const { checkCrudPermissions } = require('../middlewares/check-permissions');
|
||||
|
||||
router.use(checkCrudPermissions('roles'));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Roles:
|
||||
* type: object
|
||||
* properties:
|
||||
|
||||
* name:
|
||||
* type: string
|
||||
* default: name
|
||||
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* tags:
|
||||
* name: Roles
|
||||
* description: The Roles managing API
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Add new item
|
||||
* description: Add new item
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
 *              description: Data of the new item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully added
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await RolesService.create(req.body.data, req.currentUser, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
 * /api/roles/bulk-import:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Bulk import items
|
||||
* description: Bulk import items
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
 *              description: Data of the items to import
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully imported
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*
|
||||
*/
|
||||
router.post(
|
||||
'/bulk-import',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await RolesService.bulkImport(req, res, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/{id}:
|
||||
* put:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Update the data of the selected item
|
||||
* description: Update the data of the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to update
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* requestBody:
|
||||
* description: Set new item data
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* id:
|
||||
* description: ID of the updated item
|
||||
* type: string
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* required:
|
||||
* - id
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item data was successfully updated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.put(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await RolesService.update(req.body.data, req.body.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/{id}:
|
||||
* delete:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Delete the selected item
|
||||
* description: Delete the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to delete
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.delete(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await RolesService.remove(req.params.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/deleteByIds:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Delete the selected item list
|
||||
* description: Delete the selected item list
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* ids:
|
||||
 *              description: IDs of the items to delete
|
||||
* type: array
|
||||
* responses:
|
||||
* 200:
|
||||
 *        description: The items were successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Items not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/deleteByIds',
|
||||
wrapAsync(async (req, res) => {
|
||||
await RolesService.deleteByIds(req.body.data, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Get all roles
|
||||
* description: Get all roles
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Roles list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const filetype = req.query.filetype;
|
||||
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await RolesDBApi.findAll(req.query, { currentUser });
|
||||
if (filetype && filetype === 'csv') {
|
||||
const fields = ['id', 'name'];
|
||||
const opts = { fields };
|
||||
try {
|
||||
const csv = parse(payload.rows, opts);
|
||||
        res.status(200).attachment('roles.csv'); // attach with a filename, not the CSV body
|
||||
res.send(csv);
|
||||
} catch (err) {
|
||||
        console.error(err);
        res.status(500).send('Failed to export CSV');
|
||||
}
|
||||
} else {
|
||||
res.status(200).send(payload);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/count:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Count all roles
|
||||
* description: Count all roles
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Roles count successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/count',
|
||||
wrapAsync(async (req, res) => {
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await RolesDBApi.findAll(req.query, null, {
|
||||
countOnly: true,
|
||||
currentUser,
|
||||
});
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/autocomplete:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Find all roles that match search criteria
|
||||
* description: Find all roles that match search criteria
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Roles list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get('/autocomplete', async (req, res) => {
|
||||
const payload = await RolesDBApi.findAllAutocomplete(
|
||||
req.query.query,
|
||||
req.query.limit,
|
||||
req.query.offset,
|
||||
);
|
||||
|
||||
res.status(200).send(payload);
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/roles/{id}:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Roles]
|
||||
* summary: Get selected item
|
||||
* description: Get selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: ID of item to get
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Selected item successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Roles"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await RolesDBApi.findBy({ id: req.params.id });
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.use('/', require('../helpers').commonErrorHandler);
|
||||
|
||||
module.exports = router;
|
||||
54
backend/src/routes/search.js
Normal file
@ -0,0 +1,54 @@
|
||||
const express = require('express');
|
||||
const SearchService = require('../services/search');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const { checkCrudPermissions } = require('../middlewares/check-permissions');
|
||||
router.use(checkCrudPermissions('search'));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* path:
|
||||
* /api/search:
|
||||
* post:
|
||||
* summary: Search
|
||||
* description: Search results across multiple tables
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* searchQuery:
|
||||
* type: string
|
||||
* required:
|
||||
* - searchQuery
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Successful request
|
||||
* 400:
|
||||
* description: Invalid request
|
||||
* 500:
|
||||
* description: Internal server error
|
||||
*/
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
const { searchQuery } = req.body;
|
||||
|
||||
if (!searchQuery) {
|
||||
return res.status(400).json({ error: 'Please enter a search query' });
|
||||
}
|
||||
|
||||
try {
|
||||
const foundMatches = await SearchService.search(
|
||||
searchQuery,
|
||||
req.currentUser,
|
||||
);
|
||||
res.json(foundMatches);
|
||||
} catch (error) {
|
||||
console.error('Internal Server Error', error);
|
||||
res.status(500).json({ error: 'Internal Server Error' });
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
444
backend/src/routes/users.js
Normal file
@ -0,0 +1,444 @@
|
||||
const express = require('express');
|
||||
|
||||
const UsersService = require('../services/users');
|
||||
const UsersDBApi = require('../db/api/users');
|
||||
const wrapAsync = require('../helpers').wrapAsync;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const { parse } = require('json2csv');
|
||||
|
||||
const { checkCrudPermissions } = require('../middlewares/check-permissions');
|
||||
|
||||
router.use(checkCrudPermissions('users'));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Users:
|
||||
* type: object
|
||||
* properties:
|
||||
|
||||
* firstName:
|
||||
* type: string
|
||||
* default: firstName
|
||||
* lastName:
|
||||
* type: string
|
||||
* default: lastName
|
||||
* phoneNumber:
|
||||
* type: string
|
||||
* default: phoneNumber
|
||||
* email:
|
||||
* type: string
|
||||
* default: email
|
||||
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* tags:
|
||||
* name: Users
|
||||
* description: The Users managing API
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Add new item
|
||||
* description: Add new item
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
 *              description: Data of the new item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully added
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await UsersService.create(req.body.data, req.currentUser, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
 * /api/users/bulk-import:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Bulk import items
|
||||
* description: Bulk import items
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* data:
|
||||
 *              description: Data of the items to import
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The items were successfully imported
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 405:
|
||||
* description: Invalid input data
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*
|
||||
*/
|
||||
router.post(
|
||||
'/bulk-import',
|
||||
wrapAsync(async (req, res) => {
|
||||
const referer =
|
||||
req.headers.referer ||
|
||||
`${req.protocol}://${req.hostname}${req.originalUrl}`;
|
||||
const link = new URL(referer);
|
||||
await UsersService.bulkImport(req, res, true, link.host);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users/{id}:
|
||||
* put:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Update the data of the selected item
|
||||
* description: Update the data of the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to update
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* requestBody:
|
||||
* description: Set new item data
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* id:
|
||||
* description: ID of the updated item
|
||||
* type: string
|
||||
* data:
|
||||
* description: Data of the updated item
|
||||
* type: object
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* required:
|
||||
* - id
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item data was successfully updated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.put(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await UsersService.update(req.body.data, req.body.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users/{id}:
|
||||
* delete:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Delete the selected item
|
||||
* description: Delete the selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: Item ID to delete
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The item was successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.delete(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
await UsersService.remove(req.params.id, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users/deleteByIds:
|
||||
* post:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Delete the selected item list
|
||||
* description: Delete the selected item list
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* properties:
|
||||
* ids:
|
||||
 *              description: IDs of the items to delete
|
||||
* type: array
|
||||
* responses:
|
||||
* 200:
|
||||
 *        description: The items were successfully deleted
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Items not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.post(
|
||||
'/deleteByIds',
|
||||
wrapAsync(async (req, res) => {
|
||||
await UsersService.deleteByIds(req.body.data, req.currentUser);
|
||||
const payload = true;
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Get all users
|
||||
* description: Get all users
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Users list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/',
|
||||
wrapAsync(async (req, res) => {
|
||||
const filetype = req.query.filetype;
|
||||
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await UsersDBApi.findAll(req.query, { currentUser });
|
||||
if (filetype && filetype === 'csv') {
|
||||
const fields = ['id', 'firstName', 'lastName', 'phoneNumber', 'email'];
|
||||
const opts = { fields };
|
||||
try {
|
||||
const csv = parse(payload.rows, opts);
|
||||
        res.status(200).attachment('users.csv'); // attach with a filename, not the CSV body
|
||||
res.send(csv);
|
||||
} catch (err) {
|
||||
        console.error(err);
        res.status(500).send('Failed to export CSV');
|
||||
}
|
||||
} else {
|
||||
res.status(200).send(payload);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users/count:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Count all users
|
||||
* description: Count all users
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Users count successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/count',
|
||||
wrapAsync(async (req, res) => {
|
||||
const currentUser = req.currentUser;
|
||||
const payload = await UsersDBApi.findAll(req.query, null, {
|
||||
countOnly: true,
|
||||
currentUser,
|
||||
});
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users/autocomplete:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Find all users that match search criteria
|
||||
* description: Find all users that match search criteria
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Users list successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Data not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get('/autocomplete', async (req, res) => {
|
||||
const payload = await UsersDBApi.findAllAutocomplete(
|
||||
req.query.query,
|
||||
req.query.limit,
|
||||
req.query.offset,
|
||||
);
|
||||
|
||||
res.status(200).send(payload);
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/users/{id}:
|
||||
* get:
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* tags: [Users]
|
||||
* summary: Get selected item
|
||||
* description: Get selected item
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* description: ID of item to get
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Selected item successfully received
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: "#/components/schemas/Users"
|
||||
* 400:
|
||||
* description: Invalid ID supplied
|
||||
* 401:
|
||||
* $ref: "#/components/responses/UnauthorizedError"
|
||||
* 404:
|
||||
* description: Item not found
|
||||
* 500:
|
||||
* description: Some server error
|
||||
*/
|
||||
router.get(
|
||||
'/:id',
|
||||
wrapAsync(async (req, res) => {
|
||||
const payload = await UsersDBApi.findBy({ id: req.params.id });
|
||||
|
||||
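    // Never expose the password hash in the API response.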
delete payload.password;
|
||||
|
||||
res.status(200).send(payload);
|
||||
}),
|
||||
);
|
||||
|
||||
router.use('/', require('../helpers').commonErrorHandler);
|
||||
|
||||
module.exports = router;
|
||||
226
backend/src/services/auth.js
Normal file
@ -0,0 +1,226 @@
|
||||
const UsersDBApi = require('../db/api/users');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
const ForbiddenError = require('./notifications/errors/forbidden');
|
||||
const bcrypt = require('bcrypt');
|
||||
const EmailAddressVerificationEmail = require('./email/list/addressVerification');
|
||||
const InvitationEmail = require('./email/list/invitation');
|
||||
const PasswordResetEmail = require('./email/list/passwordReset');
|
||||
const EmailSender = require('./email');
|
||||
const config = require('../config');
|
||||
const helpers = require('../helpers');
const db = require('../db/models'); // used for the transaction in updateProfile
|
||||
|
||||
class Auth {
|
||||
static async signup(email, password, options = {}, host) {
|
||||
const user = await UsersDBApi.findBy({ email });
|
||||
|
||||
const hashedPassword = await bcrypt.hash(
|
||||
password,
|
||||
config.bcrypt.saltRounds,
|
||||
);
|
||||
|
||||
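    // An existing row without authenticationUid has no credentials yet,
    // so set its password instead of creating a duplicate account.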
if (user) {
|
||||
if (user.authenticationUid) {
|
||||
throw new ValidationError('auth.emailAlreadyInUse');
|
||||
}
|
||||
|
||||
if (user.disabled) {
|
||||
throw new ValidationError('auth.userDisabled');
|
||||
}
|
||||
|
||||
await UsersDBApi.updatePassword(user.id, hashedPassword, options);
|
||||
|
||||
if (EmailSender.isConfigured) {
|
||||
await this.sendEmailAddressVerificationEmail(user.email, host);
|
||||
}
|
||||
|
||||
const data = {
|
||||
user: {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
},
|
||||
};
|
||||
|
||||
return helpers.jwtSign(data);
|
||||
}
|
||||
|
||||
const newUser = await UsersDBApi.createFromAuth(
|
||||
{
|
||||
firstName: email.split('@')[0],
|
||||
password: hashedPassword,
|
||||
email: email,
|
||||
},
|
||||
options,
|
||||
);
|
||||
|
||||
if (EmailSender.isConfigured) {
|
||||
await this.sendEmailAddressVerificationEmail(newUser.email, host);
|
||||
}
|
||||
|
||||
const data = {
|
||||
user: {
|
||||
id: newUser.id,
|
||||
email: newUser.email,
|
||||
},
|
||||
};
|
||||
|
||||
return helpers.jwtSign(data);
|
||||
}
|
||||
|
||||
static async signin(email, password, options = {}) {
|
||||
const user = await UsersDBApi.findBy({ email });
|
||||
|
||||
if (!user) {
|
||||
throw new ValidationError('auth.userNotFound');
|
||||
}
|
||||
|
||||
if (user.disabled) {
|
||||
throw new ValidationError('auth.userDisabled');
|
||||
}
|
||||
|
||||
if (!user.password) {
|
||||
throw new ValidationError('auth.wrongPassword');
|
||||
}
|
||||
|
||||
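    // With no mail transport configured, verification emails can never be sent,
    // so treat the address as verified.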
if (!EmailSender.isConfigured) {
|
||||
user.emailVerified = true;
|
||||
}
|
||||
|
||||
if (!user.emailVerified) {
|
||||
throw new ValidationError('auth.userNotVerified');
|
||||
}
|
||||
|
||||
const passwordsMatch = await bcrypt.compare(password, user.password);
|
||||
|
||||
if (!passwordsMatch) {
|
||||
throw new ValidationError('auth.wrongPassword');
|
||||
}
|
||||
|
||||
const data = {
|
||||
user: {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
},
|
||||
};
|
||||
|
||||
return helpers.jwtSign(data);
|
||||
}
|
||||
|
||||
static async sendEmailAddressVerificationEmail(email, host) {
|
||||
let link;
|
||||
try {
|
||||
const token = await UsersDBApi.generateEmailVerificationToken(email);
|
||||
link = `${host}/verify-email?token=${token}`;
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
throw new ValidationError('auth.emailAddressVerificationEmail.error');
|
||||
}
|
||||
|
||||
const emailAddressVerificationEmail = new EmailAddressVerificationEmail(
|
||||
email,
|
||||
link,
|
||||
);
|
||||
|
||||
return new EmailSender(emailAddressVerificationEmail).send();
|
||||
}
|
||||
|
||||
static async sendPasswordResetEmail(email, type = 'register', host) {
|
||||
let link;
|
||||
|
||||
try {
|
||||
const token = await UsersDBApi.generatePasswordResetToken(email);
|
||||
link = `${host}/password-reset?token=${token}`;
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
throw new ValidationError('auth.passwordReset.error');
|
||||
}
|
||||
|
||||
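    // 'register' uses the password-reset template, 'invitation' the invitation template;
    // both embed the same reset link.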
let passwordResetEmail;
|
||||
if (type === 'register') {
|
||||
passwordResetEmail = new PasswordResetEmail(email, link);
|
||||
}
|
||||
if (type === 'invitation') {
|
||||
passwordResetEmail = new InvitationEmail(email, link);
|
||||
}
|
||||
|
||||
return new EmailSender(passwordResetEmail).send();
|
||||
}
|
||||
|
||||
static async verifyEmail(token, options = {}) {
|
||||
const user = await UsersDBApi.findByEmailVerificationToken(token, options);
|
||||
|
||||
if (!user) {
|
||||
throw new ValidationError(
|
||||
'auth.emailAddressVerificationEmail.invalidToken',
|
||||
);
|
||||
}
|
||||
|
||||
return UsersDBApi.markEmailVerified(user.id, options);
|
||||
}
|
||||
|
||||
static async passwordUpdate(currentPassword, newPassword, options) {
|
||||
const currentUser = options.currentUser || null;
|
||||
if (!currentUser) {
|
||||
throw new ForbiddenError();
|
||||
}
|
||||
|
||||
const currentPasswordMatch = await bcrypt.compare(
|
||||
currentPassword,
|
||||
currentUser.password,
|
||||
);
|
||||
|
||||
if (!currentPasswordMatch) {
|
||||
throw new ValidationError('auth.wrongPassword');
|
||||
}
|
||||
|
||||
const newPasswordMatch = await bcrypt.compare(
|
||||
newPassword,
|
||||
currentUser.password,
|
||||
);
|
||||
|
||||
if (newPasswordMatch) {
|
||||
throw new ValidationError('auth.passwordUpdate.samePassword');
|
||||
}
|
||||
|
||||
const hashedPassword = await bcrypt.hash(
|
||||
newPassword,
|
||||
config.bcrypt.saltRounds,
|
||||
);
|
||||
|
||||
return UsersDBApi.updatePassword(currentUser.id, hashedPassword, options);
|
||||
}
|
||||
|
||||
static async passwordReset(token, password, options = {}) {
|
||||
const user = await UsersDBApi.findByPasswordResetToken(token, options);
|
||||
|
||||
if (!user) {
|
||||
throw new ValidationError('auth.passwordReset.invalidToken');
|
||||
}
|
||||
|
||||
const hashedPassword = await bcrypt.hash(
|
||||
password,
|
||||
config.bcrypt.saltRounds,
|
||||
);
|
||||
|
||||
return UsersDBApi.updatePassword(user.id, hashedPassword, options);
|
||||
}
|
||||
|
||||
static async updateProfile(data, currentUser) {
|
||||
    const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await UsersDBApi.findBy({ id: currentUser.id }, { transaction });
|
||||
|
||||
await UsersDBApi.update(currentUser.id, data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Auth;
|
||||
114
backend/src/services/conversions.js
Normal file
@ -0,0 +1,114 @@
|
||||
const db = require('../db/models');
|
||||
const ConversionsDBApi = require('../db/api/conversions');
|
||||
const processFile = require('../middlewares/upload');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
const csv = require('csv-parser');
|
||||
const axios = require('axios');
|
||||
const config = require('../config');
|
||||
const stream = require('stream');
|
||||
|
||||
module.exports = class ConversionsService {
|
||||
static async create(data, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
await ConversionsDBApi.create(data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async bulkImport(req, res, sendInvitationEmails = true, host) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
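      // Parse the uploaded CSV in memory, then insert all rows inside a single transaction.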
await processFile(req, res);
|
||||
const bufferStream = new stream.PassThrough();
|
||||
const results = [];
|
||||
|
||||
await bufferStream.end(Buffer.from(req.file.buffer, 'utf-8')); // convert Buffer to Stream
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
bufferStream
|
||||
.pipe(csv())
|
||||
.on('data', (data) => results.push(data))
|
||||
.on('end', async () => {
|
||||
console.log('CSV results', results);
|
||||
resolve();
|
||||
})
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
|
||||
await ConversionsDBApi.bulkImport(results, {
|
||||
transaction,
|
||||
ignoreDuplicates: true,
|
||||
validate: true,
|
||||
currentUser: req.currentUser,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async update(data, id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
let conversions = await ConversionsDBApi.findBy({ id }, { transaction });
|
||||
|
||||
if (!conversions) {
|
||||
throw new ValidationError('conversionsNotFound');
|
||||
}
|
||||
|
||||
const updatedConversions = await ConversionsDBApi.update(id, data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
return updatedConversions;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await ConversionsDBApi.deleteByIds(ids, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async remove(id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await ConversionsDBApi.remove(id, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
52
backend/src/services/email/htmlTemplates/addressVerification/emailAddressVerification.html
Normal file
@ -0,0 +1,52 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style>
|
||||
.email-container {
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: #ffffff;
|
||||
border: 1px solid #e2e8f0;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.email-header {
|
||||
background-color: #3498db;
|
||||
color: #fff;
|
||||
padding: 16px;
|
||||
text-align: center;
|
||||
}
|
||||
.email-body {
|
||||
padding: 16px;
|
||||
}
|
||||
.email-footer {
|
||||
padding: 16px;
|
||||
background-color: #f7fafc;
|
||||
text-align: center;
|
||||
color: #4a5568;
|
||||
font-size: 14px;
|
||||
}
|
||||
.link-primary {
|
||||
color: #3498db;
|
||||
text-decoration: none;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="email-container">
|
||||
<div class="email-header">Verify your email for {appTitle}!</div>
|
||||
<div class="email-body">
|
||||
<p>Hello,</p>
|
||||
<p>Follow this link to verify your email address.</p>
|
||||
<p>
|
||||
If you didn't ask to verify this address, you can ignore this email.
|
||||
</p>
|
||||
<p><a href="{signupUrl}" class="link-primary">{signupUrl}</a></p>
|
||||
</div>
|
||||
<div class="email-footer">
|
||||
Thanks,<br />
|
||||
The {appTitle} Team
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
56
backend/src/services/email/htmlTemplates/invitation/invitationTemplate.html
Normal file
@ -0,0 +1,56 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style>
|
||||
.email-container {
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: #ffffff;
|
||||
border: 1px solid #e2e8f0;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.email-header {
|
||||
background-color: #3498db;
|
||||
color: #fff;
|
||||
padding: 16px;
|
||||
text-align: center;
|
||||
}
|
||||
.email-body {
|
||||
padding: 16px;
|
||||
}
|
||||
.email-footer {
|
||||
padding: 16px;
|
||||
background-color: #f7fafc;
|
||||
text-align: center;
|
||||
color: #4a5568;
|
||||
font-size: 14px;
|
||||
}
|
||||
.btn-primary {
|
||||
background-color: #3498db;
|
||||
color: #fff !important;
|
||||
padding: 8px 16px;
|
||||
border-radius: 4px;
|
||||
text-decoration: none;
|
||||
display: inline-block;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="email-container">
|
||||
<div class="email-header">Welcome to {appTitle}!</div>
|
||||
<div class="email-body">
|
||||
<p>Hello,</p>
|
||||
<p>
|
||||
You've been invited to join {appTitle}. Please click the button below
|
||||
to set up your account.
|
||||
</p>
|
||||
<a href="{signupUrl}" class="btn-primary">Set up account</a>
|
||||
</div>
|
||||
<div class="email-footer">
|
||||
Thanks,<br />
|
||||
The {appTitle} Team
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
55
backend/src/services/email/htmlTemplates/passwordReset/passwordResetEmail.html
Normal file
@ -0,0 +1,55 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style>
|
||||
.email-container {
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: #ffffff;
|
||||
border: 1px solid #e2e8f0;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.email-header {
|
||||
background-color: #3498db;
|
||||
color: #fff;
|
||||
padding: 16px;
|
||||
text-align: center;
|
||||
}
|
||||
.email-body {
|
||||
padding: 16px;
|
||||
}
|
||||
.email-footer {
|
||||
padding: 16px;
|
||||
background-color: #f7fafc;
|
||||
text-align: center;
|
||||
color: #4a5568;
|
||||
font-size: 14px;
|
||||
}
|
||||
.link-primary {
|
||||
color: #3498db;
|
||||
text-decoration: none;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="email-container">
|
||||
<div class="email-header">Reset your password for {appTitle}</div>
|
||||
<div class="email-body">
|
||||
<p>Hello,</p>
|
||||
<p>
|
||||
Follow this link to reset your {appTitle} password for your
|
||||
{accountName} account.
|
||||
</p>
|
||||
<p><a href="{resetUrl}" class="link-primary">{resetUrl}</a></p>
|
||||
<p>
|
||||
If you didn't ask to reset your password, you can ignore this email.
|
||||
</p>
|
||||
</div>
|
||||
<div class="email-footer">
|
||||
Thanks,<br />
|
||||
The {appTitle} Team
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
41
backend/src/services/email/index.js
Normal file
@ -0,0 +1,41 @@
|
||||
const config = require('../../config');
|
||||
const assert = require('assert');
|
||||
const nodemailer = require('nodemailer');
|
||||
|
||||
module.exports = class EmailSender {
|
||||
constructor(email) {
|
||||
this.email = email;
|
||||
}
|
||||
|
||||
async send() {
|
||||
assert(this.email, 'email is required');
|
||||
assert(this.email.to, 'email.to is required');
|
||||
assert(this.email.subject, 'email.subject is required');
|
||||
assert(this.email.html, 'email.html is required');
|
||||
|
||||
const htmlContent = await this.email.html();
|
||||
|
||||
const transporter = nodemailer.createTransport(this.transportConfig);
|
||||
|
||||
const mailOptions = {
|
||||
from: this.from,
|
||||
to: this.email.to,
|
||||
subject: this.email.subject,
|
||||
html: htmlContent,
|
||||
};
|
||||
|
||||
return transporter.sendMail(mailOptions);
|
||||
}
|
||||
|
||||
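  // True only when config.email provides auth credentials (user and pass); callers skip sending mail otherwise.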
static get isConfigured() {
|
||||
return !!config.email?.auth?.pass && !!config.email?.auth?.user;
|
||||
}
|
||||
|
||||
get transportConfig() {
|
||||
return config.email;
|
||||
}
|
||||
|
||||
get from() {
|
||||
return config.email.from;
|
||||
}
|
||||
};
|
||||
41
backend/src/services/email/list/addressVerification.js
Normal file
@ -0,0 +1,41 @@
|
||||
const { getNotification } = require('../../notifications/helpers');
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
module.exports = class EmailAddressVerificationEmail {
|
||||
constructor(to, link) {
|
||||
this.to = to;
|
||||
this.link = link;
|
||||
}
|
||||
|
||||
get subject() {
|
||||
return getNotification(
|
||||
'emails.emailAddressVerification.subject',
|
||||
getNotification('app.title'),
|
||||
);
|
||||
}
|
||||
|
||||
async html() {
|
||||
try {
|
||||
const templatePath = path.join(
|
||||
__dirname,
|
||||
'../../email/htmlTemplates/addressVerification/emailAddressVerification.html',
|
||||
);
|
||||
|
||||
const template = await fs.readFile(templatePath, 'utf8');
|
||||
|
||||
const appTitle = getNotification('app.title');
|
||||
const signupUrl = this.link;
|
||||
|
||||
let html = template
|
||||
.replace(/{appTitle}/g, appTitle)
|
||||
.replace(/{signupUrl}/g, signupUrl)
|
||||
.replace(/{to}/g, this.to);
|
||||
|
||||
return html;
|
||||
} catch (error) {
|
||||
console.error('Error generating email address verification email HTML:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
41
backend/src/services/email/list/invitation.js
Normal file
41
backend/src/services/email/list/invitation.js
Normal file
@ -0,0 +1,41 @@
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const { getNotification } = require('../../notifications/helpers');
|
||||
|
||||
module.exports = class InvitationEmail {
|
||||
constructor(to, host) {
|
||||
this.to = to;
|
||||
this.host = host;
|
||||
}
|
||||
|
||||
get subject() {
|
||||
return getNotification(
|
||||
'emails.invitation.subject',
|
||||
getNotification('app.title'),
|
||||
);
|
||||
}
|
||||
|
||||
async html() {
|
||||
try {
|
||||
const templatePath = path.join(
|
||||
__dirname,
|
||||
'../../email/htmlTemplates/invitation/invitationTemplate.html',
|
||||
);
|
||||
|
||||
const template = await fs.readFile(templatePath, 'utf8');
|
||||
|
||||
const appTitle = getNotification('app.title');
|
||||
const signupUrl = `${this.host}&invitation=true`;
|
||||
|
||||
let html = template
|
||||
.replace(/{appTitle}/g, appTitle)
|
||||
.replace(/{signupUrl}/g, signupUrl)
|
||||
.replace(/{to}/g, this.to);
|
||||
|
||||
return html;
|
||||
} catch (error) {
|
||||
console.error('Error generating invitation email HTML:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
42
backend/src/services/email/list/passwordReset.js
Normal file
42
backend/src/services/email/list/passwordReset.js
Normal file
@ -0,0 +1,42 @@
|
||||
const { getNotification } = require('../../notifications/helpers');
|
||||
const path = require('path');
|
||||
const { promises: fs } = require('fs');
|
||||
|
||||
module.exports = class PasswordResetEmail {
|
||||
constructor(to, link) {
|
||||
this.to = to;
|
||||
this.link = link;
|
||||
}
|
||||
|
||||
get subject() {
|
||||
return getNotification(
|
||||
'emails.passwordReset.subject',
|
||||
getNotification('app.title'),
|
||||
);
|
||||
}
|
||||
|
||||
async html() {
|
||||
try {
|
||||
const templatePath = path.join(
|
||||
__dirname,
|
||||
'../../email/htmlTemplates/passwordReset/passwordResetEmail.html',
|
||||
);
|
||||
|
||||
const template = await fs.readFile(templatePath, 'utf8');
|
||||
|
||||
const appTitle = getNotification('app.title');
|
||||
const resetUrl = this.link;
|
||||
const accountName = this.to;
|
||||
|
||||
let html = template
|
||||
.replace(/{appTitle}/g, appTitle)
|
||||
.replace(/{resetUrl}/g, resetUrl)
|
||||
.replace(/{accountName}/g, accountName);
|
||||
|
||||
return html;
|
||||
} catch (error) {
|
||||
console.error('Error generating password reset email HTML:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
202
backend/src/services/file.js
Normal file
202
backend/src/services/file.js
Normal file
@ -0,0 +1,202 @@
|
||||
const formidable = require('formidable');
|
||||
const fs = require('fs');
|
||||
const config = require('../config');
|
||||
const path = require('path');
|
||||
const { format } = require('util');
|
||||
|
||||
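// Recursively create any missing parent directories for the given file path.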
const ensureDirectoryExistence = (filePath) => {
|
||||
const dirname = path.dirname(filePath);
|
||||
|
||||
if (fs.existsSync(dirname)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
ensureDirectoryExistence(dirname);
|
||||
fs.mkdirSync(dirname);
|
||||
};
|
||||
|
||||
const uploadLocal = (
|
||||
folder,
|
||||
validations = {
|
||||
entity: null,
|
||||
maxFileSize: null,
|
||||
folderIncludesAuthenticationUid: false,
|
||||
},
|
||||
) => {
|
||||
return (req, res) => {
|
||||
if (!req.currentUser) {
|
||||
res.sendStatus(403);
|
||||
return;
|
||||
}
|
||||
|
||||
if (validations.entity) {
|
||||
res.sendStatus(403);
|
||||
return;
|
||||
}
|
||||
|
||||
if (validations.folderIncludesAuthenticationUid) {
|
||||
folder = folder.replace(':userId', req.currentUser.authenticationUid);
|
||||
if (
|
||||
!req.currentUser.authenticationUid ||
|
||||
!folder.includes(req.currentUser.authenticationUid)
|
||||
) {
|
||||
res.sendStatus(403);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const form = new formidable.IncomingForm();
|
||||
form.uploadDir = config.uploadDir;
|
||||
|
||||
if (validations && validations.maxFileSize) {
|
||||
form.maxFileSize = validations.maxFileSize;
|
||||
}
|
||||
|
||||
form.parse(req, function (err, fields, files) {
|
||||
const filename = String(fields.filename);
|
||||
const fileTempUrl = files.file.path;
|
||||
|
||||
if (!filename) {
|
||||
fs.unlinkSync(fileTempUrl);
|
||||
res.sendStatus(500);
|
||||
return;
|
||||
}
|
||||
|
||||
const privateUrl = path.join(form.uploadDir, folder, filename);
|
||||
ensureDirectoryExistence(privateUrl);
|
||||
fs.renameSync(fileTempUrl, privateUrl);
|
||||
res.sendStatus(200);
|
||||
});
|
||||
|
||||
form.on('error', function (err) {
|
||||
res.status(500).send(err);
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
const downloadLocal = async (req, res) => {
|
||||
const privateUrl = req.query.privateUrl;
|
||||
if (!privateUrl) {
|
||||
return res.sendStatus(404);
|
||||
}
|
||||
res.download(path.join(config.uploadDir, privateUrl));
|
||||
};
|
||||
|
||||
const initGCloud = () => {
|
||||
const processFile = require('../middlewares/upload');
|
||||
const { Storage } = require('@google-cloud/storage');
|
||||
|
||||
const crypto = require('crypto');
|
||||
const hash = config.gcloud.hash;
|
||||
|
||||
const privateKey = process.env.GC_PRIVATE_KEY.replace(/\\\n/g, '\n');
|
||||
|
||||
const storage = new Storage({
|
||||
projectId: process.env.GC_PROJECT_ID,
|
||||
credentials: {
|
||||
client_email: process.env.GC_CLIENT_EMAIL,
|
||||
private_key: privateKey,
|
||||
},
|
||||
});
|
||||
|
||||
const bucket = storage.bucket(config.gcloud.bucket);
|
||||
return { hash, bucket, processFile };
|
||||
};
|
||||
|
||||
const uploadGCloud = async (folder, req, res) => {
|
||||
try {
|
||||
const { hash, bucket, processFile } = initGCloud();
|
||||
await processFile(req, res);
|
||||
if (!req.file) {
|
||||
return res.status(400).send({ message: 'Please upload a file!' });
|
||||
}
|
||||
|
||||
let buffer = req.file.buffer;
|
||||
let filename = req.body.filename;
|
||||
|
||||
let path = `${hash}/${folder}/${filename}`;
|
||||
let blob = bucket.file(path);
|
||||
|
||||
console.log(path);
|
||||
|
||||
const blobStream = blob.createWriteStream({
|
||||
resumable: false,
|
||||
});
|
||||
|
||||
blobStream.on('error', (err) => {
|
||||
console.log('Upload error');
|
||||
console.log(err.message);
|
||||
res.status(500).send({ message: err.message });
|
||||
});
|
||||
|
||||
console.log(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
|
||||
|
||||
blobStream.on('finish', async (data) => {
|
||||
const publicUrl = format(
|
||||
`https://storage.googleapis.com/${bucket.name}/${blob.name}`,
|
||||
);
|
||||
|
||||
res.status(200).send({
|
||||
message: 'Uploaded the file successfully: ' + path,
|
||||
url: publicUrl,
|
||||
});
|
||||
});
|
||||
|
||||
blobStream.end(buffer);
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
|
||||
res.status(500).send({
|
||||
message: `Could not upload the file. ${err}`,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const downloadGCloud = async (req, res) => {
|
||||
try {
|
||||
const { hash, bucket, processFile } = initGCloud();
|
||||
|
||||
const privateUrl = await req.query.privateUrl;
|
||||
const filePath = `${hash}/${privateUrl}`;
|
||||
const file = bucket.file(filePath);
|
||||
const fileExists = await file.exists();
|
||||
|
||||
if (fileExists[0]) {
|
||||
const stream = file.createReadStream();
|
||||
stream.pipe(res);
|
||||
} else {
|
||||
res.status(404).send({
|
||||
message: 'Could not download the file. File not found.',
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
res.status(404).send({
|
||||
message: 'Could not download the file. ' + err,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const deleteGCloud = async (privateUrl) => {
|
||||
try {
|
||||
const { hash, bucket, processFile } = initGCloud();
|
||||
const filePath = `${hash}/${privateUrl}`;
|
||||
|
||||
const file = bucket.file(filePath);
|
||||
const fileExists = await file.exists();
|
||||
|
||||
if (fileExists[0]) {
|
||||
file.delete();
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(`Cannot find the file ${privateUrl}`);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
initGCloud,
|
||||
uploadLocal,
|
||||
downloadLocal,
|
||||
deleteGCloud,
|
||||
uploadGCloud,
|
||||
downloadGCloud,
|
||||
};
|
||||
114
backend/src/services/files.js
Normal file
114
backend/src/services/files.js
Normal file
@ -0,0 +1,114 @@
|
||||
const db = require('../db/models');
|
||||
const FilesDBApi = require('../db/api/files');
|
||||
const processFile = require('../middlewares/upload');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
const csv = require('csv-parser');
|
||||
const axios = require('axios');
|
||||
const config = require('../config');
|
||||
const stream = require('stream');
|
||||
|
||||
module.exports = class FilesService {
|
||||
static async create(data, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
await FilesDBApi.create(data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async bulkImport(req, res, sendInvitationEmails = true, host) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await processFile(req, res);
|
||||
const bufferStream = new stream.PassThrough();
|
||||
const results = [];
|
||||
|
||||
await bufferStream.end(Buffer.from(req.file.buffer, 'utf-8')); // convert Buffer to Stream
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
bufferStream
|
||||
.pipe(csv())
|
||||
.on('data', (data) => results.push(data))
|
||||
.on('end', async () => {
|
||||
console.log('CSV results', results);
|
||||
resolve();
|
||||
})
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
|
||||
await FilesDBApi.bulkImport(results, {
|
||||
transaction,
|
||||
ignoreDuplicates: true,
|
||||
validate: true,
|
||||
currentUser: req.currentUser,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async update(data, id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
let files = await FilesDBApi.findBy({ id }, { transaction });
|
||||
|
||||
if (!files) {
|
||||
throw new ValidationError('filesNotFound');
|
||||
}
|
||||
|
||||
const updatedFiles = await FilesDBApi.update(id, data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
return updatedFiles;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await FilesDBApi.deleteByIds(ids, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async remove(id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await FilesDBApi.remove(id, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
16
backend/src/services/notifications/errors/forbidden.js
Normal file
16
backend/src/services/notifications/errors/forbidden.js
Normal file
@ -0,0 +1,16 @@
|
||||
const { getNotification, isNotification } = require('../helpers');
|
||||
|
||||
module.exports = class ForbiddenError extends Error {
|
||||
constructor(messageCode) {
|
||||
let message;
|
||||
|
||||
if (messageCode && isNotification(messageCode)) {
|
||||
message = getNotification(messageCode);
|
||||
}
|
||||
|
||||
message = message || getNotification('errors.forbidden.message');
|
||||
|
||||
super(message);
|
||||
this.code = 403;
|
||||
}
|
||||
};
|
||||
16
backend/src/services/notifications/errors/validation.js
Normal file
16
backend/src/services/notifications/errors/validation.js
Normal file
@ -0,0 +1,16 @@
|
||||
const { getNotification, isNotification } = require('../helpers');
|
||||
|
||||
module.exports = class ValidationError extends Error {
|
||||
constructor(messageCode) {
|
||||
let message;
|
||||
|
||||
if (messageCode && isNotification(messageCode)) {
|
||||
message = getNotification(messageCode);
|
||||
}
|
||||
|
||||
message = message || getNotification('errors.validation.message');
|
||||
|
||||
super(message);
|
||||
this.code = 400;
|
||||
}
|
||||
};
|
||||
30
backend/src/services/notifications/helpers.js
Normal file
30
backend/src/services/notifications/helpers.js
Normal file
@ -0,0 +1,30 @@
|
||||
const _get = require('lodash/get');
|
||||
const errors = require('./list');
|
||||
|
||||
function format(message, args) {
|
||||
if (!message) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return message.replace(/{(\d+)}/g, function (match, number) {
|
||||
return typeof args[number] != 'undefined' ? args[number] : match;
|
||||
});
|
||||
}
|
||||
|
||||
const isNotification = (key) => {
|
||||
const message = _get(errors, key);
|
||||
return !!message;
|
||||
};
|
||||
|
||||
const getNotification = (key, ...args) => {
|
||||
const message = _get(errors, key);
|
||||
|
||||
if (!message) {
|
||||
return key;
|
||||
}
|
||||
|
||||
return format(message, args);
|
||||
};
|
||||
|
||||
exports.getNotification = getNotification;
|
||||
exports.isNotification = isNotification;
|
||||
100
backend/src/services/notifications/list.js
Normal file
100
backend/src/services/notifications/list.js
Normal file
@ -0,0 +1,100 @@
|
||||
const errors = {
|
||||
app: {
|
||||
title: 'i want to create a website for pdf & image convert',
|
||||
},
|
||||
|
||||
auth: {
|
||||
userDisabled: 'Your account is disabled',
|
||||
forbidden: 'Forbidden',
|
||||
unauthorized: 'Unauthorized',
|
||||
userNotFound: `Sorry, we don't recognize your credentials`,
|
||||
wrongPassword: `Sorry, we don't recognize your credentials`,
|
||||
weakPassword: 'This password is too weak',
|
||||
emailAlreadyInUse: 'Email is already in use',
|
||||
invalidEmail: 'Please provide a valid email',
|
||||
passwordReset: {
|
||||
invalidToken: 'Password reset link is invalid or has expired',
|
||||
error: `Email not recognized`,
|
||||
},
|
||||
passwordUpdate: {
|
||||
samePassword: `You can't use the same password. Please create new password`,
|
||||
},
|
||||
userNotVerified: `Sorry, your email has not been verified yet`,
|
||||
emailAddressVerificationEmail: {
|
||||
invalidToken: 'Email verification link is invalid or has expired',
|
||||
error: `Email not recognized`,
|
||||
},
|
||||
},
|
||||
|
||||
iam: {
|
||||
errors: {
|
||||
userAlreadyExists: 'User with this email already exists',
|
||||
userNotFound: 'User not found',
|
||||
disablingHimself: `You can't disable yourself`,
|
||||
revokingOwnPermission: `You can't revoke your own owner permission`,
|
||||
deletingHimself: `You can't delete yourself`,
|
||||
emailRequired: 'Email is required',
|
||||
},
|
||||
},
|
||||
|
||||
importer: {
|
||||
errors: {
|
||||
invalidFileEmpty: 'The file is empty',
|
||||
invalidFileExcel: 'Only excel (.xlsx) files are allowed',
|
||||
invalidFileUpload:
|
||||
'Invalid file. Make sure you are using the last version of the template.',
|
||||
importHashRequired: 'Import hash is required',
|
||||
importHashExistent: 'Data has already been imported',
|
||||
userEmailMissing: 'Some items in the CSV do not have an email',
|
||||
},
|
||||
},
|
||||
|
||||
errors: {
|
||||
forbidden: {
|
||||
message: 'Forbidden',
|
||||
},
|
||||
validation: {
|
||||
message: 'An error occurred',
|
||||
},
|
||||
searchQueryRequired: {
|
||||
message: 'Search query is required',
|
||||
},
|
||||
},
|
||||
|
||||
emails: {
|
||||
invitation: {
|
||||
subject: `You've been invited to {0}`,
|
||||
body: `
|
||||
<p>Hello,</p>
|
||||
<p>You've been invited to {0} set password for your {1} account.</p>
|
||||
<p><a href='{2}'>{2}</a></p>
|
||||
<p>Thanks,</p>
|
||||
<p>Your {0} team</p>
|
||||
`,
|
||||
},
|
||||
emailAddressVerification: {
|
||||
subject: `Verify your email for {0}`,
|
||||
body: `
|
||||
<p>Hello,</p>
|
||||
<p>Follow this link to verify your email address.</p>
|
||||
<p><a href='{0}'>{0}</a></p>
|
||||
<p>If you didn't ask to verify this address, you can ignore this email.</p>
|
||||
<p>Thanks,</p>
|
||||
<p>Your {1} team</p>
|
||||
`,
|
||||
},
|
||||
passwordReset: {
|
||||
subject: `Reset your password for {0}`,
|
||||
body: `
|
||||
<p>Hello,</p>
|
||||
<p>Follow this link to reset your {0} password for your {1} account.</p>
|
||||
<p><a href='{2}'>{2}</a></p>
|
||||
<p>If you didn't ask to reset your password, you can ignore this email.</p>
|
||||
<p>Thanks,</p>
|
||||
<p>Your {0} team</p>
|
||||
`,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = errors;
|
||||
22
backend/src/services/openai.js
Normal file
22
backend/src/services/openai.js
Normal file
@ -0,0 +1,22 @@
|
||||
const axios = require('axios');
|
||||
const { v4: uuid } = require('uuid');
|
||||
const RoleService = require('./roles');
|
||||
const config = require('../config');
|
||||
|
||||
module.exports = class OpenAiService {
|
||||
static async getWidget(payload, userId, roleId) {
|
||||
const response = await axios.post(
|
||||
`${config.flHost}/${config.project_uuid}/project_customization_widgets.json`,
|
||||
payload,
|
||||
);
|
||||
|
||||
if (response.status >= 200 && response.status < 300) {
|
||||
const { widget_id } = await response.data;
|
||||
await RoleService.addRoleInfo(roleId, userId, 'widgets', widget_id);
|
||||
return widget_id;
|
||||
} else {
|
||||
console.error('=======error=======', response.data);
|
||||
return { value: null, error: response.data };
|
||||
}
|
||||
}
|
||||
};
|
||||
114
backend/src/services/permissions.js
Normal file
114
backend/src/services/permissions.js
Normal file
@ -0,0 +1,114 @@
|
||||
const db = require('../db/models');
|
||||
const PermissionsDBApi = require('../db/api/permissions');
|
||||
const processFile = require('../middlewares/upload');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
const csv = require('csv-parser');
|
||||
const axios = require('axios');
|
||||
const config = require('../config');
|
||||
const stream = require('stream');
|
||||
|
||||
module.exports = class PermissionsService {
|
||||
static async create(data, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
await PermissionsDBApi.create(data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async bulkImport(req, res, sendInvitationEmails = true, host) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await processFile(req, res);
|
||||
const bufferStream = new stream.PassThrough();
|
||||
const results = [];
|
||||
|
||||
await bufferStream.end(Buffer.from(req.file.buffer, 'utf-8')); // convert Buffer to Stream
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
bufferStream
|
||||
.pipe(csv())
|
||||
.on('data', (data) => results.push(data))
|
||||
.on('end', async () => {
|
||||
console.log('CSV results', results);
|
||||
resolve();
|
||||
})
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
|
||||
await PermissionsDBApi.bulkImport(results, {
|
||||
transaction,
|
||||
ignoreDuplicates: true,
|
||||
validate: true,
|
||||
currentUser: req.currentUser,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async update(data, id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
let permissions = await PermissionsDBApi.findBy({ id }, { transaction });
|
||||
|
||||
if (!permissions) {
|
||||
throw new ValidationError('permissionsNotFound');
|
||||
}
|
||||
|
||||
const updatedPermissions = await PermissionsDBApi.update(id, data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
return updatedPermissions;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await PermissionsDBApi.deleteByIds(ids, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async remove(id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await PermissionsDBApi.remove(id, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
430
backend/src/services/roles.js
Normal file
430
backend/src/services/roles.js
Normal file
@ -0,0 +1,430 @@
|
||||
const db = require('../db/models');
|
||||
const RolesDBApi = require('../db/api/roles');
|
||||
const processFile = require('../middlewares/upload');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
const csv = require('csv-parser');
|
||||
const axios = require('axios');
|
||||
const config = require('../config');
|
||||
const stream = require('stream');
|
||||
|
||||
function buildWidgetResult(widget, queryResult, queryString) {
|
||||
if (queryResult[0] && queryResult[0].length) {
|
||||
const key = Object.keys(queryResult[0][0])[0];
|
||||
const value =
|
||||
widget.widget_type === 'scalar' ? queryResult[0][0][key] : queryResult[0];
|
||||
const widgetData = JSON.parse(widget.data);
|
||||
return { ...widget, ...widgetData, value, query: queryString };
|
||||
} else {
|
||||
return { ...widget, value: [], query: queryString };
|
||||
}
|
||||
}
|
||||
|
||||
async function executeQuery(queryString, currentUser) {
|
||||
try {
|
||||
return await db.sequelize.query(queryString, {
|
||||
replacements: { organizationId: currentUser.organizationId },
|
||||
});
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function insertWhereConditions(queryString, whereConditions) {
|
||||
if (!whereConditions) return queryString;
|
||||
|
||||
const whereIndex = queryString.toLowerCase().indexOf('where');
|
||||
const groupByIndex = queryString.toLowerCase().indexOf('group by');
|
||||
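// Insert right after an existing WHERE, otherwise before GROUP BY (if any), otherwise at the end of the query.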
const insertIndex =
|
||||
whereIndex === -1
|
||||
? groupByIndex !== -1
|
||||
? groupByIndex
|
||||
: queryString.length
|
||||
: whereIndex + 5;
|
||||
|
||||
const prefix = queryString.substring(0, insertIndex);
|
||||
const suffix = queryString.substring(insertIndex);
|
||||
const conditionString =
|
||||
whereIndex === -1
|
||||
? ` WHERE ${whereConditions} `
|
||||
: ` ${whereConditions} AND `;
|
||||
|
||||
return `${prefix}${conditionString}${suffix}`;
|
||||
}
|
||||
|
||||
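// Scope widget queries to the current tenant: add an organizationId filter unless the user has global access (permissions/roles tables excluded) and exclude soft-deleted rows (users table excluded).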
function constructWhereConditions(mainTable, currentUser, replacements) {
|
||||
const {
|
||||
organizationId,
|
||||
app_role: { globalAccess },
|
||||
} = currentUser;
|
||||
const tablesWithoutOrgId = ['permissions', 'roles'];
|
||||
let whereConditions = '';
|
||||
|
||||
if (!globalAccess && !tablesWithoutOrgId.includes(mainTable)) {
|
||||
whereConditions += `"${mainTable}"."organizationId" = :organizationId`;
|
||||
replacements.organizationId = organizationId;
|
||||
}
|
||||
|
||||
if (mainTable !== 'users') {
|
||||
whereConditions += whereConditions ? ' AND ' : '';
|
||||
whereConditions += `"${mainTable}"."deletedAt" IS NULL`;
|
||||
}
|
||||
|
||||
return whereConditions;
|
||||
}
|
||||
|
||||
function extractTableName(queryString) {
|
||||
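// Capture the first table name that follows FROM, optionally wrapped in double quotes.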
const tableNameRegex = /FROM\s+("?)([^"\s]+)\1\s*/i;
|
||||
const match = tableNameRegex.exec(queryString);
|
||||
return match ? match[2] : null;
|
||||
}
|
||||
|
||||
function buildQueryString(widget, currentUser) {
|
||||
let queryString = widget?.query || '';
|
||||
const tableName = extractTableName(queryString);
|
||||
const mainTable = JSON.parse(widget?.data)?.main_table || tableName;
|
||||
const replacements = {};
|
||||
const whereConditions = constructWhereConditions(
|
||||
mainTable,
|
||||
currentUser,
|
||||
replacements,
|
||||
);
|
||||
queryString = insertWhereConditions(queryString, whereConditions);
|
||||
console.log(queryString, 'queryString');
|
||||
return queryString;
|
||||
}
|
||||
|
||||
async function constructWidgetsResults(widgets, currentUser) {
|
||||
const widgetsResults = [];
|
||||
for (const widget of widgets) {
|
||||
if (!widget) continue;
|
||||
const queryString = buildQueryString(widget, currentUser);
|
||||
const queryResult = await executeQuery(queryString, currentUser);
|
||||
widgetsResults.push(buildWidgetResult(widget, queryResult, queryString));
|
||||
}
|
||||
return widgetsResults;
|
||||
}
|
||||
|
||||
async function fetchWidgetsData(widgets) {
|
||||
const widgetPromises = (widgets || []).map((widgetId) =>
|
||||
axios.get(
|
||||
`${config.flHost}/${config.project_uuid}/project_customization_widgets/${widgetId}.json`,
|
||||
),
|
||||
);
|
||||
const widgetResults = widgetPromises
|
||||
? await Promise.allSettled(widgetPromises)
|
||||
: [];
|
||||
return widgetResults
|
||||
.filter((result) => result.status === 'fulfilled')
|
||||
.map((result) => result.value.data);
|
||||
}
|
||||
|
||||
async function processWidgets(widgets, currentUser) {
|
||||
const widgetData = await fetchWidgetsData(widgets);
|
||||
return constructWidgetsResults(widgetData, currentUser);
|
||||
}
|
||||
|
||||
function parseCustomization(role) {
|
||||
try {
|
||||
return JSON.parse(role.role_customization || '{}');
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
async function findRole(roleId, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
const role = roleId
|
||||
? await RolesDBApi.findBy({ id: roleId }, { transaction })
|
||||
: await RolesDBApi.findBy({ name: 'User' }, { transaction });
|
||||
await transaction.commit();
|
||||
return role;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = class RolesService {
|
||||
static async create(data, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
await RolesDBApi.create(data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async bulkImport(req, res, sendInvitationEmails = true, host) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await processFile(req, res);
|
||||
const bufferStream = new stream.PassThrough();
|
||||
const results = [];
|
||||
|
||||
await bufferStream.end(Buffer.from(req.file.buffer, 'utf-8')); // convert Buffer to Stream
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
bufferStream
|
||||
.pipe(csv())
|
||||
.on('data', (data) => results.push(data))
|
||||
.on('end', async () => {
|
||||
console.log('CSV results', results);
|
||||
resolve();
|
||||
})
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
|
||||
await RolesDBApi.bulkImport(results, {
|
||||
transaction,
|
||||
ignoreDuplicates: true,
|
||||
validate: true,
|
||||
currentUser: req.currentUser,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async update(data, id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
try {
|
||||
let roles = await RolesDBApi.findBy({ id }, { transaction });
|
||||
|
||||
if (!roles) {
|
||||
throw new ValidationError('rolesNotFound');
|
||||
}
|
||||
|
||||
const updatedRoles = await RolesDBApi.update(id, data, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
return updatedRoles;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async deleteByIds(ids, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await RolesDBApi.deleteByIds(ids, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async remove(id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
await RolesDBApi.remove(id, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async addRoleInfo(roleId, userId, key, widgetId, currentUser) {
|
||||
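// Only persist widgetId into the role customization when it looks like a valid UUID.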
const regexExpForUuid =
|
||||
/^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/gi;
|
||||
const widgetIdIsUUID = regexExpForUuid.test(widgetId);
|
||||
|
||||
const transaction = await db.sequelize.transaction();
|
||||
let role;
|
||||
if (roleId) {
|
||||
role = await RolesDBApi.findBy({ id: roleId }, { transaction });
|
||||
} else {
|
||||
role = await RolesDBApi.findBy({ name: 'User' }, { transaction });
|
||||
}
|
||||
|
||||
if (!role) {
|
||||
throw new ValidationError('rolesNotFound');
|
||||
}
|
||||
|
||||
try {
|
||||
let customization = {};
|
||||
try {
|
||||
customization = JSON.parse(role.role_customization || '{}');
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
if (widgetIdIsUUID && Array.isArray(customization[key])) {
|
||||
const el = customization[key].find((e) => e === widgetId);
|
||||
!el ? customization[key].unshift(widgetId) : null;
|
||||
}
|
||||
|
||||
if (widgetIdIsUUID && !customization[key]) {
|
||||
customization[key] = [widgetId];
|
||||
}
|
||||
|
||||
const newRole = await RolesDBApi.update(
|
||||
role.id,
|
||||
{
|
||||
role_customization: JSON.stringify(customization),
|
||||
name: role.name,
|
||||
permissions: role.permissions,
|
||||
},
|
||||
{
|
||||
currentUser,
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
|
||||
return newRole;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async removeRoleInfoById(infoId, roleId, key, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
let role;
|
||||
if (roleId) {
|
||||
role = await RolesDBApi.findBy({ id: roleId }, { transaction });
|
||||
} else {
|
||||
role = await RolesDBApi.findBy({ name: 'User' }, { transaction });
|
||||
}
|
||||
if (!role) {
|
||||
await transaction.rollback();
|
||||
throw new ValidationError('rolesNotFound');
|
||||
}
|
||||
|
||||
let customization = {};
|
||||
try {
|
||||
customization = JSON.parse(role.role_customization || '{}');
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
customization[key] = customization[key].filter((item) => item !== infoId);
|
||||
|
||||
const response = await axios.delete(
|
||||
`${config.flHost}/${config.project_uuid}/project_customization_widgets/${infoId}.json`,
|
||||
);
|
||||
const { status } = await response;
|
||||
try {
|
||||
const result = await RolesDBApi.update(
|
||||
role.id,
|
||||
{
|
||||
role_customization: JSON.stringify(customization),
|
||||
name: role.name,
|
||||
permissions: role.permissions,
|
||||
},
|
||||
{
|
||||
currentUser,
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
return result;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async getRoleInfoByKey(key, roleId, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
const organizationId = currentUser.organizationId;
|
||||
let globalAccess = currentUser.app_role?.globalAccess;
|
||||
let queryString = '';
|
||||
|
||||
let role;
|
||||
try {
|
||||
if (roleId) {
|
||||
role = await RolesDBApi.findBy({ id: roleId }, { transaction });
|
||||
} else {
|
||||
role = await RolesDBApi.findBy({ name: 'User' }, { transaction });
|
||||
}
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
|
||||
let customization = {};
|
||||
|
||||
try {
|
||||
customization = JSON.parse(role.role_customization || '{}');
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
if (key === 'widgets') {
|
||||
const widgets = customization[key] || [];
|
||||
const widgetArray = widgets.map((widget) => {
|
||||
return axios.get(
|
||||
`${config.flHost}/${config.project_uuid}/project_customization_widgets/${widget}.json`,
|
||||
);
|
||||
});
|
||||
const widgetResults = await Promise.allSettled(widgetArray);
|
||||
|
||||
const fulfilledWidgets = widgetResults
|
||||
.filter((result) => result.status === 'fulfilled')
|
||||
.map((result) => result.value.data);
|
||||
|
||||
const widgetsResults = [];
|
||||
|
||||
if (Array.isArray(fulfilledWidgets)) {
|
||||
for (const widget of fulfilledWidgets) {
|
||||
let result = [];
|
||||
try {
|
||||
result = await db.sequelize.query(widget.query);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
if (result[0] && result[0].length) {
|
||||
const key = Object.keys(result[0][0])[0];
|
||||
const value =
|
||||
widget.widget_type === 'scalar' ? result[0][0][key] : result[0];
|
||||
const widgetData = JSON.parse(widget.data);
|
||||
widgetsResults.push({ ...widget, ...widgetData, value });
|
||||
} else {
|
||||
widgetsResults.push({ ...widget, value: null });
|
||||
}
|
||||
}
|
||||
}
|
||||
return widgetsResults;
|
||||
}
|
||||
return customization[key];
|
||||
}
|
||||
};
|
||||
133
backend/src/services/search.js
Normal file
133
backend/src/services/search.js
Normal file
@ -0,0 +1,133 @@
|
||||
const db = require('../db/models');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
|
||||
const Sequelize = db.Sequelize;
|
||||
const Op = Sequelize.Op;
|
||||
|
||||
/**
|
||||
* @param {string} permission
|
||||
* @param {object} currentUser
|
||||
*/
|
||||
async function checkPermissions(permission, currentUser) {
|
||||
if (!currentUser) {
|
||||
throw new ValidationError('auth.unauthorized');
|
||||
}
|
||||
|
||||
const userPermission = currentUser.custom_permissions.find(
|
||||
(cp) => cp.name === permission,
|
||||
);
|
||||
|
||||
if (userPermission) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
if (!currentUser.app_role) {
|
||||
throw new ValidationError('auth.forbidden');
|
||||
}
|
||||
|
||||
const permissions = await currentUser.app_role.getPermissions();
|
||||
|
||||
return !!permissions.find((p) => p.name === permission);
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = class SearchService {
|
||||
static async search(searchQuery, currentUser) {
|
||||
try {
|
||||
if (!searchQuery) {
|
||||
throw new ValidationError('errors.searchQueryRequired.message');
|
||||
}
|
||||
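// Searchable text columns per table; the READ_<TABLE> permission check below decides which tables the current user may search.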
const tableColumns = {
|
||||
users: ['firstName', 'lastName', 'phoneNumber', 'email'],
|
||||
|
||||
conversions: ['file_name'],
|
||||
|
||||
files: ['file_name'],
|
||||
};
|
||||
const columnsInt = {};
|
||||
|
||||
let allFoundRecords = [];
|
||||
|
||||
for (const tableName in tableColumns) {
|
||||
if (tableColumns.hasOwnProperty(tableName)) {
|
||||
const attributesToSearch = tableColumns[tableName];
|
||||
const attributesIntToSearch = columnsInt[tableName] || [];
|
||||
const whereCondition = {
|
||||
[Op.or]: [
|
||||
...attributesToSearch.map((attribute) => ({
|
||||
[attribute]: {
|
||||
[Op.iLike]: `%${searchQuery}%`,
|
||||
},
|
||||
})),
|
||||
...attributesIntToSearch.map((attribute) =>
|
||||
Sequelize.where(
|
||||
Sequelize.cast(
|
||||
Sequelize.col(`${tableName}.${attribute}`),
|
||||
'varchar',
|
||||
),
|
||||
{ [Op.iLike]: `%${searchQuery}%` },
|
||||
),
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
const hasPermission = await checkPermissions(
|
||||
`READ_${tableName.toUpperCase()}`,
|
||||
currentUser,
|
||||
);
|
||||
if (!hasPermission) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const foundRecords = await db[tableName].findAll({
|
||||
where: whereCondition,
|
||||
attributes: [
|
||||
...tableColumns[tableName],
|
||||
'id',
|
||||
...attributesIntToSearch,
|
||||
],
|
||||
});
|
||||
|
||||
const modifiedRecords = foundRecords.map((record) => {
|
||||
const matchAttribute = [];
|
||||
|
||||
for (const attribute of attributesToSearch) {
|
||||
if (
|
||||
record[attribute]
|
||||
?.toLowerCase()
|
||||
?.includes(searchQuery.toLowerCase())
|
||||
) {
|
||||
matchAttribute.push(attribute);
|
||||
}
|
||||
}
|
||||
|
||||
for (const attribute of attributesIntToSearch) {
|
||||
const castedValue = String(record[attribute]);
|
||||
if (
|
||||
castedValue &&
|
||||
castedValue.toLowerCase().includes(searchQuery.toLowerCase())
|
||||
) {
|
||||
matchAttribute.push(attribute);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...record.get(),
|
||||
matchAttribute,
|
||||
tableName,
|
||||
};
|
||||
});
|
||||
|
||||
allFoundRecords = allFoundRecords.concat(modifiedRecords);
|
||||
}
|
||||
}
|
||||
|
||||
return allFoundRecords;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
152
backend/src/services/users.js
Normal file
152
backend/src/services/users.js
Normal file
@ -0,0 +1,152 @@
|
||||
const db = require('../db/models');
|
||||
const UsersDBApi = require('../db/api/users');
|
||||
const processFile = require('../middlewares/upload');
|
||||
const ValidationError = require('./notifications/errors/validation');
|
||||
const csv = require('csv-parser');
|
||||
const axios = require('axios');
|
||||
const config = require('../config');
|
||||
const stream = require('stream');
|
||||
|
||||
const InvitationEmail = require('./email/list/invitation');
|
||||
const EmailSender = require('./email');
|
||||
const AuthService = require('./auth');
|
||||
|
||||
module.exports = class UsersService {
|
||||
static async create(data, currentUser, sendInvitationEmails = true, host) {
|
||||
let transaction = await db.sequelize.transaction();
|
||||
|
||||
let email = data.email;
|
||||
let emailsToInvite = [];
|
||||
try {
|
||||
if (email) {
|
||||
let user = await UsersDBApi.findBy({ email }, { transaction });
|
||||
if (user) {
|
||||
throw new ValidationError('iam.errors.userAlreadyExists');
|
||||
} else {
|
||||
await UsersDBApi.create(
|
||||
{ data },
|
||||
|
||||
{
|
||||
currentUser,
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
emailsToInvite.push(email);
|
||||
}
|
||||
} else {
|
||||
throw new ValidationError('iam.errors.emailRequired');
|
||||
}
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
if (emailsToInvite && emailsToInvite.length) {
|
||||
if (!sendInvitationEmails) return;
|
||||
|
||||
AuthService.sendPasswordResetEmail(email, 'invitation', host);
|
||||
}
|
||||
}
|
||||
|
||||
static async bulkImport(req, res, sendInvitationEmails = true, host) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
let emailsToInvite = [];
|
||||
|
||||
try {
|
||||
await processFile(req, res);
|
||||
const bufferStream = new stream.PassThrough();
|
||||
const results = [];
|
||||
|
||||
await bufferStream.end(Buffer.from(req.file.buffer, 'utf-8')); // convert Buffer to Stream
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
bufferStream
|
||||
.pipe(csv())
|
||||
.on('data', (data) => results.push(data))
|
||||
.on('end', () => {
|
||||
console.log('results csv', results);
|
||||
resolve();
|
||||
})
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
|
||||
const hasAllEmails = results.every((result) => result.email);
|
||||
|
||||
if (!hasAllEmails) {
|
||||
throw new ValidationError('importer.errors.userEmailMissing');
|
||||
}
|
||||
|
||||
await UsersDBApi.bulkImport(results, {
|
||||
transaction,
|
||||
ignoreDuplicates: true,
|
||||
validate: true,
|
||||
currentUser: req.currentUser,
|
||||
});
|
||||
|
||||
emailsToInvite = results.map((result) => result.email);
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (emailsToInvite && emailsToInvite.length && sendInvitationEmails) {
|
||||
emailsToInvite.forEach((email) => {
|
||||
AuthService.sendPasswordResetEmail(email, 'invitation', host);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
static async update(data, id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
let users = await UsersDBApi.findBy({ id }, { transaction });
|
||||
|
||||
if (!users) {
|
||||
throw new ValidationError('iam.errors.userNotFound');
|
||||
}
|
||||
|
||||
const updatedUser = await UsersDBApi.update(
|
||||
id,
|
||||
data,
|
||||
|
||||
{
|
||||
currentUser,
|
||||
transaction,
|
||||
},
|
||||
);
|
||||
|
||||
await transaction.commit();
|
||||
return updatedUser;
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async remove(id, currentUser) {
|
||||
const transaction = await db.sequelize.transaction();
|
||||
|
||||
try {
|
||||
if (currentUser.id === id) {
|
||||
throw new ValidationError('iam.errors.deletingHimself');
|
||||
}
|
||||
|
||||
if (currentUser.app_role?.name !== config.roles.admin) {
|
||||
throw new ValidationError('errors.forbidden.message');
|
||||
}
|
||||
|
||||
await UsersDBApi.remove(id, {
|
||||
currentUser,
|
||||
transaction,
|
||||
});
|
||||
|
||||
await transaction.commit();
|
||||
} catch (error) {
|
||||
await transaction.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
4470
backend/yarn.lock
Normal file
4470
backend/yarn.lock
Normal file
File diff suppressed because it is too large
14
cloudbuild.yaml
Normal file
14
cloudbuild.yaml
Normal file
@ -0,0 +1,14 @@
|
||||
steps:
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
entrypoint: 'bash'
|
||||
args: ['-c', 'docker pull gcr.io/fldemo-315215/i-want-to-create-a-website-for-pdf-image-convert-29929-dev:latest || exit 0']
|
||||
- name: 'gcr.io/cloud-builders/docker'
|
||||
args: [
|
||||
'build',
|
||||
'-t', 'gcr.io/fldemo-315215/i-want-to-create-a-website-for-pdf-image-convert-29929-dev:latest',
|
||||
'--file', 'Dockerfile.dev',
|
||||
'--cache-from', 'gcr.io/fldemo-315215/i-want-to-create-a-website-for-pdf-image-convert-29929-dev:latest',
|
||||
'.'
|
||||
]
|
||||
images: ['gcr.io/fldemo-315215/i-want-to-create-a-website-for-pdf-image-convert-29929-dev:latest']
|
||||
logsBucket: 'gs://fldemo-315215-cloudbuild-logs'
|
||||
46
docker/README.md
Normal file
46
docker/README.md
Normal file
@ -0,0 +1,46 @@
|
||||
## Description:
|
||||
|
||||
The project contains the **docker folder** and the `Dockerfile`.
|
||||
|
||||
The `Dockerfile` is used to deploy the project to Google Cloud.
|
||||
|
||||
The **docker folder** contains a couple of helper scripts:
|
||||
|
||||
- `docker-compose.yml` (describes all our services: web, backend, and db)
|
||||
- `start-backend.sh` (starts the backend, but only after the database is up)
|
||||
- `wait-for-it.sh` (imported from https://github.com/vishnubob/wait-for-it)
|
||||
|
||||
> To avoid breaking the application, we recommend you don't edit the following files: everything inside the **docker folder** and the `Dockerfile`.
|
||||
|
||||
|
||||
## Run services:
|
||||
|
||||
1. Install Docker Compose (https://docs.docker.com/compose/install/)
|
||||
|
||||
2. Move to the `docker` folder. All following steps should be done from this folder (a condensed command sketch follows this list).
|
||||
|
||||
``` cd docker ```
|
||||
|
||||
3. Make `wait-for-it.sh` and `start-backend.sh` executable:
|
||||
|
||||
``` chmod +x start-backend.sh && chmod +x wait-for-it.sh ```
|
||||
|
||||
4. Download the dependent projects for the services.
|
||||
|
||||
5. Review the `docker-compose.yml` file. Make sure that all services have Dockerfiles; only the db service doesn't require one.
|
||||
|
||||
6. Make sure the ports listed under `ports` are available on your local machine.
|
||||
|
||||
7. Start services:
|
||||
|
||||
7.1. With an empty database: `rm -rf data && docker-compose up`
|
||||
|
||||
7.2. With database data stored from previous runs: `docker-compose up`
|
||||
|
||||
8. Check http://localhost:3000
|
||||
|
||||
9. Stop services:
|
||||
|
||||
9.1. Just press `Ctrl+C`
|
||||
|
||||
|
||||
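The run steps above condense into a short shell session. This is only a sketch of the flow described in this README, assuming you start from the repository root and already have Docker Compose installed:

```sh
# Condensed version of the run steps above (a sketch, not a replacement for reading them).
cd docker
chmod +x start-backend.sh wait-for-it.sh

# Start with an empty database (wipes data stored by previous runs):
rm -rf data && docker-compose up

# ...or keep the database data from previous runs instead:
# docker-compose up

# The app is then available at http://localhost:3000; press Ctrl+C to stop the services.
```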
35
docker/docker-compose.yml
Normal file
35
docker/docker-compose.yml
Normal file
@ -0,0 +1,35 @@
|
||||
|
||||
|
||||
version: "3.9"
|
||||
services:
|
||||
web:
|
||||
image: frontend
|
||||
build: ../frontend
|
||||
stdin_open: true # docker run -i
|
||||
tty: true # docker run -t
|
||||
ports:
|
||||
- "3000:3000"
|
||||
db:
|
||||
image: postgres
|
||||
volumes:
|
||||
- ./data/db:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_HOST_AUTH_METHOD=trust
|
||||
- POSTGRES_DB=db_i_want_to_create_a_website_for_pdf___image_convert
|
||||
ports:
|
||||
- "5432:5432"
|
||||
backend:
|
||||
image: backend
|
||||
volumes:
|
||||
- ./wait-for-it.sh:/usr/src/app/wait-for-it.sh
|
||||
- ./start-backend.sh:/usr/src/app/start-backend.sh
|
||||
build: ../backend
|
||||
environment:
|
||||
- DB_HOST=db
|
||||
ports:
|
||||
- "8080:8080"
|
||||
depends_on:
|
||||
- "db"
|
||||
|
||||
command: ["bash", "./wait-for-it.sh", "db:5432", "--timeout=0", "--strict", "--", "bash", "./start-backend.sh"]
|
||||
|
||||
2
docker/start-backend.sh
Normal file
2
docker/start-backend.sh
Normal file
@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env bash
|
||||
yarn start
|
||||
182
docker/wait-for-it.sh
Normal file
182
docker/wait-for-it.sh
Normal file
@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
11
frontend/.eslintrc.cjs
Normal file
11
frontend/.eslintrc.cjs
Normal file
@ -0,0 +1,11 @@
|
||||
module.exports = {
|
||||
extends: [
|
||||
'next/core-web-vitals',
|
||||
'eslint-config-prettier',
|
||||
'eslint:recommended',
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
],
|
||||
parser: '@typescript-eslint/parser',
|
||||
plugins: ['@typescript-eslint'],
|
||||
root: true,
|
||||
};
|
||||
10
frontend/.prettierrc
Normal file
10
frontend/.prettierrc
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"tabWidth": 2,
|
||||
"trailingComma": "all",
|
||||
"quoteProps": "as-needed",
|
||||
"jsxSingleQuote": true,
|
||||
"bracketSpacing": true,
|
||||
"bracketSameLine": false,
|
||||
"arrowParens": "always"
|
||||
}
|
||||
19
frontend/Dockerfile
Normal file
19
frontend/Dockerfile
Normal file
@ -0,0 +1,19 @@
|
||||
FROM node:20.15.1-alpine
|
||||
|
||||
# Create app directory
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install app dependencies
|
||||
# A wildcard is used to ensure both package.json AND package-lock.json are copied
|
||||
# where available (npm@5+)
|
||||
COPY package*.json ./
|
||||
|
||||
RUN yarn install
|
||||
# If you are building your code for production
|
||||
# RUN npm ci --only=production
|
||||
|
||||
# Bundle app source
|
||||
COPY . .
|
||||
|
||||
EXPOSE 3000
|
||||
CMD [ "yarn", "dev" ]
|
||||
21
frontend/LICENSE-justboil
Normal file
21
frontend/LICENSE-justboil
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2019-current JustBoil.me (https://justboil.me)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
Some files were not shown because too many files have changed in this diff