Initial version

This commit is contained in:
Flatlogic Bot 2025-03-12 07:20:56 +00:00
commit 35266d9442
733 changed files with 130394 additions and 0 deletions

3
.dockerignore Normal file
View File

@ -0,0 +1,3 @@
backend/node_modules
frontend/node_modules
frontend/build

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
node_modules/
*/node_modules/
*/build/

17
Dockerfile Normal file
View File

@ -0,0 +1,17 @@
# --- Stage 1: build the React frontend ---
FROM node:20.15.1-alpine AS builder
RUN apk add --no-cache git
WORKDIR /app
# Install frontend dependencies pinned to the lockfile before copying sources
# so the install layer is cached across source-only changes.
COPY frontend/package.json frontend/yarn.lock ./
RUN yarn install --pure-lockfile
COPY frontend .
RUN yarn build

# --- Stage 2: runtime image — backend serving the built frontend ---
FROM node:20.15.1-alpine
WORKDIR /app
COPY backend/package.json backend/yarn.lock ./
RUN yarn install --pure-lockfile
COPY backend .
# Serve the compiled frontend as the backend's static public directory.
COPY --from=builder /app/build /app/public
CMD ["yarn", "start"]

64
Dockerfile.dev Normal file
View File

@ -0,0 +1,64 @@
# Development image: runs frontend, backend and the app-shell side by side
# behind nginx, with per-service dependency stages for layer caching.

# Base image for Node.js dependencies
FROM node:20.15.1-alpine AS frontend-deps
RUN apk add --no-cache git
WORKDIR /app/frontend
COPY frontend/package.json frontend/yarn.lock ./
RUN yarn install --pure-lockfile

FROM node:20.15.1-alpine AS backend-deps
RUN apk add --no-cache git
WORKDIR /app/backend
COPY backend/package.json backend/yarn.lock ./
RUN yarn install --pure-lockfile

FROM node:20.15.1-alpine AS app-shell-deps
RUN apk add --no-cache git
WORKDIR /app/app-shell
COPY app-shell/package.json app-shell/yarn.lock ./
RUN yarn install --pure-lockfile

# Nginx setup and application build
FROM node:20.15.1-alpine AS build
RUN apk add --no-cache git nginx
RUN yarn global add concurrently
# Make sure to add yarn global bin to PATH.
# (key=value ENV form; the space-separated form is deprecated)
ENV PATH="/root/.yarn/bin:/root/.config/yarn/global/node_modules/.bin:$PATH"
# Copy dependencies (node_modules) from the per-service stages
WORKDIR /app
COPY --from=frontend-deps /app/frontend /app/frontend
COPY --from=backend-deps /app/backend /app/backend
COPY --from=app-shell-deps /app/app-shell /app/app-shell
COPY frontend /app/frontend
COPY backend /app/backend
COPY app-shell /app/app-shell
COPY docker /app/docker
# Copy Nginx configuration
COPY nginx.conf /etc/nginx/nginx.conf
# Copy all files from root to /app
COPY . /app
# Expose the port the app runs on
EXPOSE 8080
ENV NODE_ENV=dev_stage
ENV FRONT_PORT=3001
ENV APP_SHELL_PORT=4000
# Start all services; wait for the frontend port before launching app-shell.
# Fixed: the "Waiting for frontend" message was single-quoted, so
# ${FRONT_PORT} printed literally instead of expanding.
CMD ["sh", "-c", "\
concurrently --kill-others-on-fail \
\"yarn --cwd /app/frontend dev\" \
\"yarn --cwd /app/backend start\" \
\"sleep 10 && nginx -g 'daemon off;'\" & \
CONC_PID=$! && \
echo \"Waiting for frontend (port ${FRONT_PORT}) to be available...\" && \
while ! nc -z localhost ${FRONT_PORT}; do \
sleep 2; \
done && \
echo 'Frontend is up. Starting app_shell for Git check...' && \
yarn --cwd /app/app-shell start && \
wait $CONC_PID"]

1
LICENSE Normal file
View File

@ -0,0 +1 @@
https://flatlogic.com/

198
README.md Normal file
View File

@ -0,0 +1,198 @@
# susatechnology
## This project was generated by [Flatlogic Platform](https://flatlogic.com).
- Frontend: [React.js](https://flatlogic.com/templates?framework%5B%5D=react&sort=default)
- Backend: [NodeJS](https://flatlogic.com/templates?backend%5B%5D=nodejs&sort=default)
<details><summary>Backend Folder Structure</summary>
The generated application has the following backend folder structure:
`src` folder which contains your working files that will be used later to create the build. The src folder contains folders as:
- `auth` - config the library for authentication and authorization;
- `db` - contains such folders as:
- `api` - documentation that is automatically generated by jsdoc or other tools;
- `migrations` - is a skeleton of the database or all the actions that users do with the database;
- `models`- what will represent the database for the backend;
- `seeders` - the entity that creates the data for the database.
- `routes` - this folder would contain all the routes that you have created using Express Router and what they do would be exported from a Controller file;
- `services` - contains such folders as `emails` and `notifications`.
</details>
- Database: PostgreSQL
-----------------------
### We offer 2 ways how to start the project locally: by running Frontend and Backend or with Docker.
-----------------------
## To start the project:
### Backend:
> Please change current folder: `cd backend`
#### Install local dependencies:
`yarn install`
------------
#### Adjust local db:
##### 1. Install postgres:
MacOS:
`brew install postgres`
> if you don't have brew, please install it (https://brew.sh) and repeat the step `brew install postgres`.
Ubuntu:
`sudo apt update`
`sudo apt install postgresql postgresql-contrib`
##### 2. Create db and admin user:
Before running and testing the connection, make sure you have created a database as described in the configuration above. You can use the `psql` command to create a user and database.
`psql postgres -U postgres`
Next, type this command to create a new user with a password, then grant it the right to create databases.
`postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
`postgres-# ALTER ROLE admin CREATEDB;`
Quit `psql`, then log in again using the new user that was previously created.
`postgres-# \q`
`psql postgres -U admin`
Type this command to create a new database.
`postgres=> CREATE DATABASE db_{your_project_name};`
Then grant that new user privileges on the new database, and quit `psql`.
`postgres=> GRANT ALL PRIVILEGES ON DATABASE db_{your_project_name} TO admin;`
`postgres=> \q`
------------
#### Create database:
`yarn db:create`
#### Start production build:
`yarn start`
### Frontend:
> Please change current folder: `cd frontend`
## To start the project with Docker:
### Description:
The project contains the **docker folder** and the `Dockerfile`.
The `Dockerfile` is used to Deploy the project to Google Cloud.
The **docker folder** contains a couple of helper scripts:
- `docker-compose.yml` (all our services: web, backend, db are described here)
- `start-backend.sh` (starts backend, but only after the database)
- `wait-for-it.sh` (imported from https://github.com/vishnubob/wait-for-it)
> To avoid breaking the application, we recommend you don't edit the following files: everything inside the **docker folder** and the `Dockerfile`.
## Run services:
1. Install docker compose (https://docs.docker.com/compose/install/)
2. Move to `docker` folder. All next steps should be done from this folder.
``` cd docker ```
3. Make executables from `wait-for-it.sh` and `start-backend.sh`:
``` chmod +x start-backend.sh && chmod +x wait-for-it.sh ```
4. Download the dependent projects for the services.
5. Review the docker-compose.yml file. Make sure that all services have Dockerfiles. Only db service doesn't require a Dockerfile.
6. Make sure you have needed ports (see them in `ports`) available on your local machine.
7. Start services:
7.1. With an empty database `rm -rf data && docker-compose up`
7.2. With database data stored from previous runs: `docker-compose up`
8. Check http://localhost:3000
9. Stop services:
9.1. Just press `Ctrl+C`
## Most common errors:
1. `connection refused`
There could be many reasons, but the most common are:
- The port is not open on the destination machine.
- The port is open on the destination machine, but its backlog of pending connections is full.
- A firewall between the client and server is blocking access (also check local firewalls).
After checking for firewalls and that the port is open, use telnet to connect to the IP/port to test connectivity. This removes any potential issues from your application.
***MacOS:***
If you suspect that your SSH service might be down, you can run this command to find out:
`sudo service ssh status`
If the command line returns a status of down, then you've likely found the reason behind your connectivity error.
***Ubuntu:***
Sometimes a connection refused error can also indicate that there is an IP address conflict on your network. You can search for possible IP conflicts by running:
`arp-scan -I eth0 -l | grep <ipaddress>`
and
`arping <ipaddress>`
2. `yarn db:create` creates database with the assembled tables (on MacOS with Postgres database)
The workaround - put the next commands to your Postgres database terminal:
`DROP SCHEMA public CASCADE;`
`CREATE SCHEMA public;`
`GRANT ALL ON SCHEMA public TO postgres;`
`GRANT ALL ON SCHEMA public TO public;`
Afterwards, continue to start your project in the backend directory by running:
`yarn start`

26
app-shell/.eslintrc.cjs Normal file
View File

@ -0,0 +1,26 @@
const globals = require('globals');
module.exports = [
{
files: ['**/*.js', '**/*.ts', '**/*.tsx'],
languageOptions: {
ecmaVersion: 2021,
sourceType: 'module',
globals: {
...globals.browser,
...globals.node,
},
parser: '@typescript-eslint/parser',
},
plugins: ['@typescript-eslint'],
rules: {
'no-unused-vars': 'warn',
'no-console': 'off',
'indent': ['error', 2],
'quotes': ['error', 'single'],
'semi': ['error', 'always'],
'@typescript-eslint/no-unused-vars': 'warn',
},
},
];

11
app-shell/.prettierrc Normal file
View File

@ -0,0 +1,11 @@
{
"singleQuote": true,
"tabWidth": 2,
"printWidth": 80,
"trailingComma": "all",
"quoteProps": "as-needed",
"jsxSingleQuote": true,
"bracketSpacing": true,
"bracketSameLine": false,
"arrowParens": "always"
}

7
app-shell/.sequelizerc Normal file
View File

@ -0,0 +1,7 @@
const path = require('path');
module.exports = {
"config": path.resolve("src", "db", "db.config.js"),
"models-path": path.resolve("src", "db", "models"),
"seeders-path": path.resolve("src", "db", "seeders"),
"migrations-path": path.resolve("src", "db", "migrations")
};

23
app-shell/Dockerfile Normal file
View File

@ -0,0 +1,23 @@
FROM node:20.15.1-alpine

# bash is required by the docker helper scripts (e.g. wait-for-it.sh)
RUN apk update && apk add bash

# Create app directory
WORKDIR /usr/src/app

# Install app dependencies.
# Copy the yarn lockfile as well so `yarn install` resolves to pinned
# versions — the previous `package*.json` glob never matched yarn.lock.
# The trailing glob keeps the COPY valid even if no lockfile exists yet.
COPY package*.json yarn.lock* ./
RUN yarn install

# If you are building your code for production
# RUN npm ci --only=production

# Bundle app source
COPY . .

EXPOSE 4000
CMD [ "yarn", "start" ]

13
app-shell/README.md Normal file
View File

@ -0,0 +1,13 @@
# test - template backend
#### Run App on local machine:
##### Install local dependencies:
- `yarn install`
---
##### Start build:
- `yarn start`

44
app-shell/package.json Normal file
View File

@ -0,0 +1,44 @@
{
"name": "test",
"description": "test - template backend",
"scripts": {
"start": "nodemon ./src/index.js"
},
"dependencies": {
"@babel/parser": "^7.26.7",
"adm-zip": "^0.5.16",
"axios": "^1.6.7",
"bcrypt": "5.1.1",
"cors": "2.8.5",
"csv-parser": "^3.0.0",
"eslint": "^9.13.0",
"express": "4.18.2",
"formidable": "1.2.2",
"helmet": "4.1.1",
"json2csv": "^5.0.7",
"jsonwebtoken": "8.5.1",
"lodash": "4.17.21",
"moment": "2.30.1",
"multer": "^1.4.4",
"passport": "^0.7.0",
"passport-google-oauth2": "^0.2.0",
"passport-jwt": "^4.0.1",
"passport-microsoft": "^0.1.0",
"postcss": "^8.5.1",
"sequelize-json-schema": "^2.1.1",
"pg": "^8.13.3"
},
"engines": {
"node": ">=18"
},
"private": true,
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^8.12.2",
"@typescript-eslint/parser": "^8.12.2",
"cross-env": "7.0.3",
"mocha": "8.1.3",
"node-mocks-http": "1.9.0",
"nodemon": "^3.1.7",
"sequelize-cli": "6.6.2"
}
}

70
app-shell/src/config.js Normal file
View File

@ -0,0 +1,70 @@
const os = require('os');

// Central configuration for the app-shell service.
// NOTE(review): several credentials below are committed in plain text (JWT
// secret, OAuth client secrets, SMTP user). They can now be overridden via
// environment variables; rotating them and removing the literal fallbacks
// is strongly recommended.
const config = {
  gcloud: {
    bucket: "fldemo-files",
    hash: "1b2a4cd99c98c6614dd81ba731df5667"
  },
  bcrypt: {
    // Cost factor for password hashing.
    saltRounds: 12
  },
  // Seed credentials for the default administrator account.
  admin_pass: process.env.ADMIN_PASS || "password",
  admin_email: process.env.ADMIN_EMAIL || "admin@flatlogic.com",
  // Supported authentication providers.
  providers: {
    LOCAL: 'local',
    GOOGLE: 'google',
    MICROSOFT: 'microsoft'
  },
  // Secret used to sign JWTs (see helpers.jwtSign). Override in production.
  secret_key: process.env.SECRET_KEY || 'HUEyqESqgQ1yTwzVlO6wprC9Kf1J1xuA',
  remote: '',
  // In production the host/port pieces are empty and collapse out of the
  // derived URLs below.
  port: process.env.NODE_ENV === "production" ? "" : "8080",
  hostUI: process.env.NODE_ENV === "production" ? "" : "http://localhost",
  portUI: process.env.NODE_ENV === "production" ? "" : "3000",
  portUIProd: process.env.NODE_ENV === "production" ? "" : ":3000",
  swaggerUI: process.env.NODE_ENV === "production" ? "" : "http://localhost",
  swaggerPort: process.env.NODE_ENV === "production" ? "" : ":8080",
  google: {
    clientId: '671001533244-kf1k1gmp6mnl0r030qmvdu6v36ghmim6.apps.googleusercontent.com',
    clientSecret: process.env.GOOGLE_CLIENT_SECRET || 'Yo4qbKZniqvojzUQ60iKlxqR'
  },
  microsoft: {
    clientId: '4696f457-31af-40de-897c-e00d7d4cff73',
    clientSecret: process.env.MICROSOFT_CLIENT_SECRET || 'm8jzZ.5UpHF3=-dXzyxiZ4e[F8OF54@p'
  },
  // Scratch directory for file uploads.
  uploadDir: os.tmpdir(),
  email: {
    from: 'susatechnology <app@flatlogic.app>',
    host: 'email-smtp.us-east-1.amazonaws.com',
    port: 587,
    auth: {
      user: 'AKIAVEW7G4PQUBGM52OF',
      pass: process.env.EMAIL_PASS,
    },
    tls: {
      // NOTE(review): disables certificate validation for SMTP — confirm
      // this is intentional outside local development.
      rejectUnauthorized: false
    }
  },
  roles: {
    super_admin: 'Super Administrator',
    admin: 'Administrator',
    user: 'User',
  },
  project_uuid: '211609a8-ae51-4b11-bb65-8f947b4a65d1',
  flHost: process.env.NODE_ENV === 'production' ? 'https://flatlogic.com/projects' : 'http://localhost:3000/projects',
};

config.pexelsKey = 'Vc99rnmOhHhJAbgGQoKLZtsaIVfkeownoQNbTj78VemUjKh08ZYRbf18';
config.pexelsQuery = 'abstract technology network background';

// Derived URLs — empty port/host parts collapse cleanly in production.
config.host = process.env.NODE_ENV === "production" ? config.remote : "http://localhost";
config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`;
config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`;
config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`;
config.backUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}`;

module.exports = config;

23
app-shell/src/helpers.js Normal file
View File

@ -0,0 +1,23 @@
const jwt = require('jsonwebtoken');
const config = require('./config');
module.exports = class Helpers {
static wrapAsync(fn) {
return function (req, res, next) {
fn(req, res, next).catch(next);
};
}
static commonErrorHandler(error, req, res, next) {
if ([400, 403, 404].includes(error.code)) {
return res.status(error.code).send(error.message);
}
console.error(error);
return res.status(500).send(error.message);
}
static jwtSign(data) {
return jwt.sign(data, config.secret_key, { expiresIn: '6h' });
}
};

54
app-shell/src/index.js Normal file
View File

@ -0,0 +1,54 @@
const express = require('express');
const cors = require('cors');
const app = express();
const checkPermissions = require('./middlewares/check-permissions');
const modifyPath = require('./middlewares/modify-path');
const VCS = require('./services/vcs');

const executorRoutes = require('./routes/executor');
const vcsRoutes = require('./routes/vcs');

// Function to initialize the Git repository
function initRepo() {
  const projectId = '29814';
  return VCS.initRepo(projectId);
}

// Start the Express app. The port is taken from APP_SHELL_PORT (set to 4000
// in the Dockerfile) and falls back to the previous hard-coded 4000.
function startServer() {
  const PORT = Number(process.env.APP_SHELL_PORT) || 4000;
  app.listen(PORT, () => {
    console.log(`Listening on port ${PORT}`);
  });
}

// Run Git check after the server is up
function runGitCheck() {
  initRepo()
    .then((result) => {
      console.log(result.message);
      // Here you can add additional logic if needed
    })
    .catch((err) => {
      console.error('Error during repo initialization:', err);
      // The Git check is critical for the VCS routes, so bail out.
      process.exit(1);
    });
}

app.use(cors({ origin: true }));
// express.json() is body-parser's JSON parser re-exported by Express 4.16+;
// the standalone body-parser package was not declared in package.json.
app.use(express.json());
app.use(checkPermissions);
app.use(modifyPath);
app.use('/executor', executorRoutes);
app.use('/vcs', vcsRoutes);

// Start the app_shell server
startServer();
// Now perform Git check
runGitCheck();

module.exports = app;

View File

@ -0,0 +1,17 @@
const config = require('../config');

/**
 * Express middleware that gates every request on the X-Project-UUID header.
 * Requests whose header does not exactly match the configured project UUID
 * are rejected with 403.
 */
function checkPermissions(req, res, next) {
  const expectedUuid = config.project_uuid;
  // Node lower-cases incoming header names.
  const receivedUuid = req.headers['x-project-uuid'];

  // Logging whatever request we're getting
  console.log('Request:', req.url, req.method, req.body, req.headers);

  if (receivedUuid && receivedUuid === expectedUuid) {
    return next();
  }
  res.status(403).send({ error: 'Stop right there, criminal scum! Your project UUID is invalid or missing.' });
}

module.exports = checkPermissions;

View File

@ -0,0 +1,8 @@
// Prefix incoming file paths so they resolve relative to the repository root
// (the executor service runs three directories below it).
function modifyPath(req, res, next) {
  const requestedPath = req.body?.path;
  if (requestedPath) {
    req.body.path = `../../../${requestedPath}`;
  }
  next();
}
module.exports = modifyPath;

View File

@ -0,0 +1,274 @@
// Executor routes: file-system and database operations exposed to the
// platform tooling. Paths arrive pre-prefixed by the modify-path middleware.
// NOTE: routes with an inner try/catch return a structured 500 payload
// ({ error, message, details, validation }) instead of deferring to
// commonErrorHandler; the simpler routes rely on wrapAsync + the handler.
const express = require('express');
const multer = require('multer');
const upload = multer({ dest: 'uploads/' });
const fs = require('fs');
const ExecutorService = require('../services/executor');
const wrapAsync = require('../helpers').wrapAsync;
const router = express.Router();

// Return the directory tree rooted at `path`.
router.post(
  '/read_project_tree',
  wrapAsync(async (req, res) => {
    const { path } = req.body;
    const tree = await ExecutorService.readProjectTree(path);
    res.status(200).send(tree);
  }),
);

// Return a file's contents; with `showLines`, as a line-number → text map.
router.post(
  '/read_file',
  wrapAsync(async (req, res) => {
    const { path, showLines } = req.body;
    const content = await ExecutorService.readFileContents(path, showLines);
    res.status(200).send(content);
  }),
);

// Return the number of lines in a file.
router.post(
  '/count_file_lines',
  wrapAsync(async (req, res) => {
    const { path } = req.body;
    const content = await ExecutorService.countFileLines(path);
    res.status(200).send(content);
  }),
);

// router.post(
//   '/read_file_header',
//   wrapAsync(async (req, res) => {
//     const { path, N } = req.body;
//     try {
//       const header = await ExecutorService.readFileHeader(path, N);
//       res.status(200).send(header);
//     } catch (error) {
//       res.status(500).send({
//         error: true,
//         message: error.message,
//         details: error.details || error.stack,
//         validation: error.validation
//       });
//     }
//   }),
// );

// Return a window of lines around `lineNumber`.
router.post(
  '/read_file_line_context',
  wrapAsync(async (req, res) => {
    const { path, lineNumber, windowSize, showLines } = req.body;
    try {
      const context = await ExecutorService.readFileLineContext(path, lineNumber, windowSize, showLines);
      res.status(200).send(context);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Overwrite a file with `fileContents` (with an optional commit comment).
router.post(
  '/write_file',
  wrapAsync(async (req, res) => {
    const { path, fileContents, comment } = req.body;
    try {
      await ExecutorService.writeFile(path, fileContents, comment);
      res.status(200).send({ message: 'File written successfully' });
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Insert `newContent` at `lineNumber`.
router.post(
  '/insert_file_content',
  wrapAsync(async (req, res) => {
    const { path, lineNumber, newContent, message } = req.body;
    try {
      await ExecutorService.insertFileContent(path, lineNumber, newContent, message);
      res.status(200).send({ message: 'File written successfully' });
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Replace a single line with `newText`.
router.post(
  '/replace_file_line',
  wrapAsync(async (req, res) => {
    const { path, lineNumber, newText } = req.body;
    try {
      const result = await ExecutorService.replaceFileLine(path, lineNumber, newText);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Replace the inclusive line range [startLine, endLine] with `newCode`.
router.post(
  '/replace_file_chunk',
  wrapAsync(async (req, res) => {
    const { path, startLine, endLine, newCode } = req.body;
    try {
      const result = await ExecutorService.replaceFileChunk(path, startLine, endLine, newCode);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Delete the inclusive line range [startLine, endLine].
router.post(
  '/delete_file_lines',
  wrapAsync(async (req, res) => {
    const { path, startLine, endLine, message } = req.body;
    try {
      const result = await ExecutorService.deleteFileLines(path, startLine, endLine, message);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Syntax-validate a file and return the result.
router.post(
  '/validate_file',
  wrapAsync(async (req, res) => {
    const { path } = req.body;
    try {
      const validationResult = await ExecutorService.validateFile(path);
      res.status(200).send({ validationResult });
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      });
    }
  }),
);

// Replace an exact code block `oldCode` with `newCode`.
router.post(
  '/replace_code_block',
  wrapAsync(async (req, res) => {
    const {path, oldCode, newCode, message} = req.body;
    try {
      const response = await ExecutorService.replaceCodeBlock(path, oldCode, newCode, message);
      res.status(200).send(response);
    } catch (error) {
      res.status(500).send({
        error: true,
        message: error.message,
        details: error.details || error.stack,
        validation: error.validation
      })
    }
  })
)

// Accept a multipart upload describing a project scheme and apply it;
// the temp upload is removed on both success and failure paths.
router.post('/update_project_files_from_scheme',
  upload.single('file'), // 'file' - name of the field in the form
  async (req, res) => {
    console.log('Request received');
    console.log('Headers:', req.headers);
    if (!req.file) {
      return res.status(400).json({ error: 'No file uploaded' });
    }
    console.log('File info:', {
      originalname: req.file.originalname,
      path: req.file.path,
      size: req.file.size,
      mimetype: req.file.mimetype
    });
    try {
      console.log('Starting update process...');
      const result = await ExecutorService.updateProjectFilesFromScheme(req.file.path);
      console.log('Update completed, result:', result);
      console.log('Removing temp file...');
      fs.unlinkSync(req.file.path);
      console.log('Temp file removed');
      console.log('Sending response...');
      return res.json(result);
    } catch (error) {
      console.error('Error in route handler:', error);
      if (req.file) {
        try {
          fs.unlinkSync(req.file.path);
          console.log('Temp file removed after error');
        } catch (unlinkError) {
          console.error('Error removing temp file:', unlinkError);
        }
      }
      console.error('Update project files error:', error);
      return res.status(500).json({
        error: error.message,
        stack: process.env.NODE_ENV === 'development' ? error.stack : undefined
      });
    }
  }
);

// Return the database schema (tables and columns).
router.post(
  '/get_db_schema',
  wrapAsync(async (req, res) => {
    try {
      const jsonSchema = await ExecutorService.getDBSchema();
      res.status(200).send({ jsonSchema });
    } catch (error) {
      res.status(500).send({ error: error });
    }
  }),
);

// Execute a raw SQL query and return the rows.
router.post(
  '/execute_sql',
  wrapAsync(async (req, res) => {
    try {
      const { query } = req.body;
      const result = await ExecutorService.executeSQL(query);
      res.status(200).send(result);
    } catch (error) {
      res.status(500).send({ error: error });
    }
  }),
);

router.use('/', require('../helpers').commonErrorHandler);
module.exports = router;

View File

@ -0,0 +1,40 @@
// VCS routes: thin HTTP wrappers over the Git service used by the platform.
const express = require('express');
const wrapAsync = require('../helpers').wrapAsync; // Wrapper for handling async routes
const VSC = require('../services/vcs');
const router = express.Router();

// Initialize the Git repository.
router.post('/init', wrapAsync(async (req, res) => {
  const result = await VSC.initRepo();
  res.status(200).send(result);
}));

// Commit the given files with the supplied commit message.
router.post('/commit', wrapAsync(async (req, res) => {
  const { message, files } = req.body;
  const result = await VSC.commitChanges(message, files);
  res.status(200).send(result);
}));

// Return the commit log.
router.post('/log', wrapAsync(async (req, res) => {
  const result = await VSC.getLog();
  res.status(200).send(result);
}));

// Roll back to `ref` via revert (the checkout variant was abandoned).
router.post('/rollback', wrapAsync(async (req, res) => {
  const { ref } = req.body;
  // const result = await VSC.checkout(ref);
  const result = await VSC.revert(ref);
  res.status(200).send(result);
}));

// Merge the dev branch into master.
router.post('/sync-to-stable', wrapAsync(async (req, res) => {
  const result = await VSC.mergeDevIntoMaster();
  res.status(200).send(result);
}));

// Reset the dev branch — presumably back to master; verify in services/vcs.
router.post('/reset-dev', wrapAsync(async (req, res) => {
  const result = await VSC.resetDevBranch();
  res.status(200).send(result);
}));

router.use('/', require('../helpers').commonErrorHandler);
module.exports = router;

View File

@ -0,0 +1,88 @@
// Database.js
// Thin wrapper around a single shared `pg` client used by the executor
// service. Exported as a singleton (see module.exports at the bottom).
const { Client } = require('pg');
// Reuses the backend's DB config so both services talk to the same database.
const config = require('../../../backend/src/db/db.config');
const env = process.env.NODE_ENV || 'development';
const dbConfig = config[env];

class Database {
  constructor() {
    this.client = new Client({
      user: dbConfig.username,
      password: dbConfig.password,
      database: dbConfig.database,
      host: dbConfig.host,
      port: dbConfig.port
    });
    // Connect once, reuse the client
    // NOTE(review): this throw happens inside an unawaited promise's catch,
    // so it surfaces as an unhandled rejection rather than a constructor
    // error — confirm this is the intended failure mode.
    this.client.connect().catch(err => {
      console.error('Error connecting to the database:', err);
      throw err;
    });
  }

  // Execute an arbitrary SQL string and return { success, rows }.
  // NOTE(review): the query is run verbatim (no parameterization) — callers
  // must ensure it comes from a trusted source.
  async executeSQL(query) {
    try {
      const result = await this.client.query(query);
      return {
        success: true,
        rows: result.rows
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }

  // Method to fetch simple table/column info from 'information_schema'
  // (You can expand this to handle constraints, indexes, etc.)
  // Returns [{ table, columns: [{ column_name, data_type, is_nullable }] }].
  async getDBSchema(schemaName = 'public') {
    try {
      const tableQuery = `
        SELECT table_name
        FROM information_schema.tables
        WHERE table_schema = $1
        AND table_type = 'BASE TABLE'
        ORDER BY table_name
      `;
      const columnQuery = `
        SELECT table_name, column_name, data_type, is_nullable
        FROM information_schema.columns
        WHERE table_schema = $1
        ORDER BY table_name, ordinal_position
      `;
      // Both queries are independent, so run them in parallel.
      const [tablesResult, columnsResult] = await Promise.all([
        this.client.query(tableQuery, [schemaName]),
        this.client.query(columnQuery, [schemaName]),
      ]);
      // Build a simple schema object:
      const tables = tablesResult.rows.map(row => row.table_name);
      const columnsByTable = {};
      columnsResult.rows.forEach(row => {
        const { table_name, column_name, data_type, is_nullable } = row;
        if (!columnsByTable[table_name]) columnsByTable[table_name] = [];
        columnsByTable[table_name].push({ column_name, data_type, is_nullable });
      });
      // Combine tables with their columns
      return tables.map(table => ({
        table,
        columns: columnsByTable[table] || [],
      }));
    } catch (error) {
      console.error('Error fetching schema:', error);
      throw error;
    }
  }

  // Close the shared client connection.
  async close() {
    await this.client.end();
  }
}

module.exports = new Database();

View File

@ -0,0 +1,52 @@
<!DOCTYPE html>
<!--
  Address-verification email template.
  The {appTitle} and {signupUrl} placeholders are substituted at send time
  by the email service before the message is delivered.
-->
<html>
  <head>
    <style>
      .email-container {
        max-width: 600px;
        margin: auto;
        background-color: #ffffff;
        border: 1px solid #e2e8f0;
        border-radius: 4px;
        overflow: hidden;
      }
      .email-header {
        background-color: #3498db;
        color: #fff;
        padding: 16px;
        text-align: center;
      }
      .email-body {
        padding: 16px;
      }
      .email-footer {
        padding: 16px;
        background-color: #f7fafc;
        text-align: center;
        color: #4a5568;
        font-size: 14px;
      }
      .link-primary {
        color: #3498db;
        text-decoration: none;
      }
    </style>
  </head>
  <body>
    <div class="email-container">
      <div class="email-header">Verify your email for {appTitle}!</div>
      <div class="email-body">
        <p>Hello,</p>
        <p>Follow this link to verify your email address.</p>
        <p>
          If you didn't ask to verify this address, you can ignore this email.
        </p>
        <p><a href="{signupUrl}" class="link-primary">{signupUrl}</a></p>
      </div>
      <div class="email-footer">
        Thanks,<br />
        The {appTitle} Team
      </div>
    </div>
  </body>
</html>

View File

@ -0,0 +1,56 @@
<!DOCTYPE html>
<!--
  Invitation email template.
  The {appTitle} and {signupUrl} placeholders are substituted at send time
  by the email service before the message is delivered.
-->
<html>
  <head>
    <style>
      .email-container {
        max-width: 600px;
        margin: auto;
        background-color: #ffffff;
        border: 1px solid #e2e8f0;
        border-radius: 4px;
        overflow: hidden;
      }
      .email-header {
        background-color: #3498db;
        color: #fff;
        padding: 16px;
        text-align: center;
      }
      .email-body {
        padding: 16px;
      }
      .email-footer {
        padding: 16px;
        background-color: #f7fafc;
        text-align: center;
        color: #4a5568;
        font-size: 14px;
      }
      .btn-primary {
        background-color: #3498db;
        color: #fff !important;
        padding: 8px 16px;
        border-radius: 4px;
        text-decoration: none;
        display: inline-block;
      }
    </style>
  </head>
  <body>
    <div class="email-container">
      <div class="email-header">Welcome to {appTitle}!</div>
      <div class="email-body">
        <p>Hello,</p>
        <p>
          You've been invited to join {appTitle}. Please click the button below
          to set up your account.
        </p>
        <a href="{signupUrl}" class="btn-primary">Set up account</a>
      </div>
      <div class="email-footer">
        Thanks,<br />
        The {appTitle} Team
      </div>
    </div>
  </body>
</html>

View File

@ -0,0 +1,55 @@
<!DOCTYPE html>
<!--
  Password-reset email template.
  The {appTitle}, {accountName} and {resetUrl} placeholders are substituted
  at send time by the email service before the message is delivered.
-->
<html>
  <head>
    <style>
      .email-container {
        max-width: 600px;
        margin: auto;
        background-color: #ffffff;
        border: 1px solid #e2e8f0;
        border-radius: 4px;
        overflow: hidden;
      }
      .email-header {
        background-color: #3498db;
        color: #fff;
        padding: 16px;
        text-align: center;
      }
      .email-body {
        padding: 16px;
      }
      .email-footer {
        padding: 16px;
        background-color: #f7fafc;
        text-align: center;
        color: #4a5568;
        font-size: 14px;
      }
      .link-primary {
        color: #3498db;
        text-decoration: none;
      }
    </style>
  </head>
  <body>
    <div class="email-container">
      <div class="email-header">Reset your password for {appTitle}</div>
      <div class="email-body">
        <p>Hello,</p>
        <p>
          Follow this link to reset your {appTitle} password for your
          {accountName} account.
        </p>
        <p><a href="{resetUrl}" class="link-primary">{resetUrl}</a></p>
        <p>
          If you didn't ask to reset your password, you can ignore this email.
        </p>
      </div>
      <div class="email-footer">
        Thanks,<br />
        The {appTitle} Team
      </div>
    </div>
  </body>
</html>

View File

@ -0,0 +1,41 @@
const config = require('../../config');
const assert = require('assert');
const nodemailer = require('nodemailer');
module.exports = class EmailSender {
constructor(email) {
this.email = email;
}
async send() {
assert(this.email, 'email is required');
assert(this.email.to, 'email.to is required');
assert(this.email.subject, 'email.subject is required');
assert(this.email.html, 'email.html is required');
const htmlContent = await this.email.html();
const transporter = nodemailer.createTransport(this.transportConfig);
const mailOptions = {
from: this.from,
to: this.email.to,
subject: this.email.subject,
html: htmlContent,
};
return transporter.sendMail(mailOptions);
}
static get isConfigured() {
return !!config.email?.auth?.pass && !!config.email?.auth?.user;
}
get transportConfig() {
return config.email;
}
get from() {
return config.email.from;
}
};

View File

@ -0,0 +1,41 @@
const { getNotification } = require('../../notifications/helpers');
const fs = require('fs').promises;
const path = require('path');
module.exports = class EmailAddressVerificationEmail {
constructor(to, link) {
this.to = to;
this.link = link;
}
get subject() {
return getNotification(
'emails.emailAddressVerification.subject',
getNotification('app.title'),
);
}
async html() {
try {
const templatePath = path.join(
__dirname,
'../../email/htmlTemplates/addressVerification/emailAddressVerification.html',
);
const template = await fs.readFile(templatePath, 'utf8');
const appTitle = getNotification('app.title');
const signupUrl = this.link;
let html = template
.replace(/{appTitle}/g, appTitle)
.replace(/{signupUrl}/g, signupUrl)
.replace(/{to}/g, this.to);
return html;
} catch (error) {
console.error('Error generating invitation email HTML:', error);
throw error;
}
}
};

View File

@ -0,0 +1,41 @@
const fs = require('fs').promises;
const path = require('path');
const { getNotification } = require('../../notifications/helpers');
module.exports = class InvitationEmail {
constructor(to, host) {
this.to = to;
this.host = host;
}
get subject() {
return getNotification(
'emails.invitation.subject',
getNotification('app.title'),
);
}
async html() {
try {
const templatePath = path.join(
__dirname,
'../../email/htmlTemplates/invitation/invitationTemplate.html',
);
const template = await fs.readFile(templatePath, 'utf8');
const appTitle = getNotification('app.title');
const signupUrl = `${this.host}&invitation=true`;
let html = template
.replace(/{appTitle}/g, appTitle)
.replace(/{signupUrl}/g, signupUrl)
.replace(/{to}/g, this.to);
return html;
} catch (error) {
console.error('Error generating invitation email HTML:', error);
throw error;
}
}
};

View File

@ -0,0 +1,42 @@
const { getNotification } = require('../../notifications/helpers');
const path = require('path');
const { promises: fs } = require('fs');
module.exports = class PasswordResetEmail {
constructor(to, link) {
this.to = to;
this.link = link;
}
get subject() {
return getNotification(
'emails.passwordReset.subject',
getNotification('app.title'),
);
}
async html() {
try {
const templatePath = path.join(
__dirname,
'../../email/htmlTemplates/passwordReset/passwordResetEmail.html',
);
const template = await fs.readFile(templatePath, 'utf8');
const appTitle = getNotification('app.title');
const resetUrl = this.link;
const accountName = this.to;
let html = template
.replace(/{appTitle}/g, appTitle)
.replace(/{resetUrl}/g, resetUrl)
.replace(/{accountName}/g, accountName);
return html;
} catch (error) {
console.error('Error generating invitation email HTML:', error);
throw error;
}
}
};

View File

@ -0,0 +1,957 @@
const fs = require('fs').promises;
const os = require('os');
const path = require('path');
const AdmZip = require('adm-zip');
const { exec } = require('child_process');
const util = require('util');
// Babel Parser for JS/TS/TSX
const babelParser = require('@babel/parser');
const babelParse = babelParser.parse;
// Local App DB Connection
const database = require('./database');
// PostCSS for CSS
const postcss = require('postcss');
const execAsync = util.promisify(exec);
module.exports = class ExecutorService {
static async readProjectTree (directoryPath) {
const paths = {
frontend: '../../../frontend',
backend: '../../../backend',
default: '../../../'
};
try {
const publicDir = path.join(__dirname, paths[directoryPath] || directoryPath || paths.default);
return await getDirectoryTree(publicDir);
} catch (error) {
console.error('Error reading directory:', error);
throw error;
}
}
static async readFileContents(filePath, showLines) {
try {
const fullPath = path.join(__dirname, filePath);
const content = await fs.readFile(fullPath, 'utf8');
if (showLines) {
const lines = content.split('\n');
const lineObject = {};
lines.forEach((line, index) => {
lineObject[index + 1] = line;
});
return lineObject;
} else {
return content;
}
} catch (error) {
console.error('Error reading file:', error);
throw error;
}
}
static async countFileLines(filePath) {
try {
const fullPath = path.join(__dirname, filePath);
// Check file exists
await fs.access(fullPath);
// Read file content
const content = await fs.readFile(fullPath, 'utf8');
// Split by newline and count
const lines = content.split('\n');
return {
success: true,
lineCount: lines.length
};
} catch (error) {
console.error('Error counting file lines:', error);
return {
success: false,
message: error.message
};
}
}
// static async readFileHeader(filePath, N = 30) {
// try {
// const fullPath = path.join(__dirname, filePath);
// const content = await fs.readFile(fullPath, 'utf8');
// const lines = content.split('\n');
//
// if (lines.length < N) {
// return { error: `File has less than ${N} lines` };
// }
//
// const headerLines = lines.slice(0, Math.min(50, lines.length));
//
// const lineObject = {};
// headerLines.forEach((line, index) => {
// lineObject[index + 1] = line;
// });
//
// return lineObject;
// } catch (error) {
// console.error('Error reading file header:', error);
// throw error;
// }
// }
static async readFileLineContext(filePath, lineNumber, windowSize, showLines) {
try {
const fullPath = path.join(__dirname, filePath);
const content = await fs.readFile(fullPath, 'utf8');
const lines = content.split('\n');
const start = Math.max(0, lineNumber - windowSize);
const end = Math.min(lines.length, lineNumber + windowSize + 1);
const contextLines = lines.slice(start, end);
if (showLines) {
const lineObject = {};
contextLines.forEach((line, index) => {
lineObject[start + index + 1] = line;
});
return lineObject;
} else {
return contextLines.join('\n');
}
} catch (error) {
console.error('Error reading file line context:', error);
throw error;
}
}
static async validateFile(filePath) {
console.log('Validating file:', filePath);
// Read file content
let content;
try {
content = await fs.readFile(filePath, 'utf8');
} catch (err) {
throw new Error(`Could not read file: ${filePath}\n${err.message}`);
}
// Determine file extension
let ext = path.extname(filePath).toLowerCase();
if (ext === '.temp') {
ext = path.extname(filePath.slice(0, -5)).toLowerCase();
}
try {
switch (ext) {
case '.js':
case '.ts':
case '.tsx': {
// Parse JS/TS/TSX with Babel
babelParse(content, {
sourceType: 'module',
// plugins array covers JS, TS, TSX, and optional JS flavors
plugins: ['jsx', 'typescript']
});
break;
}
case '.css': {
// Parse CSS with PostCSS
postcss.parse(content);
break;
}
default: {
// If the extension isn't recognized, assume it's "valid"
// or you could throw an error to force a known extension
console.warn(`No validation implemented for extension "${ext}". Skipping syntax check.`);
}
}
// If parsing succeeded, return true
return true;
} catch (parseError) {
// Rethrow parse errors with a friendlier message
throw parseError;
}
}
static async writeFile(filePath, fileContents, comment) {
try {
console.log(comment)
const fullPath = path.join(__dirname, filePath);
// Write to a temp file first
const tempPath = `${fullPath}.temp`;
await fs.writeFile(tempPath, fileContents, 'utf8');
// Validate the temp file
await this.validateFile(tempPath);
// Rename temp file to original path
await fs.rename(tempPath, fullPath);
return true;
} catch (error) {
console.error('Error writing file:', error);
throw error;
}
}
static async insertFileContent(filePath, lineNumber, newContent, message) {
try {
const fullPath = path.join(__dirname, filePath);
// Check file exists
await fs.access(fullPath);
// Read and split by line
const content = await fs.readFile(fullPath, 'utf8');
const lines = content.split('\n');
// Ensure lineNumber is within [1 ... lines.length + 1]
// 1 means "insert at the very first line"
// lines.length + 1 means "append at the end"
if (lineNumber < 1) {
lineNumber = 1;
}
if (lineNumber > lines.length + 1) {
lineNumber = lines.length + 1;
}
// Convert to 0-based index
const insertIndex = lineNumber - 1;
// Prepare preview
const preview = {
insertionLine: lineNumber,
insertedLines: newContent.split('\n')
};
// Insert newContent lines at the specified index
lines.splice(insertIndex, 0, ...newContent.split('\n'));
// Write changes to a temp file first
const updatedContent = lines.join('\n');
const tempPath = `${fullPath}.temp`;
await fs.writeFile(tempPath, updatedContent, 'utf8');
await this.validateFile(tempPath);
// Rename temp file to original path
await fs.rename(tempPath, fullPath);
return {
success: true
};
} catch (error) {
console.error('Error inserting file content:', error);
throw error;
}
}
static async replaceFileLine(filePath, lineNumber, newText, message = null) {
const fullPath = path.join(__dirname, filePath);
try {
try {
await fs.access(fullPath);
} catch (error) {
throw new Error(`File not found: ${filePath}`);
}
const content = await fs.readFile(fullPath, 'utf8');
const lines = content.split('\n');
if (lineNumber < 1 || lineNumber > lines.length) {
throw new Error(`Invalid line number: ${lineNumber}. File has ${lines.length} lines`);
}
if (typeof newText !== 'string') {
throw new Error('New text must be a string');
}
const preview = {
oldLine: lines[lineNumber - 1],
newLine: newText,
lineNumber: lineNumber
};
lines[lineNumber - 1] = newText;
const newContent = lines.join('\n');
const tempPath = `${fullPath}.temp`;
await fs.writeFile(tempPath, newContent, 'utf8');
await this.validateFile(tempPath);
await fs.rename(tempPath, fullPath);
return {
success: true
};
} catch (error) {
console.error('Error updating file line:', error);
try {
await fs.unlink(`${fullPath}.temp`);
} catch {}
throw {
error: error,
message: error.message,
details: error.stack
};
}
}
static async replaceFileChunk(filePath, startLine, endLine, newCode) {
try {
// Check if this is a single-line change
const newCodeLines = newCode.split('\n');
if (newCodeLines.length === 1 && endLine === startLine) {
// Redirect to replace_file_line
return await this.replaceFileLine(filePath, startLine, newCode);
}
const fullPath = path.join(__dirname, filePath);
// Check if file exists
try {
await fs.access(fullPath);
} catch (error) {
throw new Error(`File not found: ${filePath}`);
}
const content = await fs.readFile(fullPath, 'utf8');
const lines = content.split('\n');
// Adjust line numbers to array indices (subtract 1)
const startIndex = startLine - 1;
const endIndex = endLine - 1;
// Validate input parameters
if (startIndex < 0 || endIndex >= lines.length || startIndex > endIndex) {
throw new Error(`Invalid line range: ${startLine}-${endLine}. File has ${lines.length} lines`);
}
// Check type of new code
if (typeof newCode !== 'string') {
throw new Error('New code must be a string');
}
// Create changes preview
const preview = {
oldLines: lines.slice(startIndex, endIndex + 1),
newLines: newCode.split('\n'),
startLine,
endLine
};
// Apply changes to temp file first
lines.splice(startIndex, endIndex - startIndex + 1, ...newCode.split('\n'));
const newContent = lines.join(os.EOL);
const tempPath = `${fullPath}.temp`;
await fs.writeFile(tempPath, newContent, 'utf8');
await this.validateFile(tempPath);
// Apply changes if all validations passed
await fs.rename(tempPath, fullPath);
return {
success: true
};
} catch (error) {
console.error('Error updating file slice:', error);
// Clean up temp file if exists
try {
await fs.unlink(`${fullPath}.temp`);
} catch {}
throw {
error: error,
message: error.message,
details: error.details || error.stack
};
}
}
static async replaceCodeBlock(filePath, oldCode, newCode, message) {
try {
console.log(message);
const fullPath = path.join(__dirname, filePath);
// Check file exists
await fs.access(fullPath);
// Read file content
let content = await fs.readFile(fullPath, 'utf8');
// A small helper to unify line breaks to just `\n`
const unifyLineBreaks = (str) => str.replace(/\r\n/g, '\n');
// Normalize line breaks in file content, oldCode, and newCode
content = unifyLineBreaks(content);
oldCode = unifyLineBreaks(oldCode);
newCode = unifyLineBreaks(newCode);
// Optional: Trim trailing spaces or handle other whitespace normalization if needed
// oldCode = oldCode.trim();
// newCode = newCode.trim();
// Check if oldCode actually exists in the content
const index = content.indexOf(oldCode);
if (index === -1) {
return {
success: false,
message: 'Old code not found in file.'
};
}
// Create a preview before replacing
const preview = {
oldCodeSnippet: oldCode,
newCodeSnippet: newCode
};
// Perform replacement (single occurrence). For multiple, use replaceAll or a loop.
// If you want a global replacement, consider:
// content = content.split(oldCode).join(newCode);
content = content.replace(oldCode, newCode);
// Write to a temp file first
const tempPath = `${fullPath}.temp`;
await fs.writeFile(tempPath, content, 'utf8');
await this.validateFile(tempPath);
// Rename temp file to original
await fs.rename(tempPath, fullPath);
return {
success: true
};
} catch (error) {
console.error('Error replacing code:', error);
return {
error: error,
message: error.message,
details: error.details || error.stack
};
}
}
//todo add validation
static async deleteFileLines(filePath, startLine, endLine, veryShortDescription) {
try {
const fullPath = path.join(__dirname, filePath);
// Check if file exists
await fs.access(fullPath);
// Read file content
const content = await fs.readFile(fullPath, 'utf8');
const lines = content.split('\n');
// Convert to zero-based indices
const startIndex = startLine - 1;
const endIndex = endLine - 1;
// Validate range
if (startIndex < 0 || endIndex >= lines.length || startIndex > endIndex) {
throw new Error(
`Invalid line range: ${startLine}-${endLine}. File has ${lines.length} lines`
);
}
// Prepare a preview of the lines being deleted
const preview = {
deletedLines: lines.slice(startIndex, endIndex + 1),
startLine,
endLine
};
// Remove lines
lines.splice(startIndex, endIndex - startIndex + 1);
// Join remaining lines and write to a temporary file
const newContent = lines.join('\n');
const tempPath = `${fullPath}.temp`;
await fs.writeFile(tempPath, newContent, 'utf8');
await this.validateFile(tempPath);
// Rename temp file to original
await fs.rename(tempPath, fullPath);
return {
success: true
};
} catch (error) {
console.error('Error deleting file lines:', error);
return {
error: error,
message: error.message,
details: error.details || error.stack
};
}
}
static async validateTypeScript(filePath, content = null) {
try {
// Basic validation of JSX syntax
const jsxErrors = [];
if (content !== null) {
// Check for matching braces
if ((content.match(/{/g) || []).length !== (content.match(/}/g) || []).length) {
jsxErrors.push("Unmatched curly braces");
}
// Check for invalid syntax in JSX attributes
if (content.includes('label={')) {
if (!content.match(/label={[^}]+}/)) {
jsxErrors.push("Invalid label attribute syntax");
}
}
if (jsxErrors.length > 0) {
return {
valid: false,
errors: jsxErrors.map(error => ({
code: 'JSX_SYNTAX_ERROR',
severity: 'error',
location: '',
message: error
}))
};
}
}
return {
valid: true,
errors: [],
errorCount: 0,
warningCount: 0
};
} catch (error) {
console.error('TypeScript validation error:', error);
return {
valid: false,
errors: [{
code: 'VALIDATION_FAILED',
severity: 'error',
location: '',
message: `TypeScript validation error: ${error.message}`
}],
errorCount: 1,
warningCount: 0
};
}
}
/**
 * Smoke-tests a backend checkout: `node --check` for syntax, then a short
 * require() run under NODE_ENV=test, capped at 5 seconds.
 * @param {string} backendPath - Backend project root (must contain src/index.js).
 * @returns {Promise<{valid: boolean, error?: string, warning?: string}>}
 */
static async validateBackendFiles(backendPath) {
try {
// Check for syntax errors
await execAsync(`node --check ${backendPath}/src/index.js`);
// Try to run the code in a test environment
const testProcess = exec(
'NODE_ENV=test node -e "try { require(\'./src/index.js\') } catch(e) { console.error(e); process.exit(1) }"',
{ cwd: backendPath }
);
return new Promise((resolve) => {
let output = '';
let error = '';
testProcess.stdout.on('data', (data) => {
output += data;
});
testProcess.stderr.on('data', (data) => {
error += data;
});
// Exit code 0 means the module loaded; otherwise surface captured stderr/stdout.
testProcess.on('close', (code) => {
if (code === 0) {
resolve({ valid: true });
} else {
resolve({
valid: false,
error: error || output
});
}
});
// Timeout on validation. If 'close' already fired, this second resolve()
// is a no-op (a Promise settles only once). NOTE(review): the timer is
// never cleared, so it still fires ~5s later and kills the (possibly
// already-exited) process — confirm this is acceptable.
setTimeout(() => {
testProcess.kill();
resolve({
valid: true,
warning: 'Validation timeout, but no immediate errors found'
});
}, 5000);
});
} catch (error) {
// `node --check` failed (syntax error) or exec itself errored.
return {
valid: false,
error: error.message
};
}
}
/**
 * Copies frontend/ and backend/ (minus node_modules and dot-paths) into
 * backups/<timestamp>/ under ROOT_PATH using a shell find/cp pipeline.
 * @param {string} ROOT_PATH - Project root containing frontend/ and backend/.
 * @returns {Promise<string>} Absolute path of the created backup directory.
 * @throws Re-throws any mkdir/exec failure after logging it.
 */
static async createBackup(ROOT_PATH) {
// ISO timestamp with ':' and '.' replaced so it is a valid directory name.
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const backupDir = path.join(ROOT_PATH, 'backups', timestamp);
try {
await fs.mkdir(path.join(ROOT_PATH, 'backups'), { recursive: true });
const dirsToBackup = ['frontend', 'backend'];
for (const dir of dirsToBackup) {
const sourceDir = path.join(ROOT_PATH, dir);
const targetDir = path.join(backupDir, dir);
await fs.mkdir(targetDir, { recursive: true });
// Re-create the directory structure file-by-file, excluding
// node_modules and hidden paths; cp preserves each file's subpath.
await execAsync(
`cd "${sourceDir}" && ` +
`find . -type f -not -path "*/node_modules/*" -not -path "*/\\.*" | ` +
`while read file; do ` +
`mkdir -p "${targetDir}/$(dirname "$file")" && ` +
`cp "$file" "${targetDir}/$file"; ` +
`done`
);
}
console.log('Backup created at:', backupDir);
return backupDir;
} catch (error) {
console.error('Error creating backup:', error);
throw error;
}
}
static async restoreFromBackup(backupDir, ROOT_PATH) {
try {
console.log('Restoring from backup:', backupDir);
await execAsync(`rm -rf ${ROOT_PATH}/backend/*`);
await execAsync(`cp -r ${backupDir}/* ${ROOT_PATH}/backend/`);
return true;
} catch (error) {
console.error('Error restoring from backup:', error);
throw error;
}
}
/**
 * Applies a project-update zip: size-checks it, copies it to /tmp, then
 * kicks off a detached background task that stops the dev services,
 * extracts the archive over the project root, deletes files listed in
 * removed_files.json, and restarts the services. Returns immediately —
 * the caller gets "initiated", not "completed".
 * @param {string} zipFilePath - Path to the uploaded zip archive.
 * @returns {Promise<{success: boolean, message?: string, error?: string}>}
 */
static async updateProjectFilesFromScheme(zipFilePath) {
const MAX_FILE_SIZE = 10 * 1024 * 1024;
const ROOT_PATH = path.join(__dirname, '../../../');
try {
console.log('Checking file access...');
await fs.access(zipFilePath);
console.log('Getting file stats...');
const stats = await fs.stat(zipFilePath);
console.log('File size:', stats.size);
if (stats.size > MAX_FILE_SIZE) {
console.log('File size exceeds limit');
return { success: false, error: 'File size exceeds limit' };
}
// Copying zip file to /tmp so the source upload can be cleaned up
// independently of the background task.
const tempZipPath = path.join('/tmp', path.basename(zipFilePath));
await fs.copyFile(zipFilePath, tempZipPath);
// Launching background update process — deliberately NOT awaited so the
// HTTP response returns before services go down.
const servicesUpdate = (async () => {
try {
console.log('Stopping services...');
await stopServices();
console.log('Creating zip instance...');
const zip = new AdmZip(tempZipPath);
console.log('Extracting files to:', ROOT_PATH);
// Second arg true = overwrite existing files.
zip.extractAllTo(ROOT_PATH, true);
console.log('Files extracted');
// removed_files.json (if shipped in the archive) lists paths to delete.
const removedFilesPath = path.join(ROOT_PATH, 'removed_files.json');
try {
await fs.access(removedFilesPath);
const removedFilesContent = await fs.readFile(removedFilesPath, 'utf8');
const filesToRemove = JSON.parse(removedFilesContent);
await removeFiles(filesToRemove, ROOT_PATH);
await fs.unlink(removedFilesPath);
} catch (error) {
// Missing manifest is the normal case, not a failure.
console.log('No removed files to process or error accessing removed_files.json:', error);
}
// Remove temp zip file
await fs.unlink(tempZipPath);
// Start services after a delay
setTimeout(() => {
startServices()
.then(() => console.log('Services started successfully'))
.catch(e => console.error('Failed to start services:', e));
}, 1000);
} catch (error) {
console.error('Error in service update process:', error);
}
})();
// Safety net: the IIFE already catches internally, but never leave an
// unhandled rejection.
servicesUpdate.catch(error => {
console.error('Background update process failed:', error);
});
console.log('Returning immediate response');
return {
success: true,
message: 'Update process initiated'
};
} catch (error) {
console.error('Critical error in updateProjectFilesFromScheme:', error);
return {
success: false,
error: error.message
};
}
}
static async getDBSchema() {
try {
return await database.getDBSchema();
} catch (error) {
console.error('Error reading schema:', error);
throw {
error: error,
message: error.message,
details: error.details || error.stack
};
}
}
static async executeSQL(query) {
try {
return await database.executeSQL(query);
} catch (error) {
console.error('Error executing query:', error);
throw {
error: error,
message: error.message,
details: error.details || error.stack
};
}
}
};
/**
 * Recursively builds a flat { path: lineCount } map of all files under
 * dirPath, skipping dependency/VCS/IDE directories. Any '/app' prefix is
 * stripped from keys so container paths read as project-relative.
 * @param {string} dirPath - Directory to walk.
 * @returns {Promise<Object<string, number>>}
 */
async function getDirectoryTree(dirPath) {
  const IGNORED_DIRS = new Set(['node_modules', 'app-shell', '.git', '.idea']);
  const tree = {};
  const entries = await fs.readdir(dirPath, { withFileTypes: true });
  for (const entry of entries) {
    if (entry.isDirectory() && IGNORED_DIRS.has(entry.name)) {
      continue;
    }
    const fullPath = path.join(dirPath, entry.name);
    if (entry.isDirectory()) {
      const subTree = await getDirectoryTree(fullPath);
      for (const [key, count] of Object.entries(subTree)) {
        tree[key.replace('/app', '')] = count;
      }
    } else {
      const fileContent = await fs.readFile(fullPath, 'utf8');
      tree[fullPath.replace('/app', '')] = fileContent.split('\n').length;
    }
  }
  return tree;
}
/**
 * Sends SIGTERM to the frontend (next-server) and backend (node src/index.js)
 * dev processes, then waits 2s for graceful shutdown.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function stopServices() {
  try {
    console.log('Finding service processes...');
    // Frontend: the [n] bracket trick keeps grep from matching itself.
    const { stdout: frontendProcess } = await execAsync("ps -o pid,cmd | grep '[n]ext-server' | awk '{print $1}'");
    const frontendPid = frontendProcess.trim();
    if (frontendPid) {
      console.log('Stopping frontend, pid:', frontendPid);
      await execAsync(`kill -15 ${frontendPid}`);
    }
    // Backend: exclude the app-shell's own node process.
    const { stdout: backendProcess } = await execAsync("ps -o pid,cmd | grep '[n]ode ./src/index.js' | grep -v app-shell | awk '{print $1}'");
    const backendPid = backendProcess.trim();
    if (backendPid) {
      console.log('Stopping backend, pid:', backendPid);
      await execAsync(`kill -15 ${backendPid}`);
    }
    // Grace period for SIGTERM handlers before callers proceed.
    await new Promise((resolve) => setTimeout(resolve, 2000));
    return { success: true };
  } catch (error) {
    console.error('Error stopping services:', error);
    return { success: false, error: error.message };
  }
}
/**
 * Launches the frontend and backend dev servers (detached via trailing '&').
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function startServices() {
  const launchCommands = [
    'yarn --cwd /app/frontend dev &',
    'yarn --cwd /app/backend start &',
  ];
  try {
    console.log('Starting services...');
    for (const command of launchCommands) {
      await execAsync(command);
    }
    return { success: true };
  } catch (error) {
    console.error('Error starting services:', error);
    return { success: false, error: error.message };
  }
}
/**
 * Reports whether the frontend, backend, and nginx processes are running,
 * based on a `ps aux` snapshot.
 * @returns {Promise<{success: boolean, frontendRunning?: boolean, backendRunning?: boolean, nginxRunning?: boolean, error?: string}>}
 */
async function checkStatus() {
  try {
    const { stdout: processList } = await execAsync('ps aux');
    const isRunning = (needle) => processList.includes(needle);
    return {
      success: true,
      frontendRunning: isRunning('next-server'),
      backendRunning: isRunning('nodemon') && isRunning('/app/backend'),
      nginxRunning: isRunning('nginx: master process'),
    };
  } catch (error) {
    return {
      success: false,
      error: error.message,
    };
  }
}
/**
 * Heuristic, line-by-line JSX validation (not a real parser). Checks each
 * non-empty trimmed line for: attribute shape (name={...}), invalid ",,"
 * sequences, ternary shape, and per-line brace balance.
 * @param {string} code - JSX snippet to check.
 * @returns {Promise<{valid: boolean, errors: Array<{code, severity, location, message}>}>}
 */
async function validateJSXSyntax(code) {
  const failure = (message) => ({
    valid: false,
    errors: [{
      code: 'JSX_SYNTAX_ERROR',
      severity: 'error',
      location: '',
      message
    }]
  });
  for (const rawLine of code.split('\n')) {
    const trimmedLine = rawLine.trim();
    // Skip empty lines
    if (!trimmedLine) continue;
    // Rule 1: lines with an '=' must look like a JSX attribute expression.
    if (trimmedLine.includes('=') && !/^[a-zA-Z][a-zA-Z0-9]*={.*}$/.test(trimmedLine)) {
      return failure('Invalid JSX attribute syntax');
    }
    // Rule 2: sequences like ",," are never valid.
    if (/,{2,}/.test(trimmedLine)) {
      return failure('Invalid character sequence detected');
    }
    // Rule 3: ternary shape — only enforced when the line actually contains
    // a ternary ('?'). The previous code applied this "should match" rule to
    // EVERY '=' line, so plain attributes like label={title} were rejected.
    if (
      trimmedLine.includes('=') &&
      trimmedLine.includes('?') &&
      !/^[a-zA-Z][a-zA-Z0-9]*={[\w\s]+\?[^}]+:[^}]+}$/.test(trimmedLine)
    ) {
      return failure('Invalid ternary expression in JSX');
    }
    // Braces must balance within each line.
    if ((trimmedLine.match(/{/g) || []).length !== (trimmedLine.match(/}/g) || []).length) {
      return failure('Unmatched curly braces in JSX');
    }
  }
  // If all checks pass
  return {
    valid: true,
    errors: []
  };
}
/**
 * Best-effort deletion of a list of files under rootPath. Per-file failures
 * (e.g. already deleted) are logged and skipped; only a failure iterating
 * the list itself propagates.
 * @param {string[]} files - Paths relative to rootPath.
 * @param {string} rootPath - Base directory.
 * @throws Re-throws non-per-file failures after logging them.
 */
async function removeFiles(files, rootPath) {
  try {
    for (const file of files) {
      const target = path.join(rootPath, file);
      try {
        await fs.unlink(target);
        console.log(`File removed: ${target}`);
      } catch (error) {
        // Deliberate best-effort: one bad entry must not stop the batch.
        console.error(`Error when trying to delete a file ${target}:`, error);
      }
    }
  } catch (error) {
    console.error('Error removing files:', error);
    throw error;
  }
}

View File

@ -0,0 +1,16 @@
const { getNotification, isNotification } = require('../helpers');
module.exports = class ForbiddenError extends Error {
constructor(messageCode) {
let message;
if (messageCode && isNotification(messageCode)) {
message = getNotification(messageCode);
}
message = message || getNotification('errors.forbidden.message');
super(message);
this.code = 403;
}
};

View File

@ -0,0 +1,16 @@
const { getNotification, isNotification } = require('../helpers');
module.exports = class ValidationError extends Error {
constructor(messageCode) {
let message;
if (messageCode && isNotification(messageCode)) {
message = getNotification(messageCode);
}
message = message || getNotification('errors.validation.message');
super(message);
this.code = 400;
}
};

View File

@ -0,0 +1,30 @@
const _get = require('lodash/get');
const errors = require('./list');
/**
 * Interpolates positional placeholders: '{0}', '{1}', ... are replaced by
 * the corresponding entries of args; unknown indices are left verbatim.
 * @param {?string} message - Template text; falsy input yields null.
 * @param {Array} args - Positional substitution values.
 * @returns {?string}
 */
function format(message, args) {
  if (!message) {
    return null;
  }
  // Strict comparison instead of the loose `typeof ... != 'undefined'`;
  // args[index] is always safe to read on an array.
  return message.replace(/{(\d+)}/g, (match, index) =>
    args[index] !== undefined ? args[index] : match,
  );
}
/**
 * True when a notification message exists in the catalog at the given
 * dotted key path.
 * @param {string} key - Dotted path into the errors catalog.
 * @returns {boolean}
 */
const isNotification = (key) => Boolean(_get(errors, key));
/**
 * Looks up a catalog message by dotted key and interpolates positional
 * placeholders; returns the key itself when no message is registered.
 * @param {string} key - Dotted path into the errors catalog.
 * @param {...*} args - Positional substitution values for {0}, {1}, ...
 * @returns {string}
 */
const getNotification = (key, ...args) => {
  const template = _get(errors, key);
  return template ? format(template, args) : key;
};
exports.getNotification = getNotification;
exports.isNotification = isNotification;

View File

@ -0,0 +1,100 @@
// Central message catalog for the backend. Values may contain positional
// placeholders ({0}, {1}, ...) filled in by helpers.getNotification.
const errors = {
// Application metadata (used in email subjects/bodies as {0}).
app: {
title: 'test',
},
// Authentication and account-lifecycle messages.
auth: {
userDisabled: 'Your account is disabled',
forbidden: 'Forbidden',
unauthorized: 'Unauthorized',
userNotFound: `Sorry, we don't recognize your credentials`,
wrongPassword: `Sorry, we don't recognize your credentials`,
weakPassword: 'This password is too weak',
emailAlreadyInUse: 'Email is already in use',
invalidEmail: 'Please provide a valid email',
passwordReset: {
invalidToken: 'Password reset link is invalid or has expired',
error: `Email not recognized`,
},
passwordUpdate: {
samePassword: `You can't use the same password. Please create new password`,
},
userNotVerified: `Sorry, your email has not been verified yet`,
emailAddressVerificationEmail: {
invalidToken: 'Email verification link is invalid or has expired',
error: `Email not recognized`,
},
},
// User-management (IAM) errors.
iam: {
errors: {
userAlreadyExists: 'User with this email already exists',
userNotFound: 'User not found',
disablingHimself: `You can't disable yourself`,
revokingOwnPermission: `You can't revoke your own owner permission`,
deletingHimself: `You can't delete yourself`,
emailRequired: 'Email is required',
},
},
// Spreadsheet-import errors.
importer: {
errors: {
invalidFileEmpty: 'The file is empty',
invalidFileExcel: 'Only excel (.xlsx) files are allowed',
invalidFileUpload:
'Invalid file. Make sure you are using the last version of the template.',
importHashRequired: 'Import hash is required',
importHashExistent: 'Data has already been imported',
userEmailMissing: 'Some items in the CSV do not have an email',
},
},
// Generic fallback messages used by the error classes.
errors: {
forbidden: {
message: 'Forbidden',
},
validation: {
message: 'An error occurred',
},
searchQueryRequired: {
message: 'Search query is required',
},
},
// Transactional email subjects and fallback HTML bodies.
emails: {
invitation: {
subject: `You've been invited to {0}`,
body: `
<p>Hello,</p>
<p>You've been invited to {0} set password for your {1} account.</p>
<p><a href='{2}'>{2}</a></p>
<p>Thanks,</p>
<p>Your {0} team</p>
`,
},
emailAddressVerification: {
subject: `Verify your email for {0}`,
body: `
<p>Hello,</p>
<p>Follow this link to verify your email address.</p>
<p><a href='{0}'>{0}</a></p>
<p>If you didn't ask to verify this address, you can ignore this email.</p>
<p>Thanks,</p>
<p>Your {1} team</p>
`,
},
passwordReset: {
subject: `Reset your password for {0}`,
body: `
<p>Hello,</p>
<p>Follow this link to reset your {0} password for your {1} account.</p>
<p><a href='{2}'>{2}</a></p>
<p>If you didn't ask to reset your password, you can ignore this email.</p>
<p>Thanks,</p>
<p>Your {0} team</p>
`,
},
},
};
module.exports = errors;

View File

@ -0,0 +1,392 @@
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const path = require('path');
const { promises: fs } = require("fs");
const axios = require('axios');
const ROOT_PATH = '/app';
// 50 MB stdout/stderr buffer for git commands (clones can be chatty).
const MAX_BUFFER = 1024 * 1024 * 50;
// Environment overrides keep behavior identical when unset but let
// deployments avoid baked-in values.
const GITEA_DOMAIN = process.env.GITEA_DOMAIN || 'gitea.flatlogic.app';
const USERNAME = process.env.GITEA_USERNAME || 'admin';
// SECURITY: a live API token is hard-coded and committed to source control.
// Rotate this credential and supply it exclusively via configuration.
const API_TOKEN = process.env.GITEA_API_TOKEN || 'f22e83489657f49c320d081fc934e5c9daacfa08';
class VCS {
// Main method controller of the repository initialization process
static async initRepo(projectId = 'test') {
try {
// 1. Ensure the remote repository exists (create if needed)
const remoteUrl = await this.setupRemote(projectId);
// 2. Set up the local repository (initialization, fetching and reset)
await this.setupLocalRepo(remoteUrl);
console.log(`[DEBUG] Repository "${projectId}" is ready (remote code applied).`);
return { message: `Repository ${projectId} is ready (remote code applied).` };
} catch (error) {
throw new Error(`Error during repo initialization: ${error.message}`);
}
}
// Checks for the existence of the remote repo and creates it if it doesn't exist
static async setupRemote(projectId) {
console.log(`[DEBUG] Checking remote repository "${projectId}"...`);
let repoData = await this.checkRepoExists(projectId);
if (!repoData) {
console.log(`[DEBUG] Remote repository "${projectId}" does not exist. Creating...`);
repoData = await this.createRemoteRepo(projectId);
console.log(`[DEBUG] Remote repository created: ${JSON.stringify(repoData)}`);
} else {
console.log(`[DEBUG] Remote repository "${projectId}" already exists.`);
}
// Return the URL with token authentication
return `https://${USERNAME}:${API_TOKEN}@${GITEA_DOMAIN}/${USERNAME}/${projectId}.git`;
}
// Sets up the local repository: either fetches/reset if .git exists,
// initializes git in a non-empty directory, or clones the repository if empty.
/**
 * Brings the local working copy in sync with the remote:
 * - existing .git  -> fetch and hard-reset;
 * - no .git but files present -> initialize git in place;
 * - empty directory -> clone.
 * @param {string} remoteUrl - Token-authenticated clone URL from setupRemote.
 */
static async setupLocalRepo(remoteUrl) {
const gitDir = path.join(ROOT_PATH, '.git');
const localRepoExists = await this.exists(gitDir);
if (localRepoExists) {
await this.fetchAndResetRepo();
} else {
const files = await fs.readdir(ROOT_PATH);
if (files.length > 0) {
await this.initializeGitRepo(remoteUrl);
} else {
console.log('[DEBUG] Local directory is empty. Cloning remote repository...');
await exec(`git clone ${remoteUrl} .`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
}
}
}
// Check if a file/directory exists
static async exists(pathToCheck) {
try {
await fs.access(pathToCheck);
return true;
} catch {
return false;
}
}
// If the local repository exists, fetches remote data and resets the repository state
/**
 * For an existing local repo: fetch, then hard-reset to origin/fl-dev.
 * Falls back to origin/master (creating and pushing fl-dev from it), and
 * finally to an initial commit when neither remote branch exists.
 */
static async fetchAndResetRepo() {
console.log('[DEBUG] Local repository exists. Fetching remote...');
await exec(`git fetch origin`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
const branchReset = await this.tryResetToBranch('fl-dev');
if (!branchReset) {
// If 'fl-dev' branch is not found, try 'master'
const masterReset = await this.tryResetToBranch('master');
if (masterReset) {
// Create 'fl-dev' branch and push it to remote
console.log('[DEBUG] Creating and switching to branch "fl-dev"...');
await exec(`git branch fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
await exec(`git checkout fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Pushing fl-dev branch to remote...');
// --force: the remote fl-dev is being (re)created from the local state.
await exec(`git push -u origin fl-dev --force`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
} else {
// If neither remote master nor fl-dev exist make an initial commit
console.log('[DEBUG] Neither "origin/fl-dev" nor "origin/master" exist. Creating initial commit...');
await this.commitInitialChanges();
}
}
}
// Tries to check out and reset to the specified branch
static async tryResetToBranch(branchName) {
try {
console.log(`[DEBUG] Checking for remote branch "origin/${branchName}"...`);
await exec(`git rev-parse --verify origin/${branchName}`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log(`[DEBUG] Remote branch "origin/${branchName}" found. Resetting local repository to "origin/${branchName}"...`);
await exec(`git reset --hard origin/${branchName}`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
await exec(`git checkout ${branchName === 'fl-dev' ? 'fl-dev' : branchName}`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
return true;
} catch (e) {
console.log(`[DEBUG] Remote branch "origin/${branchName}" does NOT exist.`);
return false;
}
}
// If remote branch doesn't exist, make the initial commit and set up branches
/**
 * Creates the very first commit when no remote branches exist: commits
 * everything to master, pushes it, then creates fl-dev identical to master
 * and pushes it too. Does nothing if the working tree is clean.
 */
static async commitInitialChanges() {
console.log('[DEBUG] Adding all files for initial commit...');
await exec(`git add .`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
// --porcelain gives stable, script-friendly output; empty = clean tree.
const { stdout: status } = await exec(`git status --porcelain`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
if (status.trim()) {
await exec(`git commit -m "Initial version"`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
await exec(`git push -u origin master --force`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Creating and switching to branch "fl-dev"...');
await exec(`git branch fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
await exec(`git checkout fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Making fl-dev branch identical to master...');
await exec(`git reset --hard origin/master`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Pushing fl-dev branch to remote...');
await exec(`git push -u origin fl-dev --force`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
} else {
console.log('[DEBUG] No local changes to commit.');
}
}
// If the local directory is not empty but .git doesn't exist, initialize git,
// add .gitignore, configure the user, and add the remote origin.
/**
 * Initializes git in a non-empty directory that has no .git yet: writes a
 * .gitignore, configures the bot identity, adds the remote, then either
 * resets to origin/fl-dev (if it exists) or creates the initial commit.
 * NOTE(review): `git reset --hard origin/fl-dev` discards any local files
 * that conflict with the remote — confirm that is intended here.
 * @param {string} remoteUrl - Token-authenticated clone URL.
 */
static async initializeGitRepo(remoteUrl) {
console.log('[DEBUG] Local directory is not empty. Initializing git...');
const gitignorePath = path.join(ROOT_PATH, '.gitignore');
const ignoreContent = `node_modules/\n*/node_modules/\n*/build/\n`;
await fs.writeFile(gitignorePath, ignoreContent, 'utf8');
await exec(`git init`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Configuring git user...');
await exec(`git config user.email "support@flatlogic.com"`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
await exec(`git config user.name "Flatlogic Bot"`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log(`[DEBUG] Adding remote ${remoteUrl}...`);
await exec(`git remote add origin ${remoteUrl}`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Fetching remote...');
await exec(`git fetch origin`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
try {
console.log('[DEBUG] Checking for remote branch "origin/fl-dev"...');
await exec(`git rev-parse --verify origin/fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Remote branch "origin/fl-dev" exists. Resetting local repository to origin/fl-dev...');
await exec(`git reset --hard origin/fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Switching to branch "fl-dev"...');
// -B creates the local branch (or resets it) to the current HEAD.
await exec(`git checkout -B fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
} catch (e) {
console.log('[DEBUG] Remote branch "origin/fl-dev" does NOT exist. Creating initial commit...');
await this.commitInitialChanges();
}
}
// Method to check if the repository exists on remote server
static async checkRepoExists(repoName) {
const url = `https://${GITEA_DOMAIN}/api/v1/repos/${USERNAME}/${repoName}`;
try {
const response = await axios.get(url, {
headers: { Authorization: `token ${API_TOKEN}` }
});
return response.data;
} catch (err) {
if (err.response && err.response.status === 404) {
return null;
}
throw new Error('Error checking repository existence: ' + err.message);
}
}
// Method to create a remote repository via API
static async createRemoteRepo(repoName) {
const createUrl = `https://${GITEA_DOMAIN}/api/v1/user/repos`;
console.log("[DEBUG] createUrl", createUrl);
try {
const response = await axios.post(createUrl, {
name: repoName,
description: `Repository for project ${repoName}`,
private: false
}, {
headers: { Authorization: `token ${API_TOKEN}` }
});
return response.data;
} catch (err) {
throw new Error('Error creating repository via API: ' + err.message);
}
}
static async commitChanges(message = "", files = '.') {
// Ensure that we are on branch 'fl-dev' before making any commits
await this._ensureDevBranch();
try {
await exec(`git add ${files}`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
const { stdout: status } = await exec('git status --porcelain', { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
if (!status.trim()) {
return { message: "No changes to commit" };
}
const now = new Date();
const commitMessage = message || `Auto commit: ${now.toISOString()}`;
console.log('commitMessage:', commitMessage);
await exec(`git commit -m "${commitMessage}"`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
await this._pushChanges();
console.log('Pushed');
return { message: "Changes committed" };
} catch (error) {
throw new Error(`Error during commit: ${error.message}`);
}
}
static async getLog() {
try {
const { stdout } = await exec('git log fl-dev --oneline', { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
const lines = stdout.split(/\r?\n/).filter(line => line.trim() !== '');
const result = {};
lines.forEach((line) => {
const firstSpaceIndex = line.indexOf(' ');
if (firstSpaceIndex > 0) {
const hash = line.substring(0, firstSpaceIndex);
const message = line.substring(firstSpaceIndex + 1).trim();
result[hash] = message;
}
});
return result;
} catch (error) {
throw new Error(`Error during get log: ${error.message}`);
}
}
static async checkout(ref) {
try {
await exec(`git checkout ${ref}`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
return { message: `Checked out to ${ref}` };
} catch (error) {
throw new Error(`Error during checkout: ${error.message}`);
}
}
// Roll the working branch back so its content matches the state at
// commitHash, by reverting (inverting) every commit made AFTER it and
// recording the result as one new commit. History is preserved — this is
// not a reset.
// NOTE(review): `git revert A..HEAD` assumes commitHash is an ancestor of
// HEAD with at least one successor commit; confirm callers never pass HEAD
// itself.
static async revert(commitHash) {
try {
// Discard any uncommitted local changes so the revert applies cleanly.
await exec(`git reset --hard`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
// Rollback to the specified commit hash
// --no-commit accumulates all inverse changes in the index so they can be
// committed below as a single "Revert to version ..." commit.
await exec(
`git revert --no-edit ${commitHash}..HEAD --no-commit`,
{ cwd: ROOT_PATH, maxBuffer: MAX_BUFFER }
);
// Commit the changes
await exec(
`git commit -m "Revert to version ${commitHash}"`,
{ cwd: ROOT_PATH, maxBuffer: MAX_BUFFER }
);
await this._pushChanges();
return { message: `Reverted to commit ${commitHash}` };
} catch (error) {
console.error("Error during range revert:", error.message);
if (error.stdout) {
console.error("Revert stdout:", error.stdout);
}
if (error.stderr) {
console.error("Revert stderr:", error.stderr);
}
throw new Error(`Error during range revert: ${error.message}`);
}
}
// Merge the working branch 'fl-dev' into 'master' and push the result.
// Conflicts are auto-resolved in favour of fl-dev (-X theirs), so this is
// effectively a "promote dev to master" operation.
// NOTE(review): this leaves the repository checked out on 'master'; callers
// that expect to stay on 'fl-dev' must switch back themselves.
static async mergeDevIntoMaster() {
try {
// Switch to branch 'master'
console.log('Switching to branch "master"...');
await exec(`git checkout master`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
// Merge branch 'fl-dev' into 'master' with a forced merge.
// Parameter -X theirs is used to resolve conflicts by keeping the changes from the branch being merged in case of conflicts.
// --no-ff guarantees an explicit merge commit even for fast-forwardable merges.
console.log('Merging branch "fl-dev" into "master" (force merge with -X theirs)...');
await exec(
`git merge fl-dev --no-ff -X theirs -m "Forced merge: merge fl-dev into master"`,
{ cwd: ROOT_PATH, maxBuffer: MAX_BUFFER }
);
// Push the merged 'master' branch to remote
console.log('Pushing merged master branch to remote...');
const { stdout, stderr } = await exec(`git push origin master`, {
cwd: ROOT_PATH,
maxBuffer: MAX_BUFFER
});
// git writes progress to stderr even on success, so stderr here is
// informational, not necessarily an error.
if (stdout) {
console.log("Git push stdout:", stdout);
}
if (stderr) {
console.error("Git push stderr:", stderr);
}
return { message: "Branch fl-dev merged into master and pushed to remote" };
} catch (error) {
console.error("Error during mergeDevIntoMaster:", error.message);
if (error.stdout) {
console.error("Merge stdout:", error.stdout);
}
if (error.stderr) {
console.error("Merge stderr:", error.stderr);
}
throw error;
}
}
// Discard everything on 'fl-dev' and recreate it as an exact copy of
// 'master', force-pushing the result. Any unmerged fl-dev commits are lost.
static async resetDevBranch() {
try {
console.log('[DEBUG] Switching to branch "master"...');
await exec(`git checkout master`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Resetting branch "fl-dev" to be identical to "master"...');
// Command checkout -B fl-dev master creates branch 'fl-dev' from 'master' and switches to it
await exec(`git checkout -B fl-dev master`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
console.log('[DEBUG] Pushing updated branch "fl-dev" to remote (force push)...');
// --force is required because the remote fl-dev history is being rewritten.
await exec(`git push -u origin fl-dev --force`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
return { message: 'fl-dev branch successfully reset to master.' };
} catch (error) {
console.error("Error during resetting fl-dev branch:", error.message);
if (error.stdout) {
console.error("Reset stdout:", error.stdout);
}
if (error.stderr) {
console.error("Reset stderr:", error.stderr);
}
throw new Error(`Error during resetting fl-dev branch: ${error.message}`);
}
}
static async _pushChanges() {
try {
const { stdout, stderr } = await exec(`git push origin fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
if (stdout) {
console.log("Git push stdout:", stdout);
}
if (stderr) {
console.error("Git push stderr:", stderr);
}
return { message: "Changes pushed to remote repository (fl-dev branch)" };
} catch (error) {
console.error("Git push error:", error.message);
if (error.stdout) {
console.error("Git push stdout:", error.stdout);
}
if (error.stderr) {
console.error("Git push stderr:", error.stderr);
}
}
}
static async _ensureDevBranch() {
try {
// Check if branch 'fl-dev' exists
const { stdout: branchList } = await exec(`git branch --list fl-dev`, {
cwd: ROOT_PATH,
maxBuffer: MAX_BUFFER,
});
if (!branchList || branchList.trim() === '') {
console.log("Branch 'fl-dev' not found. Creating branch 'fl-dev'.");
await exec(`git checkout -b fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
} else {
// Determine current branch
const { stdout: currentBranchStdout } = await exec(`git rev-parse --abbrev-ref HEAD`, {
cwd: ROOT_PATH,
maxBuffer: MAX_BUFFER,
});
const currentBranch = currentBranchStdout.trim();
if (currentBranch !== 'fl-dev') {
console.log(`Switching from branch '${currentBranch}' to 'fl-dev'.`);
await exec(`git checkout fl-dev`, { cwd: ROOT_PATH, maxBuffer: MAX_BUFFER });
} else {
console.log("Already on branch 'fl-dev'.");
}
}
} catch (error) {
console.error("Error ensuring branch 'fl-dev':", error.message);
throw error;
}
}
}
module.exports = VCS;

3044
app-shell/yarn.lock Normal file

File diff suppressed because it is too large Load Diff

11
backend/.prettierrc Normal file
View File

@ -0,0 +1,11 @@
{
"singleQuote": true,
"tabWidth": 2,
"printWidth": 80,
"trailingComma": "all",
"quoteProps": "as-needed",
"jsxSingleQuote": true,
"bracketSpacing": true,
"bracketSameLine": false,
"arrowParens": "always"
}

7
backend/.sequelizerc Normal file
View File

@ -0,0 +1,7 @@
// Sequelize CLI configuration: points the CLI at the project's db config,
// models, seeders and migrations under src/db/.
const path = require('path');
module.exports = {
"config": path.resolve("src", "db", "db.config.js"),
"models-path": path.resolve("src", "db", "models"),
"seeders-path": path.resolve("src", "db", "seeders"),
"migrations-path": path.resolve("src", "db", "migrations")
};

23
backend/Dockerfile Normal file
View File

@ -0,0 +1,23 @@
FROM node:20.15.1-alpine
RUN apk update && apk add bash
# Create app directory
WORKDIR /usr/src/app
# Install app dependencies
# Copy the manifests first so `yarn install` is cached as its own layer.
# Fix: the lockfile was not copied, so yarn resolved fresh versions on every
# build; copy yarn.lock and install with --pure-lockfile for reproducible
# builds, matching the repository's other Dockerfiles.
COPY package*.json yarn.lock ./
RUN yarn install --pure-lockfile
# If you are building your code for production
# RUN npm ci --only=production
# Bundle app source
COPY . .
EXPOSE 8080
CMD [ "yarn", "start" ]

67
backend/README.md Normal file
View File

@ -0,0 +1,67 @@
# susatechnology — template backend
#### Run App on local machine:
##### Install local dependencies:
- `yarn install`
---
##### Adjust local db:
###### 1. Install postgres:
- MacOS:
- `brew install postgres`
- Ubuntu:
- `sudo apt update`
- `sudo apt install postgresql postgresql-contrib`
###### 2. Create db and admin user:
- Before you run and test the connection, make sure you have created the database as described in the configuration above. You can use the `psql` command to create a user and database.
- `psql postgres -U postgres`
- Next, type these commands to create a new user with a password and grant it permission to create databases.
- `postgres-# CREATE ROLE admin WITH LOGIN PASSWORD 'admin_pass';`
- `postgres-# ALTER ROLE admin CREATEDB;`
- Quit `psql`, then log in again using the new user that was just created.
- `postgres-# \q`
- `psql postgres -U admin`
- Type this command to create a new database.
- `postgres=> CREATE DATABASE db_susatechnology;`
- Then grant the new user privileges on the new database and quit `psql`.
- `postgres=> GRANT ALL PRIVILEGES ON DATABASE db_susatechnology TO admin;`
- `postgres=> \q`
---
#### Api Documentation (Swagger)
http://localhost:8080/api-docs (local host)
http://host_name/api-docs
---
##### Setup database tables or update after schema change
- `yarn db:migrate`
##### Seed the initial data (admin accounts, relevant for the first setup):
- `yarn db:seed`
##### Start build:
- `yarn start`

51
backend/package.json Normal file
View File

@ -0,0 +1,51 @@
{
"name": "susatechnology",
"description": "susatechnology - template backend",
"scripts": {
"start": "npm run db:migrate && npm run db:seed && nodemon ./src/index.js",
"db:migrate": "sequelize-cli db:migrate",
"db:seed": "sequelize-cli db:seed:all",
"db:drop": "sequelize-cli db:drop",
"db:create": "sequelize-cli db:create"
},
"dependencies": {
"@google-cloud/storage": "^5.18.2",
"axios": "^1.6.7",
"bcrypt": "5.1.1",
"cors": "2.8.5",
"csv-parser": "^3.0.0",
"express": "4.18.2",
"formidable": "1.2.2",
"helmet": "4.1.1",
"json2csv": "^5.0.7",
"jsonwebtoken": "8.5.1",
"lodash": "4.17.21",
"moment": "2.30.1",
"multer": "^1.4.4",
"mysql2": "2.2.5",
"nodemailer": "6.9.9",
"passport": "^0.7.0",
"passport-google-oauth2": "^0.2.0",
"passport-jwt": "^4.0.1",
"passport-microsoft": "^0.1.0",
"pg": "8.4.1",
"pg-hstore": "2.3.4",
"sequelize": "6.35.2",
"sequelize-json-schema": "^2.1.1",
"sqlite": "4.0.15",
"swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.0",
"tedious": "^18.2.4"
},
"engines": {
"node": ">=18"
},
"private": true,
"devDependencies": {
"cross-env": "7.0.3",
"mocha": "8.1.3",
"node-mocks-http": "1.9.0",
"nodemon": "2.0.5",
"sequelize-cli": "6.6.2"
}
}

79
backend/src/auth/auth.js Normal file
View File

@ -0,0 +1,79 @@
const config = require('../config');
const providers = config.providers;
const helpers = require('../helpers');
const db = require('../db/models');
const passport = require('passport');
const JWTstrategy = require('passport-jwt').Strategy;
const ExtractJWT = require('passport-jwt').ExtractJwt;
const GoogleStrategy = require('passport-google-oauth2').Strategy;
const MicrosoftStrategy = require('passport-microsoft').Strategy;
const UsersDBApi = require('../db/api/users');
// JWT bearer-token strategy: on each authenticated request, verify the token
// from the Authorization header against config.secret_key, load the user by
// the e-mail embedded in the token, reject disabled accounts, and expose the
// user on req.currentUser for downstream handlers.
passport.use(
new JWTstrategy(
{
passReqToCallback: true,
secretOrKey: config.secret_key,
jwtFromRequest: ExtractJWT.fromAuthHeaderAsBearerToken(),
},
async (req, token, done) => {
try {
const user = await UsersDBApi.findBy({ email: token.user.email });
// Disabled users carry a valid token but must not authenticate.
if (user && user.disabled) {
return done(new Error(`User '${user.email}' is disabled`));
}
req.currentUser = user;
return done(null, user);
} catch (error) {
done(error);
}
},
),
);
// Google OAuth2 sign-in: delegates user lookup/creation and JWT issuance to
// socialStrategy, keyed by the Google profile's e-mail address.
passport.use(
new GoogleStrategy(
{
clientID: config.google.clientId,
clientSecret: config.google.clientSecret,
callbackURL: config.apiUrl + '/auth/signin/google/callback',
passReqToCallback: true,
},
function (request, accessToken, refreshToken, profile, done) {
socialStrategy(profile.email, profile, providers.GOOGLE, done);
},
),
);
// Microsoft OAuth sign-in: same flow as Google, but Microsoft profiles may
// expose the address as either `mail` or `userPrincipalName`.
passport.use(
new MicrosoftStrategy(
{
clientID: config.microsoft.clientId,
clientSecret: config.microsoft.clientSecret,
callbackURL: config.apiUrl + '/auth/signin/microsoft/callback',
passReqToCallback: true,
},
function (request, accessToken, refreshToken, profile, done) {
const email = profile._json.mail || profile._json.userPrincipalName;
socialStrategy(email, profile, providers.MICROSOFT, done);
},
),
);
/**
 * Shared OAuth sign-in handler: find (or create) the user for the given
 * e-mail/provider pair and hand a signed JWT to the passport callback.
 *
 * Fix: the promise chain had no rejection handler, so a database error left
 * `done` uncalled and the sign-in request hanging; errors are now forwarded
 * to `done`.
 *
 * @param {string} email - address reported by the OAuth provider.
 * @param {object} profile - raw provider profile (displayName is used).
 * @param {string} provider - one of config.providers.
 * @param {Function} done - passport verify callback.
 */
function socialStrategy(email, profile, provider, done) {
  db.users
    .findOrCreate({ where: { email, provider } })
    .then(([user, created]) => {
      const body = {
        id: user.id,
        email: user.email,
        name: profile.displayName,
      };
      const token = helpers.jwtSign({ user: body });
      return done(null, { token });
    })
    .catch((error) => done(error));
}

73
backend/src/config.js Normal file
View File

@ -0,0 +1,73 @@
// Application-wide configuration for the backend: auth providers, mail,
// roles, and host/port URL assembly. Values switch on NODE_ENV between
// local development defaults and production.
// NOTE(review): secret_key, OAuth client secrets, the Pexels key and the
// SMTP user are hard-coded here and committed to the repository — these
// should be moved to environment variables and the exposed values rotated.
const os = require('os');
const config = {
gcloud: {
bucket: 'fldemo-files',
hash: '1b2a4cd99c98c6614dd81ba731df5667',
},
bcrypt: {
saltRounds: 12,
},
// Seed credentials for the initial admin account.
admin_pass: 'password',
admin_email: 'admin@flatlogic.com',
providers: {
LOCAL: 'local',
GOOGLE: 'google',
MICROSOFT: 'microsoft',
},
// FIXME(security): hard-coded JWT signing key — load from the environment.
secret_key: 'HUEyqESqgQ1yTwzVlO6wprC9Kf1J1xuA',
remote: '',
// In production these are empty so the derived URLs below omit host/port parts.
port: process.env.NODE_ENV === 'production' ? '' : '8080',
hostUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost',
portUI: process.env.NODE_ENV === 'production' ? '' : '3000',
portUIProd: process.env.NODE_ENV === 'production' ? '' : ':3000',
swaggerUI: process.env.NODE_ENV === 'production' ? '' : 'http://localhost',
swaggerPort: process.env.NODE_ENV === 'production' ? '' : ':8080',
// FIXME(security): hard-coded OAuth client secrets — move to env vars.
google: {
clientId:
'671001533244-kf1k1gmp6mnl0r030qmvdu6v36ghmim6.apps.googleusercontent.com',
clientSecret: 'Yo4qbKZniqvojzUQ60iKlxqR',
},
microsoft: {
clientId: '4696f457-31af-40de-897c-e00d7d4cff73',
clientSecret: 'm8jzZ.5UpHF3=-dXzyxiZ4e[F8OF54@p',
},
// Uploaded files are staged in the OS temp directory.
uploadDir: os.tmpdir(),
email: {
from: 'susatechnology <app@flatlogic.app>',
host: 'email-smtp.us-east-1.amazonaws.com',
port: 587,
auth: {
user: 'AKIAVEW7G4PQUBGM52OF',
pass: process.env.EMAIL_PASS,
},
tls: {
rejectUnauthorized: false,
},
},
roles: {
super_admin: 'Super Administrator',
admin: 'Administrator',
user: 'User',
},
project_uuid: '211609a8-ae51-4b11-bb65-8f947b4a65d1',
flHost:
process.env.NODE_ENV === 'production' ||
process.env.NODE_ENV === 'dev_stage'
? 'https://flatlogic.com/projects'
: 'http://localhost:3000/projects',
};
config.pexelsKey = 'Vc99rnmOhHhJAbgGQoKLZtsaIVfkeownoQNbTj78VemUjKh08ZYRbf18';
config.pexelsQuery = 'abstract technology network background';
// Derived URLs: empty port/host parts collapse cleanly in production.
config.host =
process.env.NODE_ENV === 'production' ? config.remote : 'http://localhost';
config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`;
config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`;
config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`;
config.backUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}`;
module.exports = config;

View File

@ -0,0 +1,363 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class AssetsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.create(
{
id: data.id || undefined,
asset_name: data.asset_name || null,
value: data.value || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await assets.setCompany(data.company || null, {
transaction,
});
await assets.setCompanies(data.companies || null, {
transaction,
});
return assets;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const assetsData = data.map((item, index) => ({
id: item.id || undefined,
asset_name: item.asset_name || null,
value: item.value || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const assets = await db.assets.bulkCreate(assetsData, { transaction });
// For each item created, replace relation files
return assets;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const assets = await db.assets.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.asset_name !== undefined)
updatePayload.asset_name = data.asset_name;
if (data.value !== undefined) updatePayload.value = data.value;
updatePayload.updatedById = currentUser.id;
await assets.update(updatePayload, { transaction });
if (data.company !== undefined) {
await assets.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await assets.setCompanies(
data.companies,
{ transaction },
);
}
return assets;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of assets) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of assets) {
await record.destroy({ transaction });
}
});
return assets;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findByPk(id, options);
await assets.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await assets.destroy({
transaction,
});
return assets;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const assets = await db.assets.findOne({ where }, { transaction });
if (!assets) {
return assets;
}
const output = assets.get({ plain: true });
output.company = await assets.getCompany({
transaction,
});
output.companies = await assets.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.asset_name) {
where = {
...where,
[Op.and]: Utils.ilike('assets', 'asset_name', filter.asset_name),
};
}
if (filter.valueRange) {
const [start, end] = filter.valueRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
value: {
...where.value,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
value: {
...where.value,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.assets.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('assets', 'asset_name', query),
],
};
}
const records = await db.assets.findAll({
attributes: ['id', 'asset_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['asset_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.asset_name,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class AttendanceDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const attendance = await db.attendance.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await attendance.setCompanies(data.companies || null, {
transaction,
});
return attendance;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const attendanceData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const attendance = await db.attendance.bulkCreate(attendanceData, {
transaction,
});
// For each item created, replace relation files
return attendance;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const attendance = await db.attendance.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await attendance.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await attendance.setCompanies(
data.companies,
{ transaction },
);
}
return attendance;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const attendance = await db.attendance.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of attendance) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of attendance) {
await record.destroy({ transaction });
}
});
return attendance;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const attendance = await db.attendance.findByPk(id, options);
await attendance.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await attendance.destroy({
transaction,
});
return attendance;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const attendance = await db.attendance.findOne({ where }, { transaction });
if (!attendance) {
return attendance;
}
const output = attendance.get({ plain: true });
output.companies = await attendance.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.attendance.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('attendance', 'id', query),
],
};
}
const records = await db.attendance.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Audit_logsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const audit_logs = await db.audit_logs.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await audit_logs.setCompanies(data.companies || null, {
transaction,
});
return audit_logs;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const audit_logsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const audit_logs = await db.audit_logs.bulkCreate(audit_logsData, {
transaction,
});
// For each item created, replace relation files
return audit_logs;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const audit_logs = await db.audit_logs.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await audit_logs.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await audit_logs.setCompanies(
data.companies,
{ transaction },
);
}
return audit_logs;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const audit_logs = await db.audit_logs.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of audit_logs) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of audit_logs) {
await record.destroy({ transaction });
}
});
return audit_logs;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const audit_logs = await db.audit_logs.findByPk(id, options);
await audit_logs.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await audit_logs.destroy({
transaction,
});
return audit_logs;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const audit_logs = await db.audit_logs.findOne({ where }, { transaction });
if (!audit_logs) {
return audit_logs;
}
const output = audit_logs.get({ plain: true });
output.companies = await audit_logs.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.audit_logs.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('audit_logs', 'id', query),
],
};
}
const records = await db.audit_logs.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,291 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class BudgetsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const budgets = await db.budgets.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await budgets.setCompanies(data.companies || null, {
transaction,
});
return budgets;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const budgetsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const budgets = await db.budgets.bulkCreate(budgetsData, { transaction });
// For each item created, replace relation files
return budgets;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const budgets = await db.budgets.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await budgets.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await budgets.setCompanies(
data.companies,
{ transaction },
);
}
return budgets;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const budgets = await db.budgets.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of budgets) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of budgets) {
await record.destroy({ transaction });
}
});
return budgets;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const budgets = await db.budgets.findByPk(id, options);
await budgets.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await budgets.destroy({
transaction,
});
return budgets;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const budgets = await db.budgets.findOne({ where }, { transaction });
if (!budgets) {
return budgets;
}
const output = budgets.get({ plain: true });
output.companies = await budgets.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.budgets.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('budgets', 'id', query),
],
};
}
const records = await db.budgets.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,430 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class CompaniesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const companies = await db.companies.create(
{
id: data.id || undefined,
name: data.name || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
return companies;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const companiesData = data.map((item, index) => ({
id: item.id || undefined,
name: item.name || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const companies = await db.companies.bulkCreate(companiesData, {
transaction,
});
// For each item created, replace relation files
return companies;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const companies = await db.companies.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.name !== undefined) updatePayload.name = data.name;
updatePayload.updatedById = currentUser.id;
await companies.update(updatePayload, { transaction });
return companies;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const companies = await db.companies.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of companies) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of companies) {
await record.destroy({ transaction });
}
});
return companies;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const companies = await db.companies.findByPk(id, options);
await companies.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await companies.destroy({
transaction,
});
return companies;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const companies = await db.companies.findOne({ where }, { transaction });
if (!companies) {
return companies;
}
const output = companies.get({ plain: true });
output.users_companies = await companies.getUsers_companies({
transaction,
});
output.assets_company = await companies.getAssets_company({
transaction,
});
output.assets_companies = await companies.getAssets_companies({
transaction,
});
output.employees_company = await companies.getEmployees_company({
transaction,
});
output.employees_companies = await companies.getEmployees_companies({
transaction,
});
output.financial_records_company =
await companies.getFinancial_records_company({
transaction,
});
output.financial_records_companies =
await companies.getFinancial_records_companies({
transaction,
});
output.inventories_company = await companies.getInventories_company({
transaction,
});
output.inventories_companies = await companies.getInventories_companies({
transaction,
});
output.manufacturing_orders_company =
await companies.getManufacturing_orders_company({
transaction,
});
output.manufacturing_orders_companies =
await companies.getManufacturing_orders_companies({
transaction,
});
output.procurements_company = await companies.getProcurements_company({
transaction,
});
output.procurements_companies = await companies.getProcurements_companies({
transaction,
});
output.projects_company = await companies.getProjects_company({
transaction,
});
output.projects_companies = await companies.getProjects_companies({
transaction,
});
output.sales_company = await companies.getSales_company({
transaction,
});
output.sales_companies = await companies.getSales_companies({
transaction,
});
output.subscriptions_company = await companies.getSubscriptions_company({
transaction,
});
output.subscriptions_companies = await companies.getSubscriptions_companies(
{
transaction,
},
);
output.invoices_companies = await companies.getInvoices_companies({
transaction,
});
output.payments_companies = await companies.getPayments_companies({
transaction,
});
output.contracts_companies = await companies.getContracts_companies({
transaction,
});
output.shipments_companies = await companies.getShipments_companies({
transaction,
});
output.warehouses_companies = await companies.getWarehouses_companies({
transaction,
});
output.payrolls_companies = await companies.getPayrolls_companies({
transaction,
});
output.attendance_companies = await companies.getAttendance_companies({
transaction,
});
output.leave_requests_companies =
await companies.getLeave_requests_companies({
transaction,
});
output.recruitment_companies = await companies.getRecruitment_companies({
transaction,
});
output.budgets_companies = await companies.getBudgets_companies({
transaction,
});
output.taxes_companies = await companies.getTaxes_companies({
transaction,
});
output.expenses_companies = await companies.getExpenses_companies({
transaction,
});
output.revenues_companies = await companies.getRevenues_companies({
transaction,
});
output.leads_companies = await companies.getLeads_companies({
transaction,
});
output.tickets_companies = await companies.getTickets_companies({
transaction,
});
output.marketing_campaigns_companies =
await companies.getMarketing_campaigns_companies({
transaction,
});
output.audit_logs_companies = await companies.getAudit_logs_companies({
transaction,
});
output.notifications_companies = await companies.getNotifications_companies(
{
transaction,
},
);
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike('companies', 'name', filter.name),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.companies.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('companies', 'name', query),
],
};
}
const records = await db.companies.findAll({
attributes: ['id', 'name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class ContractsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const contracts = await db.contracts.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await contracts.setCompanies(data.companies || null, {
transaction,
});
return contracts;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const contractsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const contracts = await db.contracts.bulkCreate(contractsData, {
transaction,
});
// For each item created, replace relation files
return contracts;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const contracts = await db.contracts.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await contracts.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await contracts.setCompanies(
data.companies,
{ transaction },
);
}
return contracts;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const contracts = await db.contracts.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of contracts) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of contracts) {
await record.destroy({ transaction });
}
});
return contracts;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const contracts = await db.contracts.findByPk(id, options);
await contracts.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await contracts.destroy({
transaction,
});
return contracts;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const contracts = await db.contracts.findOne({ where }, { transaction });
if (!contracts) {
return contracts;
}
const output = contracts.get({ plain: true });
output.companies = await contracts.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.contracts.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('contracts', 'id', query),
],
};
}
const records = await db.contracts.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,336 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class EmployeesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const employees = await db.employees.create(
{
id: data.id || undefined,
full_name: data.full_name || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await employees.setCompany(data.company || null, {
transaction,
});
await employees.setCompanies(data.companies || null, {
transaction,
});
return employees;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const employeesData = data.map((item, index) => ({
id: item.id || undefined,
full_name: item.full_name || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const employees = await db.employees.bulkCreate(employeesData, {
transaction,
});
// For each item created, replace relation files
return employees;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const employees = await db.employees.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.full_name !== undefined) updatePayload.full_name = data.full_name;
updatePayload.updatedById = currentUser.id;
await employees.update(updatePayload, { transaction });
if (data.company !== undefined) {
await employees.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await employees.setCompanies(
data.companies,
{ transaction },
);
}
return employees;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const employees = await db.employees.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of employees) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of employees) {
await record.destroy({ transaction });
}
});
return employees;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const employees = await db.employees.findByPk(id, options);
await employees.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await employees.destroy({
transaction,
});
return employees;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const employees = await db.employees.findOne({ where }, { transaction });
if (!employees) {
return employees;
}
const output = employees.get({ plain: true });
output.company = await employees.getCompany({
transaction,
});
output.companies = await employees.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.full_name) {
where = {
...where,
[Op.and]: Utils.ilike('employees', 'full_name', filter.full_name),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.employees.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('employees', 'full_name', query),
],
};
}
const records = await db.employees.findAll({
attributes: ['id', 'full_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['full_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.full_name,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class ExpensesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const expenses = await db.expenses.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await expenses.setCompanies(data.companies || null, {
transaction,
});
return expenses;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const expensesData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const expenses = await db.expenses.bulkCreate(expensesData, {
transaction,
});
// For each item created, replace relation files
return expenses;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const expenses = await db.expenses.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await expenses.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await expenses.setCompanies(
data.companies,
{ transaction },
);
}
return expenses;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const expenses = await db.expenses.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of expenses) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of expenses) {
await record.destroy({ transaction });
}
});
return expenses;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const expenses = await db.expenses.findByPk(id, options);
await expenses.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await expenses.destroy({
transaction,
});
return expenses;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const expenses = await db.expenses.findOne({ where }, { transaction });
if (!expenses) {
return expenses;
}
const output = expenses.get({ plain: true });
output.companies = await expenses.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.expenses.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('expenses', 'id', query),
],
};
}
const records = await db.expenses.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,73 @@
const db = require('../models');
const assert = require('assert');
const services = require('../../services/file');
module.exports = class FileDBApi {
static async replaceRelationFiles(relation, rawFiles, options) {
assert(relation.belongsTo, 'belongsTo is required');
assert(relation.belongsToColumn, 'belongsToColumn is required');
assert(relation.belongsToId, 'belongsToId is required');
let files = [];
if (Array.isArray(rawFiles)) {
files = rawFiles;
} else {
files = rawFiles ? [rawFiles] : [];
}
await this._removeLegacyFiles(relation, files, options);
await this._addFiles(relation, files, options);
}
static async _addFiles(relation, files, options) {
const transaction = (options && options.transaction) || undefined;
const currentUser = (options && options.currentUser) || { id: null };
const inexistentFiles = files.filter((file) => !!file.new);
for (const file of inexistentFiles) {
await db.file.create(
{
belongsTo: relation.belongsTo,
belongsToColumn: relation.belongsToColumn,
belongsToId: relation.belongsToId,
name: file.name,
sizeInBytes: file.sizeInBytes,
privateUrl: file.privateUrl,
publicUrl: file.publicUrl,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{
transaction,
},
);
}
}
static async _removeLegacyFiles(relation, files, options) {
const transaction = (options && options.transaction) || undefined;
const filesToDelete = await db.file.findAll({
where: {
belongsTo: relation.belongsTo,
belongsToId: relation.belongsToId,
belongsToColumn: relation.belongsToColumn,
id: {
[db.Sequelize.Op.notIn]: files
.filter((file) => !file.new)
.map((file) => file.id),
},
},
transaction,
});
for (let file of filesToDelete) {
await services.deleteGCloud(file.privateUrl);
await file.destroy({
transaction,
});
}
}
};

View File

@ -0,0 +1,407 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Financial_recordsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const financial_records = await db.financial_records.create(
{
id: data.id || undefined,
record_type: data.record_type || null,
amount: data.amount || null,
date: data.date || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await financial_records.setCompany(data.company || null, {
transaction,
});
await financial_records.setCompanies(data.companies || null, {
transaction,
});
return financial_records;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const financial_recordsData = data.map((item, index) => ({
id: item.id || undefined,
record_type: item.record_type || null,
amount: item.amount || null,
date: item.date || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const financial_records = await db.financial_records.bulkCreate(
financial_recordsData,
{ transaction },
);
// For each item created, replace relation files
return financial_records;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const financial_records = await db.financial_records.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
if (data.record_type !== undefined)
updatePayload.record_type = data.record_type;
if (data.amount !== undefined) updatePayload.amount = data.amount;
if (data.date !== undefined) updatePayload.date = data.date;
updatePayload.updatedById = currentUser.id;
await financial_records.update(updatePayload, { transaction });
if (data.company !== undefined) {
await financial_records.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await financial_records.setCompanies(
data.companies,
{ transaction },
);
}
return financial_records;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const financial_records = await db.financial_records.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of financial_records) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of financial_records) {
await record.destroy({ transaction });
}
});
return financial_records;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const financial_records = await db.financial_records.findByPk(id, options);
await financial_records.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await financial_records.destroy({
transaction,
});
return financial_records;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const financial_records = await db.financial_records.findOne(
{ where },
{ transaction },
);
if (!financial_records) {
return financial_records;
}
const output = financial_records.get({ plain: true });
output.company = await financial_records.getCompany({
transaction,
});
output.companies = await financial_records.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.record_type) {
where = {
...where,
[Op.and]: Utils.ilike(
'financial_records',
'record_type',
filter.record_type,
),
};
}
if (filter.amountRange) {
const [start, end] = filter.amountRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
amount: {
...where.amount,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
amount: {
...where.amount,
[Op.lte]: end,
},
};
}
}
if (filter.dateRange) {
const [start, end] = filter.dateRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
date: {
...where.date,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
date: {
...where.date,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.financial_records.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('financial_records', 'record_type', query),
],
};
}
const records = await db.financial_records.findAll({
attributes: ['id', 'record_type'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['record_type', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.record_type,
}));
}
};

View File

@ -0,0 +1,369 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class InventoriesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const inventories = await db.inventories.create(
{
id: data.id || undefined,
item_name: data.item_name || null,
quantity: data.quantity || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await inventories.setCompany(data.company || null, {
transaction,
});
await inventories.setCompanies(data.companies || null, {
transaction,
});
return inventories;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const inventoriesData = data.map((item, index) => ({
id: item.id || undefined,
item_name: item.item_name || null,
quantity: item.quantity || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const inventories = await db.inventories.bulkCreate(inventoriesData, {
transaction,
});
// For each item created, replace relation files
return inventories;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const inventories = await db.inventories.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.item_name !== undefined) updatePayload.item_name = data.item_name;
if (data.quantity !== undefined) updatePayload.quantity = data.quantity;
updatePayload.updatedById = currentUser.id;
await inventories.update(updatePayload, { transaction });
if (data.company !== undefined) {
await inventories.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await inventories.setCompanies(
data.companies,
{ transaction },
);
}
return inventories;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const inventories = await db.inventories.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of inventories) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of inventories) {
await record.destroy({ transaction });
}
});
return inventories;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const inventories = await db.inventories.findByPk(id, options);
await inventories.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await inventories.destroy({
transaction,
});
return inventories;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const inventories = await db.inventories.findOne(
{ where },
{ transaction },
);
if (!inventories) {
return inventories;
}
const output = inventories.get({ plain: true });
output.company = await inventories.getCompany({
transaction,
});
output.companies = await inventories.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.item_name) {
where = {
...where,
[Op.and]: Utils.ilike('inventories', 'item_name', filter.item_name),
};
}
if (filter.quantityRange) {
const [start, end] = filter.quantityRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
quantity: {
...where.quantity,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
quantity: {
...where.quantity,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.inventories.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('inventories', 'item_name', query),
],
};
}
const records = await db.inventories.findAll({
attributes: ['id', 'item_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['item_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.item_name,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class InvoicesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const invoices = await db.invoices.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await invoices.setCompanies(data.companies || null, {
transaction,
});
return invoices;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const invoicesData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const invoices = await db.invoices.bulkCreate(invoicesData, {
transaction,
});
// For each item created, replace relation files
return invoices;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const invoices = await db.invoices.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await invoices.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await invoices.setCompanies(
data.companies,
{ transaction },
);
}
return invoices;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const invoices = await db.invoices.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of invoices) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of invoices) {
await record.destroy({ transaction });
}
});
return invoices;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const invoices = await db.invoices.findByPk(id, options);
await invoices.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await invoices.destroy({
transaction,
});
return invoices;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const invoices = await db.invoices.findOne({ where }, { transaction });
if (!invoices) {
return invoices;
}
const output = invoices.get({ plain: true });
output.companies = await invoices.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.invoices.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('invoices', 'id', query),
],
};
}
const records = await db.invoices.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

291
backend/src/db/api/leads.js Normal file
View File

@ -0,0 +1,291 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class LeadsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const leads = await db.leads.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await leads.setCompanies(data.companies || null, {
transaction,
});
return leads;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const leadsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const leads = await db.leads.bulkCreate(leadsData, { transaction });
// For each item created, replace relation files
return leads;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const leads = await db.leads.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await leads.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await leads.setCompanies(
data.companies,
{ transaction },
);
}
return leads;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const leads = await db.leads.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of leads) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of leads) {
await record.destroy({ transaction });
}
});
return leads;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const leads = await db.leads.findByPk(id, options);
await leads.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await leads.destroy({
transaction,
});
return leads;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const leads = await db.leads.findOne({ where }, { transaction });
if (!leads) {
return leads;
}
const output = leads.get({ plain: true });
output.companies = await leads.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.leads.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('leads', 'id', query),
],
};
}
const records = await db.leads.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,303 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Leave_requestsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const leave_requests = await db.leave_requests.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await leave_requests.setCompanies(data.companies || null, {
transaction,
});
return leave_requests;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const leave_requestsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const leave_requests = await db.leave_requests.bulkCreate(
leave_requestsData,
{ transaction },
);
// For each item created, replace relation files
return leave_requests;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const leave_requests = await db.leave_requests.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await leave_requests.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await leave_requests.setCompanies(
data.companies,
{ transaction },
);
}
return leave_requests;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const leave_requests = await db.leave_requests.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of leave_requests) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of leave_requests) {
await record.destroy({ transaction });
}
});
return leave_requests;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const leave_requests = await db.leave_requests.findByPk(id, options);
await leave_requests.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await leave_requests.destroy({
transaction,
});
return leave_requests;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const leave_requests = await db.leave_requests.findOne(
{ where },
{ transaction },
);
if (!leave_requests) {
return leave_requests;
}
const output = leave_requests.get({ plain: true });
output.companies = await leave_requests.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.leave_requests.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('leave_requests', 'id', query),
],
};
}
const records = await db.leave_requests.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,401 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Manufacturing_ordersDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const manufacturing_orders = await db.manufacturing_orders.create(
{
id: data.id || undefined,
order_name: data.order_name || null,
scheduled_date: data.scheduled_date || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await manufacturing_orders.setCompany(data.company || null, {
transaction,
});
await manufacturing_orders.setCompanies(data.companies || null, {
transaction,
});
return manufacturing_orders;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const manufacturing_ordersData = data.map((item, index) => ({
id: item.id || undefined,
order_name: item.order_name || null,
scheduled_date: item.scheduled_date || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const manufacturing_orders = await db.manufacturing_orders.bulkCreate(
manufacturing_ordersData,
{ transaction },
);
// For each item created, replace relation files
return manufacturing_orders;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const manufacturing_orders = await db.manufacturing_orders.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
if (data.order_name !== undefined)
updatePayload.order_name = data.order_name;
if (data.scheduled_date !== undefined)
updatePayload.scheduled_date = data.scheduled_date;
updatePayload.updatedById = currentUser.id;
await manufacturing_orders.update(updatePayload, { transaction });
if (data.company !== undefined) {
await manufacturing_orders.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await manufacturing_orders.setCompanies(
data.companies,
{ transaction },
);
}
return manufacturing_orders;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const manufacturing_orders = await db.manufacturing_orders.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of manufacturing_orders) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of manufacturing_orders) {
await record.destroy({ transaction });
}
});
return manufacturing_orders;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const manufacturing_orders = await db.manufacturing_orders.findByPk(
id,
options,
);
await manufacturing_orders.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await manufacturing_orders.destroy({
transaction,
});
return manufacturing_orders;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const manufacturing_orders = await db.manufacturing_orders.findOne(
{ where },
{ transaction },
);
if (!manufacturing_orders) {
return manufacturing_orders;
}
const output = manufacturing_orders.get({ plain: true });
output.company = await manufacturing_orders.getCompany({
transaction,
});
output.companies = await manufacturing_orders.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.order_name) {
where = {
...where,
[Op.and]: Utils.ilike(
'manufacturing_orders',
'order_name',
filter.order_name,
),
};
}
if (filter.calendarStart && filter.calendarEnd) {
where = {
...where,
[Op.or]: [
{
scheduled_date: {
[Op.between]: [filter.calendarStart, filter.calendarEnd],
},
},
{
scheduled_date: {
[Op.between]: [filter.calendarStart, filter.calendarEnd],
},
},
],
};
}
if (filter.scheduled_dateRange) {
const [start, end] = filter.scheduled_dateRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
scheduled_date: {
...where.scheduled_date,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
scheduled_date: {
...where.scheduled_date,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.manufacturing_orders.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('manufacturing_orders', 'order_name', query),
],
};
}
const records = await db.manufacturing_orders.findAll({
attributes: ['id', 'order_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['order_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.order_name,
}));
}
};

View File

@ -0,0 +1,306 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class Marketing_campaignsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const marketing_campaigns = await db.marketing_campaigns.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await marketing_campaigns.setCompanies(data.companies || null, {
transaction,
});
return marketing_campaigns;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const marketing_campaignsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const marketing_campaigns = await db.marketing_campaigns.bulkCreate(
marketing_campaignsData,
{ transaction },
);
// For each item created, replace relation files
return marketing_campaigns;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const marketing_campaigns = await db.marketing_campaigns.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await marketing_campaigns.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await marketing_campaigns.setCompanies(
data.companies,
{ transaction },
);
}
return marketing_campaigns;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const marketing_campaigns = await db.marketing_campaigns.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of marketing_campaigns) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of marketing_campaigns) {
await record.destroy({ transaction });
}
});
return marketing_campaigns;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const marketing_campaigns = await db.marketing_campaigns.findByPk(
id,
options,
);
await marketing_campaigns.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await marketing_campaigns.destroy({
transaction,
});
return marketing_campaigns;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const marketing_campaigns = await db.marketing_campaigns.findOne(
{ where },
{ transaction },
);
if (!marketing_campaigns) {
return marketing_campaigns;
}
const output = marketing_campaigns.get({ plain: true });
output.companies = await marketing_campaigns.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.marketing_campaigns.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('marketing_campaigns', 'id', query),
],
};
}
const records = await db.marketing_campaigns.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,302 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class NotificationsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const notifications = await db.notifications.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await notifications.setCompanies(data.companies || null, {
transaction,
});
return notifications;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const notificationsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const notifications = await db.notifications.bulkCreate(notificationsData, {
transaction,
});
// For each item created, replace relation files
return notifications;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const notifications = await db.notifications.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await notifications.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await notifications.setCompanies(
data.companies,
{ transaction },
);
}
return notifications;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const notifications = await db.notifications.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of notifications) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of notifications) {
await record.destroy({ transaction });
}
});
return notifications;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const notifications = await db.notifications.findByPk(id, options);
await notifications.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await notifications.destroy({
transaction,
});
return notifications;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const notifications = await db.notifications.findOne(
{ where },
{ transaction },
);
if (!notifications) {
return notifications;
}
const output = notifications.get({ plain: true });
output.companies = await notifications.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.notifications.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('notifications', 'id', query),
],
};
}
const records = await db.notifications.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class PaymentsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const payments = await db.payments.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await payments.setCompanies(data.companies || null, {
transaction,
});
return payments;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const paymentsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const payments = await db.payments.bulkCreate(paymentsData, {
transaction,
});
// For each item created, replace relation files
return payments;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const payments = await db.payments.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await payments.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await payments.setCompanies(
data.companies,
{ transaction },
);
}
return payments;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const payments = await db.payments.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of payments) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of payments) {
await record.destroy({ transaction });
}
});
return payments;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const payments = await db.payments.findByPk(id, options);
await payments.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await payments.destroy({
transaction,
});
return payments;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const payments = await db.payments.findOne({ where }, { transaction });
if (!payments) {
return payments;
}
const output = payments.get({ plain: true });
output.companies = await payments.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.payments.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('payments', 'id', query),
],
};
}
const records = await db.payments.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class PayrollsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const payrolls = await db.payrolls.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await payrolls.setCompanies(data.companies || null, {
transaction,
});
return payrolls;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const payrollsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const payrolls = await db.payrolls.bulkCreate(payrollsData, {
transaction,
});
// For each item created, replace relation files
return payrolls;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const payrolls = await db.payrolls.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await payrolls.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await payrolls.setCompanies(
data.companies,
{ transaction },
);
}
return payrolls;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const payrolls = await db.payrolls.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of payrolls) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of payrolls) {
await record.destroy({ transaction });
}
});
return payrolls;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const payrolls = await db.payrolls.findByPk(id, options);
await payrolls.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await payrolls.destroy({
transaction,
});
return payrolls;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const payrolls = await db.payrolls.findOne({ where }, { transaction });
if (!payrolls) {
return payrolls;
}
const output = payrolls.get({ plain: true });
output.companies = await payrolls.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.payrolls.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('payrolls', 'id', query),
],
};
}
const records = await db.payrolls.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,257 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class PermissionsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.create(
{
id: data.id || undefined,
name: data.name || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
return permissions;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const permissionsData = data.map((item, index) => ({
id: item.id || undefined,
name: item.name || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const permissions = await db.permissions.bulkCreate(permissionsData, {
transaction,
});
// For each item created, replace relation files
return permissions;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const permissions = await db.permissions.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.name !== undefined) updatePayload.name = data.name;
updatePayload.updatedById = currentUser.id;
await permissions.update(updatePayload, { transaction });
return permissions;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of permissions) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of permissions) {
await record.destroy({ transaction });
}
});
return permissions;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.findByPk(id, options);
await permissions.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await permissions.destroy({
transaction,
});
return permissions;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const permissions = await db.permissions.findOne(
{ where },
{ transaction },
);
if (!permissions) {
return permissions;
}
const output = permissions.get({ plain: true });
return output;
}
static async findAll(filter, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike('permissions', 'name', filter.name),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.permissions.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset) {
let where = {};
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('permissions', 'name', query),
],
};
}
const records = await db.permissions.findAll({
attributes: ['id', 'name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

View File

@ -0,0 +1,362 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class ProcurementsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const procurements = await db.procurements.create(
{
id: data.id || undefined,
request_name: data.request_name || null,
approval_status: data.approval_status || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await procurements.setCompany(data.company || null, {
transaction,
});
await procurements.setCompanies(data.companies || null, {
transaction,
});
return procurements;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const procurementsData = data.map((item, index) => ({
id: item.id || undefined,
request_name: item.request_name || null,
approval_status: item.approval_status || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const procurements = await db.procurements.bulkCreate(procurementsData, {
transaction,
});
// For each item created, replace relation files
return procurements;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const procurements = await db.procurements.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
if (data.request_name !== undefined)
updatePayload.request_name = data.request_name;
if (data.approval_status !== undefined)
updatePayload.approval_status = data.approval_status;
updatePayload.updatedById = currentUser.id;
await procurements.update(updatePayload, { transaction });
if (data.company !== undefined) {
await procurements.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await procurements.setCompanies(
data.companies,
{ transaction },
);
}
return procurements;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const procurements = await db.procurements.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of procurements) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of procurements) {
await record.destroy({ transaction });
}
});
return procurements;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const procurements = await db.procurements.findByPk(id, options);
await procurements.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await procurements.destroy({
transaction,
});
return procurements;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const procurements = await db.procurements.findOne(
{ where },
{ transaction },
);
if (!procurements) {
return procurements;
}
const output = procurements.get({ plain: true });
output.company = await procurements.getCompany({
transaction,
});
output.companies = await procurements.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.request_name) {
where = {
...where,
[Op.and]: Utils.ilike(
'procurements',
'request_name',
filter.request_name,
),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.approval_status) {
where = {
...where,
approval_status: filter.approval_status,
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.procurements.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('procurements', 'request_name', query),
],
};
}
const records = await db.procurements.findAll({
attributes: ['id', 'request_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['request_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.request_name,
}));
}
};

View File

@ -0,0 +1,416 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class ProjectsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const projects = await db.projects.create(
{
id: data.id || undefined,
project_name: data.project_name || null,
start_date: data.start_date || null,
end_date: data.end_date || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await projects.setCompany(data.company || null, {
transaction,
});
await projects.setCompanies(data.companies || null, {
transaction,
});
return projects;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const projectsData = data.map((item, index) => ({
id: item.id || undefined,
project_name: item.project_name || null,
start_date: item.start_date || null,
end_date: item.end_date || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const projects = await db.projects.bulkCreate(projectsData, {
transaction,
});
// For each item created, replace relation files
return projects;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const projects = await db.projects.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.project_name !== undefined)
updatePayload.project_name = data.project_name;
if (data.start_date !== undefined)
updatePayload.start_date = data.start_date;
if (data.end_date !== undefined) updatePayload.end_date = data.end_date;
updatePayload.updatedById = currentUser.id;
await projects.update(updatePayload, { transaction });
if (data.company !== undefined) {
await projects.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await projects.setCompanies(
data.companies,
{ transaction },
);
}
return projects;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const projects = await db.projects.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of projects) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of projects) {
await record.destroy({ transaction });
}
});
return projects;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const projects = await db.projects.findByPk(id, options);
await projects.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await projects.destroy({
transaction,
});
return projects;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const projects = await db.projects.findOne({ where }, { transaction });
if (!projects) {
return projects;
}
const output = projects.get({ plain: true });
output.company = await projects.getCompany({
transaction,
});
output.companies = await projects.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.project_name) {
where = {
...where,
[Op.and]: Utils.ilike(
'projects',
'project_name',
filter.project_name,
),
};
}
if (filter.calendarStart && filter.calendarEnd) {
where = {
...where,
[Op.or]: [
{
start_date: {
[Op.between]: [filter.calendarStart, filter.calendarEnd],
},
},
{
end_date: {
[Op.between]: [filter.calendarStart, filter.calendarEnd],
},
},
],
};
}
if (filter.start_dateRange) {
const [start, end] = filter.start_dateRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
start_date: {
...where.start_date,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
start_date: {
...where.start_date,
[Op.lte]: end,
},
};
}
}
if (filter.end_dateRange) {
const [start, end] = filter.end_dateRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
end_date: {
...where.end_date,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
end_date: {
...where.end_date,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.projects.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('projects', 'project_name', query),
],
};
}
const records = await db.projects.findAll({
attributes: ['id', 'project_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['project_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.project_name,
}));
}
};

View File

@ -0,0 +1,298 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class RecruitmentDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const recruitment = await db.recruitment.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await recruitment.setCompanies(data.companies || null, {
transaction,
});
return recruitment;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const recruitmentData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const recruitment = await db.recruitment.bulkCreate(recruitmentData, {
transaction,
});
// For each item created, replace relation files
return recruitment;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const recruitment = await db.recruitment.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await recruitment.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await recruitment.setCompanies(
data.companies,
{ transaction },
);
}
return recruitment;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const recruitment = await db.recruitment.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of recruitment) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of recruitment) {
await record.destroy({ transaction });
}
});
return recruitment;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const recruitment = await db.recruitment.findByPk(id, options);
await recruitment.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await recruitment.destroy({
transaction,
});
return recruitment;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const recruitment = await db.recruitment.findOne(
{ where },
{ transaction },
);
if (!recruitment) {
return recruitment;
}
const output = recruitment.get({ plain: true });
output.companies = await recruitment.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.recruitment.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('recruitment', 'id', query),
],
};
}
const records = await db.recruitment.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class RevenuesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const revenues = await db.revenues.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await revenues.setCompanies(data.companies || null, {
transaction,
});
return revenues;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const revenuesData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const revenues = await db.revenues.bulkCreate(revenuesData, {
transaction,
});
// For each item created, replace relation files
return revenues;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const revenues = await db.revenues.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await revenues.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await revenues.setCompanies(
data.companies,
{ transaction },
);
}
return revenues;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const revenues = await db.revenues.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of revenues) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of revenues) {
await record.destroy({ transaction });
}
});
return revenues;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const revenues = await db.revenues.findByPk(id, options);
await revenues.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await revenues.destroy({
transaction,
});
return revenues;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const revenues = await db.revenues.findOne({ where }, { transaction });
if (!revenues) {
return revenues;
}
const output = revenues.get({ plain: true });
output.companies = await revenues.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.revenues.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('revenues', 'id', query),
],
};
}
const records = await db.revenues.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

343
backend/src/db/api/roles.js Normal file
View File

@ -0,0 +1,343 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const config = require('../../config');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class RolesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.create(
{
id: data.id || undefined,
name: data.name || null,
role_customization: data.role_customization || null,
globalAccess: data.globalAccess || false,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await roles.setPermissions(data.permissions || [], {
transaction,
});
return roles;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const rolesData = data.map((item, index) => ({
id: item.id || undefined,
name: item.name || null,
role_customization: item.role_customization || null,
globalAccess: item.globalAccess || false,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const roles = await db.roles.bulkCreate(rolesData, { transaction });
// For each item created, replace relation files
return roles;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const roles = await db.roles.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.name !== undefined) updatePayload.name = data.name;
if (data.role_customization !== undefined)
updatePayload.role_customization = data.role_customization;
if (data.globalAccess !== undefined)
updatePayload.globalAccess = data.globalAccess;
updatePayload.updatedById = currentUser.id;
await roles.update(updatePayload, { transaction });
if (data.permissions !== undefined) {
await roles.setPermissions(data.permissions, { transaction });
}
return roles;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of roles) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of roles) {
await record.destroy({ transaction });
}
});
return roles;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.findByPk(id, options);
await roles.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await roles.destroy({
transaction,
});
return roles;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const roles = await db.roles.findOne({ where }, { transaction });
if (!roles) {
return roles;
}
const output = roles.get({ plain: true });
output.users_app_role = await roles.getUsers_app_role({
transaction,
});
output.permissions = await roles.getPermissions({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.permissions,
as: 'permissions',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.name) {
where = {
...where,
[Op.and]: Utils.ilike('roles', 'name', filter.name),
};
}
if (filter.role_customization) {
where = {
...where,
[Op.and]: Utils.ilike(
'roles',
'role_customization',
filter.role_customization,
),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.globalAccess) {
where = {
...where,
globalAccess: filter.globalAccess,
};
}
if (filter.permissions) {
const searchTerms = filter.permissions.split('|');
include = [
{
model: db.permissions,
as: 'permissions_filter',
required: searchTerms.length > 0,
where:
searchTerms.length > 0
? {
[Op.or]: [
{
id: {
[Op.in]: searchTerms.map((term) => Utils.uuid(term)),
},
},
{
name: {
[Op.or]: searchTerms.map((term) => ({
[Op.iLike]: `%${term}%`,
})),
},
},
],
}
: undefined,
},
...include,
];
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (!globalAccess) {
where = { name: { [Op.ne]: config.roles.super_admin } };
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.roles.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(query, limit, offset, globalAccess) {
let where = {};
if (!globalAccess) {
where = { name: { [Op.ne]: config.roles.super_admin } };
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('roles', 'name', query),
],
};
}
const records = await db.roles.findAll({
attributes: ['id', 'name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.name,
}));
}
};

345
backend/src/db/api/sales.js Normal file
View File

@ -0,0 +1,345 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class SalesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const sales = await db.sales.create(
{
id: data.id || undefined,
lead_name: data.lead_name || null,
status: data.status || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await sales.setCompany(data.company || null, {
transaction,
});
await sales.setCompanies(data.companies || null, {
transaction,
});
return sales;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const salesData = data.map((item, index) => ({
id: item.id || undefined,
lead_name: item.lead_name || null,
status: item.status || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const sales = await db.sales.bulkCreate(salesData, { transaction });
// For each item created, replace relation files
return sales;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const sales = await db.sales.findByPk(id, {}, { transaction });
const updatePayload = {};
if (data.lead_name !== undefined) updatePayload.lead_name = data.lead_name;
if (data.status !== undefined) updatePayload.status = data.status;
updatePayload.updatedById = currentUser.id;
await sales.update(updatePayload, { transaction });
if (data.company !== undefined) {
await sales.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await sales.setCompanies(
data.companies,
{ transaction },
);
}
return sales;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const sales = await db.sales.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of sales) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of sales) {
await record.destroy({ transaction });
}
});
return sales;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const sales = await db.sales.findByPk(id, options);
await sales.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await sales.destroy({
transaction,
});
return sales;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const sales = await db.sales.findOne({ where }, { transaction });
if (!sales) {
return sales;
}
const output = sales.get({ plain: true });
output.company = await sales.getCompany({
transaction,
});
output.companies = await sales.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.lead_name) {
where = {
...where,
[Op.and]: Utils.ilike('sales', 'lead_name', filter.lead_name),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.status) {
where = {
...where,
status: filter.status,
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.sales.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('sales', 'lead_name', query),
],
};
}
const records = await db.sales.findAll({
attributes: ['id', 'lead_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['lead_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.lead_name,
}));
}
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class ShipmentsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const shipments = await db.shipments.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await shipments.setCompanies(data.companies || null, {
transaction,
});
return shipments;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const shipmentsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const shipments = await db.shipments.bulkCreate(shipmentsData, {
transaction,
});
// For each item created, replace relation files
return shipments;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const shipments = await db.shipments.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await shipments.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await shipments.setCompanies(
data.companies,
{ transaction },
);
}
return shipments;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const shipments = await db.shipments.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of shipments) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of shipments) {
await record.destroy({ transaction });
}
});
return shipments;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const shipments = await db.shipments.findByPk(id, options);
await shipments.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await shipments.destroy({
transaction,
});
return shipments;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const shipments = await db.shipments.findOne({ where }, { transaction });
if (!shipments) {
return shipments;
}
const output = shipments.get({ plain: true });
output.companies = await shipments.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.shipments.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('shipments', 'id', query),
],
};
}
const records = await db.shipments.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,397 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class SubscriptionsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const subscriptions = await db.subscriptions.create(
{
id: data.id || undefined,
subscription_name: data.subscription_name || null,
renewal_date: data.renewal_date || null,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await subscriptions.setCompany(data.company || null, {
transaction,
});
await subscriptions.setCompanies(data.companies || null, {
transaction,
});
return subscriptions;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const subscriptionsData = data.map((item, index) => ({
id: item.id || undefined,
subscription_name: item.subscription_name || null,
renewal_date: item.renewal_date || null,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const subscriptions = await db.subscriptions.bulkCreate(subscriptionsData, {
transaction,
});
// For each item created, replace relation files
return subscriptions;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const subscriptions = await db.subscriptions.findByPk(
id,
{},
{ transaction },
);
const updatePayload = {};
if (data.subscription_name !== undefined)
updatePayload.subscription_name = data.subscription_name;
if (data.renewal_date !== undefined)
updatePayload.renewal_date = data.renewal_date;
updatePayload.updatedById = currentUser.id;
await subscriptions.update(updatePayload, { transaction });
if (data.company !== undefined) {
await subscriptions.setCompany(
data.company,
{ transaction },
);
}
if (data.companies !== undefined) {
await subscriptions.setCompanies(
data.companies,
{ transaction },
);
}
return subscriptions;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const subscriptions = await db.subscriptions.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of subscriptions) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of subscriptions) {
await record.destroy({ transaction });
}
});
return subscriptions;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const subscriptions = await db.subscriptions.findByPk(id, options);
await subscriptions.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await subscriptions.destroy({
transaction,
});
return subscriptions;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const subscriptions = await db.subscriptions.findOne(
{ where },
{ transaction },
);
if (!subscriptions) {
return subscriptions;
}
const output = subscriptions.get({ plain: true });
output.company = await subscriptions.getCompany({
transaction,
});
output.companies = await subscriptions.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'company',
},
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.subscription_name) {
where = {
...where,
[Op.and]: Utils.ilike(
'subscriptions',
'subscription_name',
filter.subscription_name,
),
};
}
if (filter.calendarStart && filter.calendarEnd) {
where = {
...where,
[Op.or]: [
{
renewal_date: {
[Op.between]: [filter.calendarStart, filter.calendarEnd],
},
},
{
renewal_date: {
[Op.between]: [filter.calendarStart, filter.calendarEnd],
},
},
],
};
}
if (filter.renewal_dateRange) {
const [start, end] = filter.renewal_dateRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
renewal_date: {
...where.renewal_date,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
renewal_date: {
...where.renewal_date,
[Op.lte]: end,
},
};
}
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.company) {
const listItems = filter.company.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companyId: { [Op.or]: listItems },
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.subscriptions.findAndCountAll(
queryOptions,
);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('subscriptions', 'subscription_name', query),
],
};
}
const records = await db.subscriptions.findAll({
attributes: ['id', 'subscription_name'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['subscription_name', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.subscription_name,
}));
}
};

291
backend/src/db/api/taxes.js Normal file
View File

@ -0,0 +1,291 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class TaxesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const taxes = await db.taxes.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await taxes.setCompanies(data.companies || null, {
transaction,
});
return taxes;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const taxesData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const taxes = await db.taxes.bulkCreate(taxesData, { transaction });
// For each item created, replace relation files
return taxes;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const taxes = await db.taxes.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await taxes.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await taxes.setCompanies(
data.companies,
{ transaction },
);
}
return taxes;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const taxes = await db.taxes.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of taxes) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of taxes) {
await record.destroy({ transaction });
}
});
return taxes;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const taxes = await db.taxes.findByPk(id, options);
await taxes.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await taxes.destroy({
transaction,
});
return taxes;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const taxes = await db.taxes.findOne({ where }, { transaction });
if (!taxes) {
return taxes;
}
const output = taxes.get({ plain: true });
output.companies = await taxes.getCompanies({
transaction,
});
return output;
}
/**
 * Paginated, filterable listing of taxes records.
 *
 * @param {object} filter - Carries limit/page plus optional id, active,
 *   companies ('|'-separated UUIDs), createdAtRange ([start, end]),
 *   and field/sort for ordering.
 * @param {boolean} globalAccess - When true, company scoping is lifted.
 * @param {object} [options] - May carry currentUser, transaction, countOnly.
 * @returns {Promise<{rows: Array, count: number}>} Matching rows and total.
 */
static async findAll(filter, globalAccess, options) {
  const limit = filter.limit || 0;
  let offset = 0;
  let where = {};
  const currentPage = +filter.page;
  const user = (options && options.currentUser) || null;
  const userCompanies = (user && user.companies?.id) || null;
  // Tenant scoping: restrict to the caller's company unless globalAccess
  // removes the restriction below.
  if (userCompanies) {
    if (options?.currentUser?.companiesId) {
      where.companiesId = options.currentUser.companiesId;
    }
  }
  offset = currentPage * limit;
  const orderBy = null; // NOTE(review): unused; ordering is built in queryOptions below.
  const transaction = (options && options.transaction) || undefined;
  let include = [
    {
      model: db.companies,
      as: 'companies',
    },
  ];
  if (filter) {
    if (filter.id) {
      where = {
        ...where,
        ['id']: Utils.uuid(filter.id),
      };
    }
    // Accepts boolean true or the string 'true'.
    if (filter.active !== undefined) {
      where = {
        ...where,
        active: filter.active === true || filter.active === 'true',
      };
    }
    // '|'-separated list of company UUIDs.
    if (filter.companies) {
      const listItems = filter.companies.split('|').map((item) => {
        return Utils.uuid(item);
      });
      where = {
        ...where,
        companiesId: { [Op.or]: listItems },
      };
    }
    // [start, end] range on createdAt; either bound may be omitted.
    if (filter.createdAtRange) {
      const [start, end] = filter.createdAtRange;
      if (start !== undefined && start !== null && start !== '') {
        where = {
          ...where,
          ['createdAt']: {
            ...where.createdAt,
            [Op.gte]: start,
          },
        };
      }
      if (end !== undefined && end !== null && end !== '') {
        where = {
          ...where,
          ['createdAt']: {
            ...where.createdAt,
            [Op.lte]: end,
          },
        };
      }
    }
  }
  // Global access lifts the tenant restriction applied above.
  if (globalAccess) {
    delete where.companiesId;
  }
  const queryOptions = {
    where,
    include,
    distinct: true, // count distinct parent rows despite the join
    order:
      filter.field && filter.sort
        ? [[filter.field, filter.sort]]
        : [['createdAt', 'desc']],
    transaction: options?.transaction,
    logging: console.log, // NOTE(review): SQL debug logging left enabled — confirm intended
  };
  // countOnly callers skip pagination entirely and discard the rows.
  if (!options?.countOnly) {
    queryOptions.limit = limit ? Number(limit) : undefined;
    queryOptions.offset = offset ? Number(offset) : undefined;
  }
  try {
    const { rows, count } = await db.taxes.findAndCountAll(queryOptions);
    return {
      rows: options?.countOnly ? [] : rows,
      count: count,
    };
  } catch (error) {
    console.error('Error executing query:', error);
    throw error;
  }
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('taxes', 'id', query),
],
};
}
const records = await db.taxes.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,291 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class TicketsDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const tickets = await db.tickets.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await tickets.setCompanies(data.companies || null, {
transaction,
});
return tickets;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const ticketsData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const tickets = await db.tickets.bulkCreate(ticketsData, { transaction });
// For each item created, replace relation files
return tickets;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const tickets = await db.tickets.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await tickets.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await tickets.setCompanies(
data.companies,
{ transaction },
);
}
return tickets;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const tickets = await db.tickets.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of tickets) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of tickets) {
await record.destroy({ transaction });
}
});
return tickets;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const tickets = await db.tickets.findByPk(id, options);
await tickets.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await tickets.destroy({
transaction,
});
return tickets;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const tickets = await db.tickets.findOne({ where }, { transaction });
if (!tickets) {
return tickets;
}
const output = tickets.get({ plain: true });
output.companies = await tickets.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.tickets.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('tickets', 'id', query),
],
};
}
const records = await db.tickets.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

799
backend/src/db/api/users.js Normal file
View File

@ -0,0 +1,799 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const bcrypt = require('bcrypt');
const config = require('../../config');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
/**
 * Data-access layer for the `users` model: CRUD, bulk import, paginated
 * listing, and authentication helpers (password reset and email-verification
 * tokens). All methods accept an `options` object that may carry
 * `currentUser` (for audit columns) and `transaction`.
 */
module.exports = class UsersDBApi {
  /**
   * Create a user plus role/companies/permissions associations and avatar.
   * Note the nested payload shape: the fields live under `data.data`.
   *
   * @param {object} data - Wrapper whose `data` property holds user fields.
   * @param {boolean} globalAccess - Accepted but not used in this method.
   * @param {object} [options] - May carry currentUser and transaction.
   * @returns {Promise<object>} The created users instance.
   */
  static async create(data, globalAccess, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.create(
      {
        id: data.data.id || undefined,
        firstName: data.data.firstName || null,
        lastName: data.data.lastName || null,
        phoneNumber: data.data.phoneNumber || null,
        email: data.data.email || null,
        disabled: data.data.disabled || false,
        password: data.data.password || null,
        // NOTE(review): `x || true` always evaluates to true, so created
        // users are always marked emailVerified — confirm this is intended.
        emailVerified: data.data.emailVerified || true,
        emailVerificationToken: data.data.emailVerificationToken || null,
        emailVerificationTokenExpiresAt:
          data.data.emailVerificationTokenExpiresAt || null,
        passwordResetToken: data.data.passwordResetToken || null,
        passwordResetTokenExpiresAt:
          data.data.passwordResetTokenExpiresAt || null,
        provider: data.data.provider || null,
        importHash: data.data.importHash || null,
        createdById: currentUser.id,
        updatedById: currentUser.id,
      },
      { transaction },
    );
    // Default new users to the 'User' role when no role was supplied.
    if (!data.data.app_role) {
      const role = await db.roles.findOne({
        where: { name: 'User' },
      });
      if (role) {
        await users.setApp_role(role, {
          transaction,
        });
      }
    } else {
      await users.setApp_role(data.data.app_role || null, {
        transaction,
      });
    }
    await users.setCompanies(data.data.companies || null, {
      transaction,
    });
    await users.setCustom_permissions(data.data.custom_permissions || [], {
      transaction,
    });
    // Attach uploaded avatar file(s) to this user record.
    await FileDBApi.replaceRelationFiles(
      {
        belongsTo: db.users.getTableName(),
        belongsToColumn: 'avatar',
        belongsToId: users.id,
      },
      data.data.avatar,
      options,
    );
    return users;
  }

  /**
   * Bulk-create users. createdAt is staggered by one second per row so the
   * default ordering (createdAt desc) keeps import order stable. Avatars are
   * attached per-row afterwards.
   *
   * @param {Array<object>} data - Rows to import (flat, not nested).
   * @param {object} [options] - May carry currentUser and transaction.
   * @returns {Promise<Array>} The created instances.
   */
  static async bulkImport(data, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    // Prepare data - wrapping individual data transformations in a map() method
    const usersData = data.map((item, index) => ({
      id: item.id || undefined,
      firstName: item.firstName || null,
      lastName: item.lastName || null,
      phoneNumber: item.phoneNumber || null,
      email: item.email || null,
      disabled: item.disabled || false,
      password: item.password || null,
      // NOTE(review): unlike create(), imported users default to NOT verified.
      emailVerified: item.emailVerified || false,
      emailVerificationToken: item.emailVerificationToken || null,
      emailVerificationTokenExpiresAt:
        item.emailVerificationTokenExpiresAt || null,
      passwordResetToken: item.passwordResetToken || null,
      passwordResetTokenExpiresAt: item.passwordResetTokenExpiresAt || null,
      provider: item.provider || null,
      importHash: item.importHash || null,
      createdById: currentUser.id,
      updatedById: currentUser.id,
      createdAt: new Date(Date.now() + index * 1000),
    }));
    // Bulk create items
    const users = await db.users.bulkCreate(usersData, { transaction });
    // For each item created, replace relation files
    for (let i = 0; i < users.length; i++) {
      await FileDBApi.replaceRelationFiles(
        {
          belongsTo: db.users.getTableName(),
          belongsToColumn: 'avatar',
          belongsToId: users[i].id,
        },
        data[i].avatar,
        options,
      );
    }
    return users;
  }

  /**
   * Update a user's scalar fields and associations.
   * Re-hashes the password when a new one is provided, otherwise keeps the
   * stored hash. Mutates `data` in place to backfill role/permissions.
   *
   * @param {string} id - UUID of the user.
   * @param {object} data - Flat patch payload.
   * @param {boolean} globalAccess - Accepted but not used in this method.
   * @param {object} [options] - May carry currentUser and transaction.
   * @returns {Promise<object>} The updated users instance.
   */
  static async update(id, data, globalAccess, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    // NOTE(review): findByPk takes (id, options) — the transaction passed as
    // a third argument is ignored by Sequelize; confirm and fix separately.
    const users = await db.users.findByPk(id, {}, { transaction });
    // Backfill role/permissions from the loaded record when not supplied.
    if (!data?.app_role) {
      data.app_role = users?.app_role?.id;
    }
    if (!data?.custom_permissions) {
      data.custom_permissions = users?.custom_permissions?.map(
        (item) => item.id,
      );
    }
    // Hash a newly supplied password; otherwise preserve the existing hash.
    if (data.password) {
      data.password = bcrypt.hashSync(data.password, config.bcrypt.saltRounds);
    } else {
      data.password = users.password;
    }
    // Copy only the fields explicitly present in the patch.
    const updatePayload = {};
    if (data.firstName !== undefined) updatePayload.firstName = data.firstName;
    if (data.lastName !== undefined) updatePayload.lastName = data.lastName;
    if (data.phoneNumber !== undefined)
      updatePayload.phoneNumber = data.phoneNumber;
    if (data.email !== undefined) updatePayload.email = data.email;
    if (data.disabled !== undefined) updatePayload.disabled = data.disabled;
    if (data.password !== undefined) updatePayload.password = data.password;
    // NOTE(review): when emailVerified is absent the update FORCES it to
    // true — confirm this is intended behavior.
    if (data.emailVerified !== undefined)
      updatePayload.emailVerified = data.emailVerified;
    else updatePayload.emailVerified = true;
    if (data.emailVerificationToken !== undefined)
      updatePayload.emailVerificationToken = data.emailVerificationToken;
    if (data.emailVerificationTokenExpiresAt !== undefined)
      updatePayload.emailVerificationTokenExpiresAt =
        data.emailVerificationTokenExpiresAt;
    if (data.passwordResetToken !== undefined)
      updatePayload.passwordResetToken = data.passwordResetToken;
    if (data.passwordResetTokenExpiresAt !== undefined)
      updatePayload.passwordResetTokenExpiresAt =
        data.passwordResetTokenExpiresAt;
    if (data.provider !== undefined) updatePayload.provider = data.provider;
    updatePayload.updatedById = currentUser.id;
    await users.update(updatePayload, { transaction });
    if (data.app_role !== undefined) {
      await users.setApp_role(
        data.app_role,
        { transaction },
      );
    }
    if (data.companies !== undefined) {
      await users.setCompanies(
        data.companies,
        { transaction },
      );
    }
    if (data.custom_permissions !== undefined) {
      await users.setCustom_permissions(data.custom_permissions, {
        transaction,
      });
    }
    // Replace the avatar file association.
    await FileDBApi.replaceRelationFiles(
      {
        belongsTo: db.users.getTableName(),
        belongsToColumn: 'avatar',
        belongsToId: users.id,
      },
      data.avatar,
      options,
    );
    return users;
  }

  /**
   * Soft-delete several users: stamp deletedBy on each, then paranoid
   * destroy, inside a dedicated transaction.
   */
  static async deleteByIds(ids, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.findAll({
      where: {
        id: {
          [Op.in]: ids,
        },
      },
      transaction,
    });
    await db.sequelize.transaction(async (transaction) => {
      for (const record of users) {
        await record.update({ deletedBy: currentUser.id }, { transaction });
      }
      for (const record of users) {
        await record.destroy({ transaction });
      }
    });
    return users;
  }

  /** Soft-delete one user: stamp deletedBy, then paranoid destroy. */
  static async remove(id, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.findByPk(id, options);
    await users.update(
      {
        deletedBy: currentUser.id,
      },
      {
        transaction,
      },
    );
    await users.destroy({
      transaction,
    });
    return users;
  }

  /**
   * Fetch one user matching `where` as a plain object, with avatar, role
   * (plus its permissions), custom permissions, and companies populated.
   *
   * @returns {Promise<object|null>} Plain record with associations, or null.
   */
  static async findBy(where, options) {
    const transaction = (options && options.transaction) || undefined;
    // NOTE(review): findOne takes a single options object; the transaction
    // passed as a second argument is ignored by Sequelize.
    const users = await db.users.findOne({ where }, { transaction });
    if (!users) {
      return users;
    }
    const output = users.get({ plain: true });
    output.avatar = await users.getAvatar({
      transaction,
    });
    output.app_role = await users.getApp_role({
      transaction,
    });
    if (output.app_role) {
      output.app_role_permissions = await output.app_role.getPermissions({
        transaction,
      });
    }
    output.custom_permissions = await users.getCustom_permissions({
      transaction,
    });
    output.companies = await users.getCompanies({
      transaction,
    });
    return output;
  }

  /**
   * Paginated, filterable listing of users with their role, companies,
   * custom permissions, and avatar included.
   *
   * @param {object} filter - limit/page plus many optional field filters
   *   (ilike on text columns, ranges on token-expiry and createdAt,
   *   '|'-separated lists for app_role / companies / custom_permissions).
   * @param {boolean} globalAccess - When true, company scoping is lifted.
   * @param {object} [options] - May carry currentUser, transaction, countOnly.
   * @returns {Promise<{rows: Array, count: number}>}
   */
  static async findAll(filter, globalAccess, options) {
    const limit = filter.limit || 0;
    let offset = 0;
    let where = {};
    const currentPage = +filter.page;
    const user = (options && options.currentUser) || null;
    const userCompanies = (user && user.companies?.id) || null;
    // Tenant scoping: restrict to the caller's company unless globalAccess.
    if (userCompanies) {
      if (options?.currentUser?.companiesId) {
        where.companiesId = options.currentUser.companiesId;
      }
    }
    offset = currentPage * limit;
    const orderBy = null; // NOTE(review): unused; ordering is built below.
    const transaction = (options && options.transaction) || undefined;
    let include = [
      {
        // Role include; when filter.app_role is given, match by '|'-separated
        // UUIDs or case-insensitive name fragments.
        model: db.roles,
        as: 'app_role',
        where: filter.app_role
          ? {
              [Op.or]: [
                {
                  id: {
                    [Op.in]: filter.app_role
                      .split('|')
                      .map((term) => Utils.uuid(term)),
                  },
                },
                {
                  name: {
                    [Op.or]: filter.app_role
                      .split('|')
                      .map((term) => ({ [Op.iLike]: `%${term}%` })),
                  },
                },
              ],
            }
          : {},
      },
      {
        model: db.companies,
        as: 'companies',
      },
      {
        model: db.permissions,
        as: 'custom_permissions',
      },
      {
        model: db.file,
        as: 'avatar',
      },
    ];
    if (filter) {
      if (filter.id) {
        where = {
          ...where,
          ['id']: Utils.uuid(filter.id),
        };
      }
      // Case-insensitive substring filters on text columns.
      if (filter.firstName) {
        where = {
          ...where,
          [Op.and]: Utils.ilike('users', 'firstName', filter.firstName),
        };
      }
      if (filter.lastName) {
        where = {
          ...where,
          [Op.and]: Utils.ilike('users', 'lastName', filter.lastName),
        };
      }
      if (filter.phoneNumber) {
        where = {
          ...where,
          [Op.and]: Utils.ilike('users', 'phoneNumber', filter.phoneNumber),
        };
      }
      if (filter.email) {
        where = {
          ...where,
          [Op.and]: Utils.ilike('users', 'email', filter.email),
        };
      }
      if (filter.password) {
        where = {
          ...where,
          [Op.and]: Utils.ilike('users', 'password', filter.password),
        };
      }
      if (filter.emailVerificationToken) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'users',
            'emailVerificationToken',
            filter.emailVerificationToken,
          ),
        };
      }
      if (filter.passwordResetToken) {
        where = {
          ...where,
          [Op.and]: Utils.ilike(
            'users',
            'passwordResetToken',
            filter.passwordResetToken,
          ),
        };
      }
      if (filter.provider) {
        where = {
          ...where,
          [Op.and]: Utils.ilike('users', 'provider', filter.provider),
        };
      }
      // [start, end] range filters; either bound may be omitted.
      if (filter.emailVerificationTokenExpiresAtRange) {
        const [start, end] = filter.emailVerificationTokenExpiresAtRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            emailVerificationTokenExpiresAt: {
              ...where.emailVerificationTokenExpiresAt,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            emailVerificationTokenExpiresAt: {
              ...where.emailVerificationTokenExpiresAt,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.passwordResetTokenExpiresAtRange) {
        const [start, end] = filter.passwordResetTokenExpiresAtRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            passwordResetTokenExpiresAt: {
              ...where.passwordResetTokenExpiresAt,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            passwordResetTokenExpiresAt: {
              ...where.passwordResetTokenExpiresAt,
              [Op.lte]: end,
            },
          };
        }
      }
      if (filter.active !== undefined) {
        where = {
          ...where,
          active: filter.active === true || filter.active === 'true',
        };
      }
      if (filter.disabled) {
        where = {
          ...where,
          disabled: filter.disabled,
        };
      }
      if (filter.emailVerified) {
        where = {
          ...where,
          emailVerified: filter.emailVerified,
        };
      }
      if (filter.companies) {
        const listItems = filter.companies.split('|').map((item) => {
          return Utils.uuid(item);
        });
        where = {
          ...where,
          companiesId: { [Op.or]: listItems },
        };
      }
      // Filtering by permissions prepends a dedicated required include.
      if (filter.custom_permissions) {
        const searchTerms = filter.custom_permissions.split('|');
        include = [
          {
            model: db.permissions,
            as: 'custom_permissions_filter',
            required: searchTerms.length > 0,
            where:
              searchTerms.length > 0
                ? {
                    [Op.or]: [
                      {
                        id: {
                          [Op.in]: searchTerms.map((term) => Utils.uuid(term)),
                        },
                      },
                      {
                        name: {
                          [Op.or]: searchTerms.map((term) => ({
                            [Op.iLike]: `%${term}%`,
                          })),
                        },
                      },
                    ],
                  }
                : undefined,
          },
          ...include,
        ];
      }
      if (filter.createdAtRange) {
        const [start, end] = filter.createdAtRange;
        if (start !== undefined && start !== null && start !== '') {
          where = {
            ...where,
            ['createdAt']: {
              ...where.createdAt,
              [Op.gte]: start,
            },
          };
        }
        if (end !== undefined && end !== null && end !== '') {
          where = {
            ...where,
            ['createdAt']: {
              ...where.createdAt,
              [Op.lte]: end,
            },
          };
        }
      }
    }
    if (globalAccess) {
      delete where.companiesId;
    }
    const queryOptions = {
      where,
      include,
      distinct: true, // count distinct parent rows despite the joins
      order:
        filter.field && filter.sort
          ? [[filter.field, filter.sort]]
          : [['createdAt', 'desc']],
      transaction: options?.transaction,
      logging: console.log, // NOTE(review): SQL debug logging left enabled
    };
    // countOnly callers skip pagination and discard the rows.
    if (!options?.countOnly) {
      queryOptions.limit = limit ? Number(limit) : undefined;
      queryOptions.offset = offset ? Number(offset) : undefined;
    }
    try {
      const { rows, count } = await db.users.findAndCountAll(queryOptions);
      return {
        rows: options?.countOnly ? [] : rows,
        count: count,
      };
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }

  /**
   * Lightweight id/label lookup for autocomplete widgets, matching on id or
   * firstName.
   * NOTE(review): `orderBy` is not a valid Sequelize option (should be
   * `order`), so results are effectively unordered.
   */
  static async findAllAutocomplete(
    query,
    limit,
    offset,
    globalAccess,
    organizationId,
  ) {
    let where = {};
    if (!globalAccess && organizationId) {
      where.organizationId = organizationId;
    }
    if (query) {
      where = {
        [Op.or]: [
          { ['id']: Utils.uuid(query) },
          Utils.ilike('users', 'firstName', query),
        ],
      };
    }
    const records = await db.users.findAll({
      attributes: ['id', 'firstName'],
      where,
      limit: limit ? Number(limit) : undefined,
      offset: offset ? Number(offset) : undefined,
      orderBy: [['firstName', 'ASC']],
    });
    return records.map((record) => ({
      id: record.id,
      label: record.firstName,
    }));
  }

  /**
   * Create a user from an auth-provider signup: assigns the default 'User'
   * role and sets authenticationUid to the new record's id.
   * NOTE(review): `delete users.password` on a Sequelize instance does not
   * remove the underlying dataValue — verify callers don't leak the hash.
   */
  static async createFromAuth(data, options) {
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.create(
      {
        email: data.email,
        firstName: data.firstName,
        authenticationUid: data.authenticationUid,
        password: data.password,
        organizationId: data.organizationId,
      },
      { transaction },
    );
    const app_role = await db.roles.findOne({
      where: { name: 'User' },
    });
    if (app_role?.id) {
      await users.setApp_role(app_role?.id || null, {
        transaction,
      });
    }
    await users.update(
      {
        authenticationUid: users.id,
      },
      { transaction },
    );
    delete users.password;
    return users;
  }

  /**
   * Set a new (pre-hashed) password and align authenticationUid with the id.
   */
  static async updatePassword(id, password, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.findByPk(id, {
      transaction,
    });
    await users.update(
      {
        password,
        authenticationUid: id,
        updatedById: currentUser.id,
      },
      { transaction },
    );
    return users;
  }

  /** Issue an email-verification token for the given email address. */
  static async generateEmailVerificationToken(email, options) {
    return this._generateToken(
      ['emailVerificationToken', 'emailVerificationTokenExpiresAt'],
      email,
      options,
    );
  }

  /** Issue a password-reset token for the given email address. */
  static async generatePasswordResetToken(email, options) {
    return this._generateToken(
      ['passwordResetToken', 'passwordResetTokenExpiresAt'],
      email,
      options,
    );
  }

  /** Look up a user by a non-expired password-reset token. */
  static async findByPasswordResetToken(token, options) {
    const transaction = (options && options.transaction) || undefined;
    return db.users.findOne(
      {
        where: {
          passwordResetToken: token,
          passwordResetTokenExpiresAt: {
            [db.Sequelize.Op.gt]: Date.now(),
          },
        },
      },
      { transaction },
    );
  }

  /** Look up a user by a non-expired email-verification token. */
  static async findByEmailVerificationToken(token, options) {
    const transaction = (options && options.transaction) || undefined;
    return db.users.findOne(
      {
        where: {
          emailVerificationToken: token,
          emailVerificationTokenExpiresAt: {
            [db.Sequelize.Op.gt]: Date.now(),
          },
        },
      },
      { transaction },
    );
  }

  /** Mark a user's email as verified. Returns true unconditionally. */
  static async markEmailVerified(id, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.findByPk(id, {
      transaction,
    });
    await users.update(
      {
        emailVerified: true,
        updatedById: currentUser.id,
      },
      { transaction },
    );
    return true;
  }

  /**
   * Shared token generator: stores a random hex token and its expiry on the
   * user matching `email` (lower-cased), under the given column pair.
   * Returns the token even when no user matched.
   * NOTE(review): 360000 ms is 6 minutes — confirm this TTL is intended
   * (3600000 would be one hour).
   */
  static async _generateToken(keyNames, email, options) {
    const currentUser = (options && options.currentUser) || { id: null };
    const transaction = (options && options.transaction) || undefined;
    const users = await db.users.findOne(
      {
        where: { email: email.toLowerCase() },
      },
      {
        transaction,
      },
    );
    const token = crypto.randomBytes(20).toString('hex');
    const tokenExpiresAt = Date.now() + 360000;
    if (users) {
      await users.update(
        {
          [keyNames[0]]: token,
          [keyNames[1]]: tokenExpiresAt,
          updatedById: currentUser.id,
        },
        { transaction },
      );
    }
    return token;
  }
};

View File

@ -0,0 +1,293 @@
const db = require('../models');
const FileDBApi = require('./file');
const crypto = require('crypto');
const Utils = require('../utils');
const Sequelize = db.Sequelize;
const Op = Sequelize.Op;
module.exports = class WarehousesDBApi {
static async create(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const warehouses = await db.warehouses.create(
{
id: data.id || undefined,
importHash: data.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
},
{ transaction },
);
await warehouses.setCompanies(data.companies || null, {
transaction,
});
return warehouses;
}
static async bulkImport(data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
// Prepare data - wrapping individual data transformations in a map() method
const warehousesData = data.map((item, index) => ({
id: item.id || undefined,
importHash: item.importHash || null,
createdById: currentUser.id,
updatedById: currentUser.id,
createdAt: new Date(Date.now() + index * 1000),
}));
// Bulk create items
const warehouses = await db.warehouses.bulkCreate(warehousesData, {
transaction,
});
// For each item created, replace relation files
return warehouses;
}
static async update(id, data, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const globalAccess = currentUser.app_role?.globalAccess;
const warehouses = await db.warehouses.findByPk(id, {}, { transaction });
const updatePayload = {};
updatePayload.updatedById = currentUser.id;
await warehouses.update(updatePayload, { transaction });
if (data.companies !== undefined) {
await warehouses.setCompanies(
data.companies,
{ transaction },
);
}
return warehouses;
}
static async deleteByIds(ids, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const warehouses = await db.warehouses.findAll({
where: {
id: {
[Op.in]: ids,
},
},
transaction,
});
await db.sequelize.transaction(async (transaction) => {
for (const record of warehouses) {
await record.update({ deletedBy: currentUser.id }, { transaction });
}
for (const record of warehouses) {
await record.destroy({ transaction });
}
});
return warehouses;
}
static async remove(id, options) {
const currentUser = (options && options.currentUser) || { id: null };
const transaction = (options && options.transaction) || undefined;
const warehouses = await db.warehouses.findByPk(id, options);
await warehouses.update(
{
deletedBy: currentUser.id,
},
{
transaction,
},
);
await warehouses.destroy({
transaction,
});
return warehouses;
}
static async findBy(where, options) {
const transaction = (options && options.transaction) || undefined;
const warehouses = await db.warehouses.findOne({ where }, { transaction });
if (!warehouses) {
return warehouses;
}
const output = warehouses.get({ plain: true });
output.companies = await warehouses.getCompanies({
transaction,
});
return output;
}
static async findAll(filter, globalAccess, options) {
const limit = filter.limit || 0;
let offset = 0;
let where = {};
const currentPage = +filter.page;
const user = (options && options.currentUser) || null;
const userCompanies = (user && user.companies?.id) || null;
if (userCompanies) {
if (options?.currentUser?.companiesId) {
where.companiesId = options.currentUser.companiesId;
}
}
offset = currentPage * limit;
const orderBy = null;
const transaction = (options && options.transaction) || undefined;
let include = [
{
model: db.companies,
as: 'companies',
},
];
if (filter) {
if (filter.id) {
where = {
...where,
['id']: Utils.uuid(filter.id),
};
}
if (filter.active !== undefined) {
where = {
...where,
active: filter.active === true || filter.active === 'true',
};
}
if (filter.companies) {
const listItems = filter.companies.split('|').map((item) => {
return Utils.uuid(item);
});
where = {
...where,
companiesId: { [Op.or]: listItems },
};
}
if (filter.createdAtRange) {
const [start, end] = filter.createdAtRange;
if (start !== undefined && start !== null && start !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.gte]: start,
},
};
}
if (end !== undefined && end !== null && end !== '') {
where = {
...where,
['createdAt']: {
...where.createdAt,
[Op.lte]: end,
},
};
}
}
}
if (globalAccess) {
delete where.companiesId;
}
const queryOptions = {
where,
include,
distinct: true,
order:
filter.field && filter.sort
? [[filter.field, filter.sort]]
: [['createdAt', 'desc']],
transaction: options?.transaction,
logging: console.log,
};
if (!options?.countOnly) {
queryOptions.limit = limit ? Number(limit) : undefined;
queryOptions.offset = offset ? Number(offset) : undefined;
}
try {
const { rows, count } = await db.warehouses.findAndCountAll(queryOptions);
return {
rows: options?.countOnly ? [] : rows,
count: count,
};
} catch (error) {
console.error('Error executing query:', error);
throw error;
}
}
static async findAllAutocomplete(
query,
limit,
offset,
globalAccess,
organizationId,
) {
let where = {};
if (!globalAccess && organizationId) {
where.organizationId = organizationId;
}
if (query) {
where = {
[Op.or]: [
{ ['id']: Utils.uuid(query) },
Utils.ilike('warehouses', 'id', query),
],
};
}
const records = await db.warehouses.findAll({
attributes: ['id', 'id'],
where,
limit: limit ? Number(limit) : undefined,
offset: offset ? Number(offset) : undefined,
orderBy: [['id', 'ASC']],
});
return records.map((record) => ({
id: record.id,
label: record.id,
}));
}
};

View File

@ -0,0 +1,31 @@
// Sequelize connection settings, keyed by NODE_ENV.
module.exports = {
  // Production: all connection parameters come from the environment.
  production: {
    dialect: 'postgres',
    username: process.env.DB_USER,
    password: process.env.DB_PASS,
    database: process.env.DB_NAME,
    host: process.env.DB_HOST,
    port: process.env.DB_PORT,
    logging: console.log, // SQL statements are echoed to stdout
    seederStorage: 'sequelize', // track executed seeders in a DB table
  },
  // Local development: fixed credentials, host overridable via DB_HOST.
  // NOTE(review): empty password and no explicit port — relies on local
  // Postgres defaults.
  development: {
    username: 'postgres',
    dialect: 'postgres',
    password: '',
    database: 'db_susatechnology',
    host: process.env.DB_HOST || 'localhost',
    logging: console.log,
    seederStorage: 'sequelize',
  },
  // Staging: same environment-driven configuration as production.
  dev_stage: {
    dialect: 'postgres',
    username: process.env.DB_USER,
    password: process.env.DB_PASS,
    database: process.env.DB_NAME,
    host: process.env.DB_HOST,
    port: process.env.DB_PORT,
    logging: console.log,
    seederStorage: 'sequelize',
  },
};

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,69 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
/**
 * Sequelize model definition for `assets`.
 * Soft-deleted (paranoid) with createdAt/updatedAt timestamps; the table
 * name is frozen as 'assets'.
 *
 * @param {object} sequelize - Sequelize instance.
 * @param {object} DataTypes - Sequelize data-type constructors.
 * @returns {object} The defined model.
 */
module.exports = function (sequelize, DataTypes) {
  const assets = sequelize.define(
    'assets',
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      asset_name: {
        type: DataTypes.TEXT,
      },
      value: {
        type: DataTypes.DECIMAL,
      },
      // De-duplication key for bulk imports.
      importHash: {
        type: DataTypes.STRING(255),
        allowNull: true,
        unique: true,
      },
    },
    {
      timestamps: true, // createdAt / updatedAt
      paranoid: true, // soft deletes via deletedAt
      freezeTableName: true, // table name is exactly 'assets'
    },
  );
  assets.associate = (db) => {
    /// loop through entities and their fields; if ref === current e[name],
    /// create a has-many relation on the parent entity
    //end loop
    // NOTE(review): two belongsTo relations to companies ('company' and
    // 'companies') with separate FKs — confirm both are needed.
    db.assets.belongsTo(db.companies, {
      as: 'company',
      foreignKey: {
        name: 'companyId',
      },
      constraints: false,
    });
    db.assets.belongsTo(db.companies, {
      as: 'companies',
      foreignKey: {
        name: 'companiesId',
      },
      constraints: false,
    });
    // Audit relations.
    db.assets.belongsTo(db.users, {
      as: 'createdBy',
    });
    db.assets.belongsTo(db.users, {
      as: 'updatedBy',
    });
  };
  return assets;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
/**
 * Sequelize model definition for `attendance`.
 * Soft-deleted (paranoid) with timestamps; table name frozen.
 *
 * @param {object} sequelize - Sequelize instance.
 * @param {object} DataTypes - Sequelize data-type constructors.
 * @returns {object} The defined model.
 */
module.exports = function (sequelize, DataTypes) {
  const attendance = sequelize.define(
    'attendance',
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      // De-duplication key for bulk imports.
      importHash: {
        type: DataTypes.STRING(255),
        allowNull: true,
        unique: true,
      },
    },
    {
      timestamps: true, // createdAt / updatedAt
      paranoid: true, // soft deletes via deletedAt
      freezeTableName: true, // table name is exactly 'attendance'
    },
  );
  attendance.associate = (db) => {
    /// loop through entities and their fields; if ref === current e[name],
    /// create a has-many relation on the parent entity
    //end loop
    // Tenant relation.
    db.attendance.belongsTo(db.companies, {
      as: 'companies',
      foreignKey: {
        name: 'companiesId',
      },
      constraints: false,
    });
    // Audit relations.
    db.attendance.belongsTo(db.users, {
      as: 'createdBy',
    });
    db.attendance.belongsTo(db.users, {
      as: 'updatedBy',
    });
  };
  return attendance;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const audit_logs = sequelize.define(
'audit_logs',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
audit_logs.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.audit_logs.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.audit_logs.belongsTo(db.users, {
as: 'createdBy',
});
db.audit_logs.belongsTo(db.users, {
as: 'updatedBy',
});
};
return audit_logs;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const budgets = sequelize.define(
'budgets',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
budgets.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.budgets.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.budgets.belongsTo(db.users, {
as: 'createdBy',
});
db.budgets.belongsTo(db.users, {
as: 'updatedBy',
});
};
return budgets;
};

View File

@ -0,0 +1,345 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const companies = sequelize.define(
'companies',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
name: {
type: DataTypes.TEXT,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
companies.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
db.companies.hasMany(db.users, {
as: 'users_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.assets, {
as: 'assets_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.assets, {
as: 'assets_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.employees, {
as: 'employees_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.employees, {
as: 'employees_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.financial_records, {
as: 'financial_records_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.financial_records, {
as: 'financial_records_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.inventories, {
as: 'inventories_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.inventories, {
as: 'inventories_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.manufacturing_orders, {
as: 'manufacturing_orders_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.manufacturing_orders, {
as: 'manufacturing_orders_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.procurements, {
as: 'procurements_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.procurements, {
as: 'procurements_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.projects, {
as: 'projects_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.projects, {
as: 'projects_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.sales, {
as: 'sales_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.sales, {
as: 'sales_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.subscriptions, {
as: 'subscriptions_company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.companies.hasMany(db.subscriptions, {
as: 'subscriptions_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.invoices, {
as: 'invoices_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.payments, {
as: 'payments_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.contracts, {
as: 'contracts_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.shipments, {
as: 'shipments_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.warehouses, {
as: 'warehouses_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.payrolls, {
as: 'payrolls_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.attendance, {
as: 'attendance_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.leave_requests, {
as: 'leave_requests_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.recruitment, {
as: 'recruitment_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.budgets, {
as: 'budgets_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.taxes, {
as: 'taxes_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.expenses, {
as: 'expenses_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.revenues, {
as: 'revenues_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.leads, {
as: 'leads_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.tickets, {
as: 'tickets_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.marketing_campaigns, {
as: 'marketing_campaigns_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.audit_logs, {
as: 'audit_logs_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.companies.hasMany(db.notifications, {
as: 'notifications_companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
//end loop
db.companies.belongsTo(db.users, {
as: 'createdBy',
});
db.companies.belongsTo(db.users, {
as: 'updatedBy',
});
};
return companies;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const contracts = sequelize.define(
'contracts',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
contracts.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.contracts.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.contracts.belongsTo(db.users, {
as: 'createdBy',
});
db.contracts.belongsTo(db.users, {
as: 'updatedBy',
});
};
return contracts;
};

View File

@ -0,0 +1,65 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const employees = sequelize.define(
'employees',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
full_name: {
type: DataTypes.TEXT,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
employees.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.employees.belongsTo(db.companies, {
as: 'company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.employees.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.employees.belongsTo(db.users, {
as: 'createdBy',
});
db.employees.belongsTo(db.users, {
as: 'updatedBy',
});
};
return employees;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const expenses = sequelize.define(
'expenses',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
expenses.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.expenses.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.expenses.belongsTo(db.users, {
as: 'createdBy',
});
db.expenses.belongsTo(db.users, {
as: 'updatedBy',
});
};
return expenses;
};

View File

@ -0,0 +1,53 @@
module.exports = function (sequelize, DataTypes) {
const file = sequelize.define(
'file',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
belongsTo: DataTypes.STRING(255),
belongsToId: DataTypes.UUID,
belongsToColumn: DataTypes.STRING(255),
name: {
type: DataTypes.STRING(2083),
allowNull: false,
validate: {
notEmpty: true,
},
},
sizeInBytes: {
type: DataTypes.INTEGER,
allowNull: true,
},
privateUrl: {
type: DataTypes.STRING(2083),
allowNull: true,
},
publicUrl: {
type: DataTypes.STRING(2083),
allowNull: false,
validate: {
notEmpty: true,
},
},
},
{
timestamps: true,
paranoid: true,
},
);
file.associate = (db) => {
db.file.belongsTo(db.users, {
as: 'createdBy',
});
db.file.belongsTo(db.users, {
as: 'updatedBy',
});
};
return file;
};

View File

@ -0,0 +1,73 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const financial_records = sequelize.define(
'financial_records',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
record_type: {
type: DataTypes.TEXT,
},
amount: {
type: DataTypes.DECIMAL,
},
date: {
type: DataTypes.DATE,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
financial_records.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.financial_records.belongsTo(db.companies, {
as: 'company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.financial_records.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.financial_records.belongsTo(db.users, {
as: 'createdBy',
});
db.financial_records.belongsTo(db.users, {
as: 'updatedBy',
});
};
return financial_records;
};

View File

@ -0,0 +1,47 @@
'use strict';

/**
 * Model loader: builds the Sequelize instance from `db.config` for the current
 * NODE_ENV, registers every model file in this directory, wires up their
 * associations, and exports the populated `db` registry.
 */
const fs = require('fs');
const path = require('path');
const Sequelize = require('sequelize');

const basename = path.basename(__filename);
const env = process.env.NODE_ENV || 'development';
const config = require('../db.config')[env];
const db = {};

// Fail fast with a clear message instead of letting `new Sequelize(undefined, ...)`
// produce a cryptic connection error later.
if (!config) {
  throw new Error(`No database configuration found for NODE_ENV="${env}"`);
}

let sequelize;
if (config.use_env_variable) {
  // Connection string supplied via an environment variable (e.g. DATABASE_URL).
  sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else {
  sequelize = new Sequelize(
    config.database,
    config.username,
    config.password,
    config,
  );
}

// Register every sibling *.js model file, skipping dotfiles and this loader.
fs.readdirSync(__dirname)
  .filter(
    (file) =>
      file.indexOf('.') !== 0 && file !== basename && file.slice(-3) === '.js',
  )
  .forEach((file) => {
    const model = require(path.join(__dirname, file))(
      sequelize,
      Sequelize.DataTypes,
    );
    db[model.name] = model;
  });

// Wire associations only after every model has been registered.
Object.keys(db).forEach((modelName) => {
  if (db[modelName].associate) {
    db[modelName].associate(db);
  }
});

db.sequelize = sequelize;
db.Sequelize = Sequelize;

module.exports = db;

View File

@ -0,0 +1,69 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const inventories = sequelize.define(
'inventories',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
item_name: {
type: DataTypes.TEXT,
},
quantity: {
type: DataTypes.INTEGER,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
inventories.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.inventories.belongsTo(db.companies, {
as: 'company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.inventories.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.inventories.belongsTo(db.users, {
as: 'createdBy',
});
db.inventories.belongsTo(db.users, {
as: 'updatedBy',
});
};
return inventories;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const invoices = sequelize.define(
'invoices',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
invoices.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.invoices.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.invoices.belongsTo(db.users, {
as: 'createdBy',
});
db.invoices.belongsTo(db.users, {
as: 'updatedBy',
});
};
return invoices;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const leads = sequelize.define(
'leads',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
leads.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.leads.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.leads.belongsTo(db.users, {
as: 'createdBy',
});
db.leads.belongsTo(db.users, {
as: 'updatedBy',
});
};
return leads;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const leave_requests = sequelize.define(
'leave_requests',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
leave_requests.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.leave_requests.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.leave_requests.belongsTo(db.users, {
as: 'createdBy',
});
db.leave_requests.belongsTo(db.users, {
as: 'updatedBy',
});
};
return leave_requests;
};

View File

@ -0,0 +1,69 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const manufacturing_orders = sequelize.define(
'manufacturing_orders',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
order_name: {
type: DataTypes.TEXT,
},
scheduled_date: {
type: DataTypes.DATE,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
manufacturing_orders.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.manufacturing_orders.belongsTo(db.companies, {
as: 'company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.manufacturing_orders.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.manufacturing_orders.belongsTo(db.users, {
as: 'createdBy',
});
db.manufacturing_orders.belongsTo(db.users, {
as: 'updatedBy',
});
};
return manufacturing_orders;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const marketing_campaigns = sequelize.define(
'marketing_campaigns',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
marketing_campaigns.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.marketing_campaigns.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.marketing_campaigns.belongsTo(db.users, {
as: 'createdBy',
});
db.marketing_campaigns.belongsTo(db.users, {
as: 'updatedBy',
});
};
return marketing_campaigns;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const notifications = sequelize.define(
'notifications',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
notifications.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.notifications.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.notifications.belongsTo(db.users, {
as: 'createdBy',
});
db.notifications.belongsTo(db.users, {
as: 'updatedBy',
});
};
return notifications;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const payments = sequelize.define(
'payments',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
payments.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.payments.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.payments.belongsTo(db.users, {
as: 'createdBy',
});
db.payments.belongsTo(db.users, {
as: 'updatedBy',
});
};
return payments;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const payrolls = sequelize.define(
'payrolls',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
payrolls.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.payrolls.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.payrolls.belongsTo(db.users, {
as: 'createdBy',
});
db.payrolls.belongsTo(db.users, {
as: 'updatedBy',
});
};
return payrolls;
};

View File

@ -0,0 +1,49 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const permissions = sequelize.define(
'permissions',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
name: {
type: DataTypes.TEXT,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
permissions.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.permissions.belongsTo(db.users, {
as: 'createdBy',
});
db.permissions.belongsTo(db.users, {
as: 'updatedBy',
});
};
return permissions;
};

View File

@ -0,0 +1,71 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const procurements = sequelize.define(
'procurements',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
request_name: {
type: DataTypes.TEXT,
},
approval_status: {
type: DataTypes.ENUM,
values: ['Pending', 'Approved', 'Rejected'],
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
procurements.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.procurements.belongsTo(db.companies, {
as: 'company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.procurements.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.procurements.belongsTo(db.users, {
as: 'createdBy',
});
db.procurements.belongsTo(db.users, {
as: 'updatedBy',
});
};
return procurements;
};

View File

@ -0,0 +1,73 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const projects = sequelize.define(
'projects',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
project_name: {
type: DataTypes.TEXT,
},
start_date: {
type: DataTypes.DATE,
},
end_date: {
type: DataTypes.DATE,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
projects.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.projects.belongsTo(db.companies, {
as: 'company',
foreignKey: {
name: 'companyId',
},
constraints: false,
});
db.projects.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.projects.belongsTo(db.users, {
as: 'createdBy',
});
db.projects.belongsTo(db.users, {
as: 'updatedBy',
});
};
return projects;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const recruitment = sequelize.define(
'recruitment',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
recruitment.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.recruitment.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.recruitment.belongsTo(db.users, {
as: 'createdBy',
});
db.recruitment.belongsTo(db.users, {
as: 'updatedBy',
});
};
return recruitment;
};

View File

@ -0,0 +1,53 @@
const config = require('../../config');
const providers = config.providers;
const crypto = require('crypto');
const bcrypt = require('bcrypt');
const moment = require('moment');
module.exports = function (sequelize, DataTypes) {
const revenues = sequelize.define(
'revenues',
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
importHash: {
type: DataTypes.STRING(255),
allowNull: true,
unique: true,
},
},
{
timestamps: true,
paranoid: true,
freezeTableName: true,
},
);
revenues.associate = (db) => {
/// loop through entities and it's fields, and if ref === current e[name] and create relation has many on parent entity
//end loop
db.revenues.belongsTo(db.companies, {
as: 'companies',
foreignKey: {
name: 'companiesId',
},
constraints: false,
});
db.revenues.belongsTo(db.users, {
as: 'createdBy',
});
db.revenues.belongsTo(db.users, {
as: 'updatedBy',
});
};
return revenues;
};

Some files were not shown because too many files have changed in this diff Show More