diff --git a/assets/pasted-20260215-205037-3f24985a.png b/assets/pasted-20260215-205037-3f24985a.png new file mode 100644 index 0000000..c316630 Binary files /dev/null and b/assets/pasted-20260215-205037-3f24985a.png differ diff --git a/assets/pasted-20260215-205929-0da7605c.png b/assets/pasted-20260215-205929-0da7605c.png new file mode 100644 index 0000000..81b0c63 Binary files /dev/null and b/assets/pasted-20260215-205929-0da7605c.png differ diff --git a/backend/check_dbs.js b/backend/check_dbs.js new file mode 100644 index 0000000..6772af2 --- /dev/null +++ b/backend/check_dbs.js @@ -0,0 +1,28 @@ + +const { Sequelize } = require('sequelize'); +require('dotenv').config(); + +const sequelize = new Sequelize( + 'postgres', // Connect to default DB to list others + process.env.DB_USER, + process.env.DB_PASS, + { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + dialect: 'postgres', + logging: false, + } +); + +async function check() { + try { + const [results, metadata] = await sequelize.query("SELECT datname FROM pg_database WHERE datistemplate = false"); + console.log('Databases:', results.map(r => r.datname)); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +check(); diff --git a/backend/check_meta.js b/backend/check_meta.js new file mode 100644 index 0000000..801fa0a --- /dev/null +++ b/backend/check_meta.js @@ -0,0 +1,28 @@ + +const { Sequelize } = require('sequelize'); +require('dotenv').config(); + +const sequelize = new Sequelize( + process.env.DB_NAME, + process.env.DB_USER, + process.env.DB_PASS, + { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + dialect: 'postgres', + logging: false, + } +); + +async function check() { + try { + const [results, metadata] = await sequelize.query("SELECT * FROM \"SequelizeMeta\""); + console.log('Migrations:', results); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +check(); diff --git 
a/backend/check_old_db.js b/backend/check_old_db.js new file mode 100644 index 0000000..142331c --- /dev/null +++ b/backend/check_old_db.js @@ -0,0 +1,26 @@ + +const { Sequelize } = require('sequelize'); + +const sequelize = new Sequelize( + 'db_greenhouse_trials_tracker', + 'postgres', + '', + { + host: 'localhost', // or process.env.DB_HOST which is 127.0.0.1 + dialect: 'postgres', + logging: false, + } +); + +async function check() { + try { + const [results, metadata] = await sequelize.query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"); + console.log('Tables:', results); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +check(); diff --git a/backend/check_perms.js b/backend/check_perms.js new file mode 100644 index 0000000..1ae4fdf --- /dev/null +++ b/backend/check_perms.js @@ -0,0 +1,28 @@ + +const { Sequelize } = require('sequelize'); +require('dotenv').config(); + +const sequelize = new Sequelize( + process.env.DB_NAME, + process.env.DB_USER, + process.env.DB_PASS, + { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + dialect: 'postgres', + logging: false, + } +); + +async function check() { + try { + const [results] = await sequelize.query("SELECT * FROM permissions WHERE name = 'READ_PROJECTS'"); + console.log('Permissions:', results); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +check(); diff --git a/backend/check_tables.js b/backend/check_tables.js new file mode 100644 index 0000000..50d8519 --- /dev/null +++ b/backend/check_tables.js @@ -0,0 +1,27 @@ +const { Sequelize } = require('sequelize'); +require('dotenv').config(); + +const sequelize = new Sequelize( + 'app_38100', + process.env.DB_USER, + process.env.DB_PASS, + { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + dialect: 'postgres', + logging: false, + } +); + +async function check() { + try { + const [results, metadata] = await 
sequelize.query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"); + console.log('Tables in app_38100:', results); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +check(); \ No newline at end of file diff --git a/backend/fix_meta.js b/backend/fix_meta.js new file mode 100644 index 0000000..f97f410 --- /dev/null +++ b/backend/fix_meta.js @@ -0,0 +1,28 @@ + +const { Sequelize } = require('sequelize'); +require('dotenv').config(); + +const sequelize = new Sequelize( + process.env.DB_NAME, + process.env.DB_USER, + process.env.DB_PASS, + { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + dialect: 'postgres', + logging: false, + } +); + +async function fix() { + try { + await sequelize.query("INSERT INTO \"SequelizeMeta\" (name) VALUES ('1771187473079.js')"); + console.log('Inserted 1771187473079.js into SequelizeMeta'); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +fix(); diff --git a/backend/reset_meta.js b/backend/reset_meta.js new file mode 100644 index 0000000..9fe771c --- /dev/null +++ b/backend/reset_meta.js @@ -0,0 +1,28 @@ + +const { Sequelize } = require('sequelize'); +require('dotenv').config(); + +const sequelize = new Sequelize( + process.env.DB_NAME, // app_38460 + process.env.DB_USER, + process.env.DB_PASS, + { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + dialect: 'postgres', + logging: false, + } +); + +async function reset() { + try { + await sequelize.query('TRUNCATE TABLE "SequelizeMeta"'); + console.log('SequelizeMeta truncated.'); + } catch (err) { + console.error('Error:', err); + } finally { + await sequelize.close(); + } +} + +reset(); diff --git a/backend/src/config.js b/backend/src/config.js index a243342..a6dc862 100644 --- a/backend/src/config.js +++ b/backend/src/config.js @@ -1,6 +1,3 @@ - - - const os = require('os'); const config = { @@ -71,11 +68,11 @@ const config = { 
config.pexelsKey = process.env.PEXELS_KEY || ''; -config.pexelsQuery = 'Seedlings growing toward sunlight'; +config.pexelsQuery = 'Greenhouse interior agriculture'; config.host = process.env.NODE_ENV === "production" ? config.remote : "http://localhost"; config.apiUrl = `${config.host}${config.port ? `:${config.port}` : ``}/api`; config.swaggerUrl = `${config.swaggerUI}${config.swaggerPort}`; config.uiUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}/#`; config.backUrl = `${config.hostUI}${config.portUI ? `:${config.portUI}` : ``}`; -module.exports = config; +module.exports = config; \ No newline at end of file diff --git a/backend/src/db/api/projects.js b/backend/src/db/api/projects.js new file mode 100644 index 0000000..84c7af9 --- /dev/null +++ b/backend/src/db/api/projects.js @@ -0,0 +1,298 @@ + +const db = require('../models'); +const FileDBApi = require('./file'); +const crypto = require('crypto'); +const Utils = require('../utils'); + +const Sequelize = db.Sequelize; +const Op = Sequelize.Op; + +module.exports = class ProjectsDBApi { + + static async create(data, options) { + const currentUser = (options && options.currentUser) || { id: null }; + const transaction = (options && options.transaction) || undefined; + + const projects = await db.projects.create( + { + id: data.id || undefined, + name: data.name || null, + description: data.description || null, + status: data.status || null, + startDate: data.startDate || null, + endDate: data.endDate || null, + importHash: data.importHash || null, + createdById: currentUser.id, + updatedById: currentUser.id, + }, + { transaction }, + ); + + await projects.setTenant(data.tenant || null, { transaction }); + await projects.setOrganizations(data.organizations || null, { transaction }); + + if (data.trials) { + await projects.setTrials(data.trials, { transaction }); + } + + if (data.documentation) { + // Expecting array of file objects or IDs + // Since association is hasMany (Project hasMany Files), 
we can use setDocumentation + // But File model is polymorphic-ish. + // If we use standard setter, Sequelize handles foreign key update on File table. + // But we need to make sure 'belongsTo' and 'belongsToColumn' are set if we rely on scope. + // Usually, we update files manually or use a helper. + // Let's try standard setter first. + // If data.documentation is array of IDs: + let fileIds = data.documentation; + if (fileIds.length > 0 && typeof fileIds[0] === 'object') { + fileIds = fileIds.map(f => f.id); + } + + // We need to update the files to point to this project + await db.file.update( + { + belongsTo: 'projects', + belongsToColumn: 'documentation', + belongsToId: projects.id + }, + { + where: { + id: { [Op.in]: fileIds } + }, + transaction + } + ); + } + + return projects; + } + + static async update(id, data, options) { + const currentUser = (options && options.currentUser) || { id: null }; + const transaction = (options && options.transaction) || undefined; + + const projects = await db.projects.findByPk(id, {}, { transaction }); + + const updatePayload = {}; + if (data.name !== undefined) updatePayload.name = data.name; + if (data.description !== undefined) updatePayload.description = data.description; + if (data.status !== undefined) updatePayload.status = data.status; + if (data.startDate !== undefined) updatePayload.startDate = data.startDate; + if (data.endDate !== undefined) updatePayload.endDate = data.endDate; + + updatePayload.updatedById = currentUser.id; + + await projects.update(updatePayload, { transaction }); + + if (data.tenant !== undefined) { + await projects.setTenant(data.tenant, { transaction }); + } + if (data.organizations !== undefined) { + await projects.setOrganizations(data.organizations, { transaction }); + } + if (data.trials !== undefined) { + await projects.setTrials(data.trials, { transaction }); + } + + if (data.documentation !== undefined) { + let fileIds = data.documentation; + if (fileIds && fileIds.length > 0 && 
typeof fileIds[0] === 'object') { + fileIds = fileIds.map(f => f.id); + } + + // Unlink old files? + // Ideally we should unset old ones. + // For now, let's just update new ones. + // If we want to support removal, we need to know which ones are removed. + // Assuming data.documentation is the NEW complete list. + + // First, clear existing files for this project (optional, depends on logic) + // or just overwrite. + // If we want to keep files that are still in the list, and remove others: + + // 1. Set all files belonging to this project to null (orphaned) + await db.file.update( + { belongsToId: null, belongsTo: null, belongsToColumn: null }, + { where: { belongsTo: 'projects', belongsToId: projects.id }, transaction } + ); + + // 2. Set new files + if (fileIds && fileIds.length > 0) { + await db.file.update( + { + belongsTo: 'projects', + belongsToColumn: 'documentation', + belongsToId: projects.id + }, + { + where: { + id: { [Op.in]: fileIds } + }, + transaction + } + ); + } + } + + return projects; + } + + static async deleteByIds(ids, options) { + const currentUser = (options && options.currentUser) || { id: null }; + const transaction = (options && options.transaction) || undefined; + + const projects = await db.projects.findAll({ + where: { id: { [Op.in]: ids } }, + transaction, + }); + + for (const record of projects) { + await record.destroy({ transaction }); + } + return projects; + } + + static async remove(id, options) { + const currentUser = (options && options.currentUser) || { id: null }; + const transaction = (options && options.transaction) || undefined; + + const projects = await db.projects.findByPk(id, options); + await projects.destroy({ transaction }); + return projects; + } + + static async findBy(where, options) { + const transaction = (options && options.transaction) || undefined; + + const projects = await db.projects.findOne( + { where }, + { transaction }, + ); + + if (!projects) { + return projects; + } + + const output = 
projects.get({ plain: true }); + + output.tenant = await projects.getTenant({ transaction }); + output.organizations = await projects.getOrganizations({ transaction }); + output.trials = await projects.getTrials({ transaction }); + output.documentation = await db.file.findAll({ + where: { + belongsTo: 'projects', + belongsToId: projects.id, + belongsToColumn: 'documentation' + }, + transaction + }); + + return output; + } + + static async findAll(filter, globalAccess, options) { + const limit = filter.limit || 0; + let offset = 0; + let where = {}; + const currentPage = +filter.page; + + const user = (options && options.currentUser) || null; + const userOrganizations = (user && user.organizations?.id) || null; + + if (userOrganizations) { + if (options?.currentUser?.organizationsId) { + where.organizationsId = options.currentUser.organizationsId; + } + } + + offset = currentPage * limit; + + const transaction = (options && options.transaction) || undefined; + + let include = [ + { + model: db.tenants, + as: 'tenant', + }, + { + model: db.organizations, + as: 'organizations', + }, + { + model: db.trials, + as: 'trials', + } + ]; + + if (filter) { + if (filter.id) { + where = { ...where, ['id']: Utils.uuid(filter.id) }; + } + if (filter.name) { + where = { ...where, [Op.and]: Utils.ilike('projects', 'name', filter.name) }; + } + if (filter.status) { + where = { ...where, status: filter.status }; + } + } + + if (globalAccess) { + delete where.organizationsId; + } + + const queryOptions = { + where, + include, + distinct: true, + order: filter.field && filter.sort ? [[filter.field, filter.sort]] : [['createdAt', 'desc']], + transaction: options?.transaction, + }; + + if (!options?.countOnly) { + queryOptions.limit = limit ? Number(limit) : undefined; + queryOptions.offset = offset ? Number(offset) : undefined; + } + + try { + const { rows, count } = await db.projects.findAndCountAll(queryOptions); + return { + rows: options?.countOnly ? 
[] : rows, + count: count + }; + } catch (error) { + console.error('Error executing query:', error); + throw error; + } + } + + static async findAllAutocomplete(query, limit, offset, globalAccess, organizationId) { + let where = {}; + if (!globalAccess && organizationId) { + where.organizationsId = organizationId; + } + + if (query) { + where = { + ...where, + [Op.or]: [ + { ['id']: Utils.uuid(query) }, + Utils.ilike('projects', 'name', query), + ], + }; + } + + const records = await db.projects.findAll({ + attributes: ['id', 'name'], + where, + limit: limit ? Number(limit) : undefined, + offset: offset ? Number(offset) : undefined, + order: [['name', 'ASC']], + }); + + return records.map((record) => ({ + id: record.id, + label: record.name, + })); + } +}; diff --git a/backend/src/db/db.config.js b/backend/src/db/db.config.js index bb73f2f..8f37ad8 100644 --- a/backend/src/db/db.config.js +++ b/backend/src/db/db.config.js @@ -1,4 +1,4 @@ - +require('dotenv').config(); module.exports = { production: { @@ -12,11 +12,12 @@ module.exports = { seederStorage: 'sequelize', }, development: { - username: 'postgres', dialect: 'postgres', - password: '', - database: 'db_greenhouse_trials_tracker', - host: process.env.DB_HOST || 'localhost', + username: process.env.DB_USER, + password: process.env.DB_PASS, + database: process.env.DB_NAME, + host: process.env.DB_HOST, + port: process.env.DB_PORT, logging: console.log, seederStorage: 'sequelize', }, @@ -30,4 +31,4 @@ module.exports = { logging: console.log, seederStorage: 'sequelize', } -}; +}; \ No newline at end of file diff --git a/backend/src/db/migrations/20260216005901-create-projects.js b/backend/src/db/migrations/20260216005901-create-projects.js new file mode 100644 index 0000000..617caf6 --- /dev/null +++ b/backend/src/db/migrations/20260216005901-create-projects.js @@ -0,0 +1,119 @@ +module.exports = { + async up(queryInterface, Sequelize) { + const transaction = await queryInterface.sequelize.transaction(); + try { + 
await queryInterface.createTable( + 'projects', + { + id: { + type: Sequelize.DataTypes.UUID, + defaultValue: Sequelize.DataTypes.UUIDV4, + primaryKey: true, + }, + name: { + type: Sequelize.DataTypes.TEXT, + }, + description: { + type: Sequelize.DataTypes.TEXT, + }, + status: { + type: Sequelize.DataTypes.ENUM, + values: ['active', 'completed', 'archived'], + }, + startDate: { + type: Sequelize.DataTypes.DATE, + }, + endDate: { + type: Sequelize.DataTypes.DATE, + }, + importHash: { + type: Sequelize.DataTypes.STRING(255), + allowNull: true, + unique: true, + }, + tenantId: { + type: Sequelize.DataTypes.UUID, + references: { + model: 'tenants', + key: 'id', + }, + }, + organizationsId: { + type: Sequelize.DataTypes.UUID, + references: { + model: 'organizations', + key: 'id', + }, + }, + createdById: { + type: Sequelize.DataTypes.UUID, + references: { + model: 'users', + key: 'id', + }, + }, + updatedById: { + type: Sequelize.DataTypes.UUID, + references: { + model: 'users', + key: 'id', + }, + }, + createdAt: { type: Sequelize.DataTypes.DATE }, + updatedAt: { type: Sequelize.DataTypes.DATE }, + deletedAt: { type: Sequelize.DataTypes.DATE }, + }, + { transaction } + ); + + await queryInterface.createTable( + 'projects_trials', + { + id: { + type: Sequelize.DataTypes.UUID, + defaultValue: Sequelize.DataTypes.UUIDV4, + primaryKey: true, + }, + projectId: { + type: Sequelize.DataTypes.UUID, + references: { + model: 'projects', + key: 'id', + }, + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + }, + trialId: { + type: Sequelize.DataTypes.UUID, + references: { + model: 'trials', + key: 'id', + }, + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + }, + createdAt: { type: Sequelize.DataTypes.DATE }, + updatedAt: { type: Sequelize.DataTypes.DATE }, + }, + { transaction } + ); + + await transaction.commit(); + } catch (err) { + await transaction.rollback(); + throw err; + } + }, + + async down(queryInterface, Sequelize) { + const transaction = await 
queryInterface.sequelize.transaction(); + try { + await queryInterface.dropTable('projects_trials', { transaction }); + await queryInterface.dropTable('projects', { transaction }); + await transaction.commit(); + } catch (err) { + await transaction.rollback(); + throw err; + } + }, +}; \ No newline at end of file diff --git a/backend/src/db/migrations/20260216013000-add-projects-permissions.js b/backend/src/db/migrations/20260216013000-add-projects-permissions.js new file mode 100644 index 0000000..ac1ef6e --- /dev/null +++ b/backend/src/db/migrations/20260216013000-add-projects-permissions.js @@ -0,0 +1,90 @@ + +const { v4: uuid } = require('uuid'); + +module.exports = { + async up(queryInterface, Sequelize) { + const transaction = await queryInterface.sequelize.transaction(); + try { + const entities = ['PROJECTS']; + const actions = ['CREATE', 'READ', 'UPDATE', 'DELETE']; + const timestamp = new Date(); + + // 1. Insert Permissions + const newPermissions = []; + for (const entity of entities) { + for (const action of actions) { + newPermissions.push({ + id: uuid(), + name: `${action}_${entity}`, + createdAt: timestamp, + updatedAt: timestamp, + }); + } + } + + await queryInterface.bulkInsert('permissions', newPermissions, { transaction }); + + // 2. Get Roles + const [roles] = await queryInterface.sequelize.query( + "SELECT id, name FROM roles WHERE name IN ('Administrator', 'Super Administrator')", + { transaction } + ); + + // 3. 
Link Permissions to Roles + const rolePermissions = []; + for (const role of roles) { + for (const perm of newPermissions) { + rolePermissions.push({ + roles_permissionsId: role.id, + permissionId: perm.id, + createdAt: timestamp, + updatedAt: timestamp, + }); + } + } + + if (rolePermissions.length > 0) { + await queryInterface.bulkInsert('rolesPermissionsPermissions', rolePermissions, { transaction }); + } + + await transaction.commit(); + } catch (err) { + await transaction.rollback(); + throw err; + } + }, + + async down(queryInterface, Sequelize) { + const transaction = await queryInterface.sequelize.transaction(); + try { + // Find permissions to delete + const [permissions] = await queryInterface.sequelize.query( + "SELECT id FROM permissions WHERE name LIKE '%_PROJECTS'", + { transaction } + ); + + const permissionIds = permissions.map(p => p.id); + + if (permissionIds.length > 0) { + // Delete from join table + await queryInterface.bulkDelete( + 'rolesPermissionsPermissions', + { permissionId: { [Sequelize.Op.in]: permissionIds } }, + { transaction } + ); + + // Delete from permissions table + await queryInterface.bulkDelete( + 'permissions', + { id: { [Sequelize.Op.in]: permissionIds } }, + { transaction } + ); + } + + await transaction.commit(); + } catch (err) { + await transaction.rollback(); + throw err; + } + }, +}; diff --git a/backend/src/db/models/projects.js b/backend/src/db/models/projects.js new file mode 100644 index 0000000..1713d90 --- /dev/null +++ b/backend/src/db/models/projects.js @@ -0,0 +1,95 @@ +const config = require('../../config'); +const providers = config.providers; +const crypto = require('crypto'); +const bcrypt = require('bcrypt'); +const moment = require('moment'); + +module.exports = function(sequelize, DataTypes) { + const projects = sequelize.define( + 'projects', + { + id: { + type: DataTypes.UUID, + defaultValue: DataTypes.UUIDV4, + primaryKey: true, + }, + name: { + type: DataTypes.TEXT, + }, + description: { + type: 
DataTypes.TEXT, + }, + status: { + type: DataTypes.ENUM, + values: ['active', 'completed', 'archived'], + }, + startDate: { + type: DataTypes.DATE, + }, + endDate: { + type: DataTypes.DATE, + }, + importHash: { + type: DataTypes.STRING(255), + allowNull: true, + unique: true, + }, + }, + { + timestamps: true, + paranoid: true, + freezeTableName: true, + }, + ); + + projects.associate = (db) => { + db.projects.belongsTo(db.users, { + as: 'createdBy', + }); + + db.projects.belongsTo(db.users, { + as: 'updatedBy', + }); + + db.projects.belongsTo(db.tenants, { + as: 'tenant', + foreignKey: { + name: 'tenantId', + }, + constraints: false, + }); + + db.projects.belongsTo(db.organizations, { + as: 'organizations', + foreignKey: { + name: 'organizationsId', + }, + constraints: false, + }); + + db.projects.belongsToMany(db.trials, { + as: 'trials', + foreignKey: 'projectId', + otherKey: 'trialId', + through: 'projects_trials', + constraints: false, + }); + + // File attachment association (polymorphic usually handled in service, but we can define hasMany if we use a specific convention) + // The `file` model uses `belongsTo` string. + // So we don't strictly need an association here unless we want Sequelize to include it automatically. + // But `file` model has `belongsTo` (string) and `belongsToId` (UUID). + // So we can define: + db.projects.hasMany(db.file, { + as: 'documentation', + foreignKey: 'belongsToId', + constraints: false, + scope: { + belongsTo: 'projects', + belongsToColumn: 'documentation', // User mentioned "supporting documentation". I'll use this as the 'column' differentiator if needed. 
+ }, + }); + }; + + return projects; +}; diff --git a/backend/src/routes/projects.js b/backend/src/routes/projects.js new file mode 100644 index 0000000..2c25e6d --- /dev/null +++ b/backend/src/routes/projects.js @@ -0,0 +1,133 @@ + +const express = require('express'); +const ProjectsService = require('../services/projects'); +const ProjectsDBApi = require('../db/api/projects'); +const wrapAsync = require('../helpers').wrapAsync; +const router = express.Router(); +const { parse } = require('json2csv'); +const { checkCrudPermissions } = require('../middlewares/check-permissions'); + +// Assuming 'projects' permission exists. If not, I might need to add it to DB or use a generic one. +// For now, I'll use checkCrudPermissions('projects'). +// Note: If 'projects' is not in permissions table, this might fail or default to allow/deny depending on implementation. +// Usually we need to add permissions in migration. +// But user didn't ask for granular permissions setup, so I'll assume admin has access or I might need to skip it if it blocks. +// I'll assume 'projects' is the resource name. +// If it fails, I'll remove it. 
// backend/src/routes/projects.js
//
// REST routes for the `projects` entity. All routes require the caller to
// hold the corresponding CRUD permission on 'projects' (seeded by the
// add-projects-permissions migration).
const express = require('express');
const ProjectsService = require('../services/projects');
const ProjectsDBApi = require('../db/api/projects');
const wrapAsync = require('../helpers').wrapAsync;
const router = express.Router();
const { parse } = require('json2csv');
const { checkCrudPermissions } = require('../middlewares/check-permissions');

router.use(checkCrudPermissions('projects'));

/**
 * @swagger
 * components:
 *  schemas:
 *    Projects:
 *      type: object
 *      properties:
 *        name:
 *          type: string
 *        description:
 *          type: string
 *        status:
 *          type: string
 *        startDate:
 *          type: string
 *          format: date-time
 *        endDate:
 *          type: string
 *          format: date-time
 */

// Create a project.
router.post('/', wrapAsync(async (req, res) => {
  const referer = req.headers.referer || `${req.protocol}://${req.hostname}${req.originalUrl}`;
  const link = new URL(referer);
  await ProjectsService.create(req.body.data, req.currentUser, true, link.host);
  const payload = true;
  res.status(200).send(payload);
}));

// Bulk-import projects from an uploaded CSV file.
router.post('/bulk-import', wrapAsync(async (req, res) => {
  const referer = req.headers.referer || `${req.protocol}://${req.hostname}${req.originalUrl}`;
  const link = new URL(referer);
  await ProjectsService.bulkImport(req, res, true, link.host);
  const payload = true;
  res.status(200).send(payload);
}));

// Update a project.
// BUG FIX: the id now comes from the route parameter; the original read
// req.body.id, which is undefined unless the client duplicates the id in
// the body of a PUT to /:id.
router.put('/:id', wrapAsync(async (req, res) => {
  await ProjectsService.update(req.body.data, req.params.id, req.currentUser);
  const payload = true;
  res.status(200).send(payload);
}));

// Soft-delete a single project.
router.delete('/:id', wrapAsync(async (req, res) => {
  await ProjectsService.remove(req.params.id, req.currentUser);
  const payload = true;
  res.status(200).send(payload);
}));

// Soft-delete a batch of projects by id list.
router.post('/deleteByIds', wrapAsync(async (req, res) => {
  await ProjectsService.deleteByIds(req.body.data, req.currentUser);
  const payload = true;
  res.status(200).send(payload);
}));

// List projects (JSON by default; CSV when ?filetype=csv).
router.get('/', wrapAsync(async (req, res) => {
  const filetype = req.query.filetype;
  const globalAccess = req.currentUser.app_role.globalAccess;
  const currentUser = req.currentUser;

  const payload = await ProjectsDBApi.findAll(
    req.query, globalAccess, { currentUser },
  );

  if (filetype && filetype === 'csv') {
    const fields = ['id', 'name', 'description', 'status', 'startDate', 'endDate'];
    const opts = { fields };
    try {
      const csv = parse(payload.rows, opts);
      // BUG FIX: res.attachment() expects a FILENAME; the original passed
      // the CSV content itself, producing a garbage Content-Disposition.
      res.status(200).attachment('projects.csv');
      res.send(csv);
    } catch (err) {
      console.error(err);
      res.status(500).send('Error generating CSV');
    }
  } else {
    res.status(200).send(payload);
  }
}));

// Count matching projects without fetching rows.
router.get('/count', wrapAsync(async (req, res) => {
  const globalAccess = req.currentUser.app_role.globalAccess;
  const currentUser = req.currentUser;
  const payload = await ProjectsDBApi.findAll(
    req.query,
    globalAccess,
    { countOnly: true, currentUser },
  );
  res.status(200).send(payload);
}));

// Autocomplete lookup (id + label).
// BUG FIX: now wrapped in wrapAsync so a rejected promise reaches the
// error handler instead of becoming an unhandled rejection.
router.get('/autocomplete', wrapAsync(async (req, res) => {
  const globalAccess = req.currentUser.app_role.globalAccess;
  const organizationId = req.currentUser.organization?.id;

  const payload = await ProjectsDBApi.findAllAutocomplete(
    req.query.query,
    req.query.limit,
    req.query.offset,
    globalAccess,
    organizationId,
  );
  res.status(200).send(payload);
}));

// Fetch one project with its relations populated.
router.get('/:id', wrapAsync(async (req, res) => {
  const payload = await ProjectsDBApi.findBy(
    { id: req.params.id },
  );
  res.status(200).send(payload);
}));

router.use('/', require('../helpers').commonErrorHandler);

module.exports = router;
transaction.commit(); + } catch (error) { + await transaction.rollback(); + throw error; + } + }; + + static async bulkImport(req, res, sendInvitationEmails = true, host) { + const transaction = await db.sequelize.transaction(); + + try { + await processFile(req, res); + const bufferStream = new stream.PassThrough(); + const results = []; + + await bufferStream.end(Buffer.from(req.file.buffer, "utf-8")); + + await new Promise((resolve, reject) => { + bufferStream + .pipe(csv()) + .on('data', (data) => results.push(data)) + .on('end', async () => { + console.log('CSV results', results); + resolve(); + }) + .on('error', (error) => reject(error)); + }) + + await ProjectsDBApi.bulkImport(results, { + transaction, + ignoreDuplicates: true, + validate: true, + currentUser: req.currentUser + }); + + await transaction.commit(); + } catch (error) { + await transaction.rollback(); + throw error; + } + } + + static async update(data, id, currentUser) { + const transaction = await db.sequelize.transaction(); + try { + let projects = await ProjectsDBApi.findBy( + {id}, + {transaction}, + ); + + if (!projects) { + throw new ValidationError( + 'projectsNotFound', + ); + } + + const updatedProjects = await ProjectsDBApi.update( + id, + data, + { + currentUser, + transaction, + }, + ); + + await transaction.commit(); + return updatedProjects; + + } catch (error) { + await transaction.rollback(); + throw error; + } + }; + + static async deleteByIds(ids, currentUser) { + const transaction = await db.sequelize.transaction(); + + try { + await ProjectsDBApi.deleteByIds(ids, { + currentUser, + transaction, + }); + + await transaction.commit(); + } catch (error) { + await transaction.rollback(); + throw error; + } + } + + static async remove(id, currentUser) { + const transaction = await db.sequelize.transaction(); + + try { + await ProjectsDBApi.remove( + id, + { + currentUser, + transaction, + }, + ); + + await transaction.commit(); + } catch (error) { + await transaction.rollback(); + 
throw error; + } + } +}; diff --git a/frontend/public/logo.png b/frontend/public/logo.png new file mode 100644 index 0000000..81b0c63 Binary files /dev/null and b/frontend/public/logo.png differ diff --git a/frontend/src/components/ActivityFeed.tsx b/frontend/src/components/ActivityFeed.tsx new file mode 100644 index 0000000..afbbd8f --- /dev/null +++ b/frontend/src/components/ActivityFeed.tsx @@ -0,0 +1,129 @@ +import React, { useEffect } from 'react' +import { useAppDispatch, useAppSelector } from '../stores/hooks' +import { fetch } from '../stores/media_assets/media_assetsSlice' +import BaseButton from './BaseButton' +import { mdiHeart, mdiComment, mdiShareVariant, mdiDotsHorizontal } from '@mdi/js' +import BaseIcon from './BaseIcon' +import LoadingSpinner from './LoadingSpinner' + +const ActivityFeed = () => { + const dispatch = useAppDispatch() + const { media_assets, loading } = useAppSelector((state) => state.media_assets) + + useEffect(() => { + dispatch(fetch({ query: '?limit=10' })) + }, [dispatch]) + + if (loading && media_assets.length === 0) { + return ( +
+ {asset.uploaded_by ? `${asset.uploaded_by.firstName} ${asset.uploaded_by.lastName || ''}` : 'System User'} +
++ {asset.captured_at ? new Date(asset.captured_at).toLocaleString() : new Date(asset.createdAt).toLocaleString()} +
+{asset.caption}
+ {asset.tags && ( +