Autosave: 20260203-130645

Flatlogic Bot 2026-02-03 13:06:45 +00:00
parent 88aaaf8f2d
commit 6ee47994e6
11 changed files with 368 additions and 105 deletions

View File

@@ -87,7 +87,7 @@ module.exports = class UsersDBApi {
matriculePaie: data.data.matriculePaie || null,
workdayId: data.data.workdayId || null,
productionSite: data.data.productionSite || null,
remoteWork: data.data.remoteWork || null,
remoteWork: data.data.remoteWork === true ? 'Oui' : (data.data.remoteWork === false ? 'Non' : (data.data.remoteWork || null)),
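// true/false from the API become the stored display strings 'Oui'/'Non';
// string values (e.g. already-normalized CSV input) pass through unchanged.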
hiringDate: data.data.hiringDate || null,
positionEntryDate: data.data.positionEntryDate || null,
departureDate: data.data.departureDate || null,
@@ -227,6 +227,7 @@ module.exports = class UsersDBApi {
position: item.position || null,
team: item.team || null,
departmentId: item.department || item.departmentId || null,
app_roleId: item.app_role || item.app_roleId || null,
importHash: item.importHash || null,
createdById: currentUser.id,
@@ -235,20 +236,25 @@
}));
// Bulk create items
const users = await db.users.bulkCreate(usersData, { transaction });
const users = await db.users.bulkCreate(usersData, {
transaction,
ignoreDuplicates: options.ignoreDuplicates,
validate: options.validate
});
// For each item created, replace relation files
// For each item created, replace relation files only if avatar is provided
for (let i = 0; i < users.length; i++) {
await FileDBApi.replaceRelationFiles(
{
belongsTo: db.users.getTableName(),
belongsToColumn: 'avatar',
belongsToId: users[i].id,
},
data[i].avatar,
options,
);
if (data[i].avatar) {
await FileDBApi.replaceRelationFiles(
{
belongsTo: db.users.getTableName(),
belongsToColumn: 'avatar',
belongsToId: users[i].id,
},
data[i].avatar,
options,
);
}
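// Guarding here means bulk-imported users without an avatar skip the files
// relation entirely instead of calling replaceRelationFiles with undefined.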
}
@@ -324,7 +330,9 @@ module.exports = class UsersDBApi {
if (data.matriculePaie !== undefined) updatePayload.matriculePaie = data.matriculePaie;
if (data.workdayId !== undefined) updatePayload.workdayId = data.workdayId;
if (data.productionSite !== undefined) updatePayload.productionSite = data.productionSite;
if (data.remoteWork !== undefined) updatePayload.remoteWork = data.remoteWork;
if (data.remoteWork !== undefined) {
updatePayload.remoteWork = data.remoteWork === true ? 'Oui' : (data.remoteWork === false ? 'Non' : data.remoteWork);
}
if (data.hiringDate !== undefined) updatePayload.hiringDate = data.hiringDate;
if (data.positionEntryDate !== undefined) updatePayload.positionEntryDate = data.positionEntryDate;
if (data.departureDate !== undefined) updatePayload.departureDate = data.departureDate;
@@ -1022,4 +1030,4 @@ module.exports = class UsersDBApi {
};
};
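Note: both hunks above coerce remoteWork booleans to 'Oui'/'Non' with the same nested ternary, once in the create path and once in the update path. A shared helper would keep the two in sync; a minimal sketch (the normalizeRemoteWork name is hypothetical, not part of this commit):

// Hypothetical helper, not in this commit: one place for the boolean ->
// 'Oui'/'Non' mapping, mirroring the create path's `|| null` fallback.
const normalizeRemoteWork = (value) => {
  if (value === true) return 'Oui';
  if (value === false) return 'Non';
  return value || null;
};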

View File

@@ -1,6 +1,6 @@
const util = require('util');
const Multer = require('multer');
const maxSize = 10 * 1024 * 1024;
const maxSize = 50 * 1024 * 1024;
let processFile = Multer({
storage: Multer.memoryStorage(),
@@ -8,4 +8,4 @@ let processFile = Multer({
}).single("file");
let processFileMiddleware = util.promisify(processFile);
module.exports = processFileMiddleware;
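Note: the 10 MB → 50 MB bump here pairs with the client_max_body_size change in the nginx config at the end of this commit; the proxy limit must be at least as large as Multer's, or big CSV uploads die at nginx with a 413 before ever reaching the app. The limits wiring is elided from this hunk, so the following is a sketch of the usual shape, not the committed code:

// Sketch (assumption: maxSize feeds Multer's fileSize limit, which this
// hunk elides). 50 MiB here matches nginx's client_max_body_size 50M.
const util = require('util');
const Multer = require('multer');
const maxSize = 50 * 1024 * 1024;

const processFile = Multer({
  storage: Multer.memoryStorage(),
  limits: { fileSize: maxSize },
}).single('file');

module.exports = util.promisify(processFile);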

View File

@@ -1,4 +1,3 @@
const express = require('express');
const UsersService = require('../services/users');
@@ -297,11 +296,23 @@ router.get('/', wrapAsync(async (req, res) => {
req.query, { currentUser }
);
if (filetype && filetype === 'csv') {
const fields = ['id','firstName','lastName','phoneNumber','email',
];
const fields = [
{ label: 'Matricule Paie', value: 'matriculePaie' },
{ label: 'WD ID', value: 'workdayId' },
{ label: 'Nom', value: 'lastName' },
{ label: 'Prénom', value: 'firstName' },
{ label: 'N° Tel', value: 'phoneNumber' },
{ label: 'Mail professionnel', value: 'email' },
{ label: 'Site de production', value: 'productionSite' },
{ label: 'Télétravail', value: 'remoteWork' },
{ label: 'Date d\'embauche', value: 'hiringDate' },
{ label: 'Date d\'entrée poste', value: 'positionEntryDate' },
{ label: 'Date de départ', value: 'departureDate' },
{ label: 'Département', value: (row) => row.department ? row.department.name : '' },
{ label: 'Service', value: 'service' },
{ label: 'Poste', value: 'position' },
{ label: 'Équipe (N+1)', value: 'team' },
];
const opts = { fields };
try {
const csv = parse(payload.rows, opts);
@@ -437,4 +448,4 @@ router.get('/:id', wrapAsync(async (req, res) => {
router.use('/', require('../helpers').commonErrorHandler);
module.exports = router;
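Note: the CSV export now uses descriptor objects instead of bare field names. Assuming parse here is json2csv's parse (its require sits outside this hunk), each { label, value } pair sets the CSV header text, and a function value lets the department relation flatten to its name:

// Minimal json2csv usage mirroring the fields above (assumes json2csv,
// whose require is outside this hunk).
const { parse } = require('json2csv');

const rows = [{ lastName: 'Martin', department: { name: 'Production' } }];
const fields = [
  { label: 'Nom', value: 'lastName' },
  { label: 'Département', value: (row) => (row.department ? row.department.name : '') },
];

console.log(parse(rows, { fields }));
// "Nom","Département"
// "Martin","Production"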

View File

@@ -2,11 +2,20 @@ const config = require('../../config');
const assert = require('assert');
const nodemailer = require('nodemailer');
let transporter = null;
module.exports = class EmailSender {
constructor(email) {
this.email = email;
}
static getTransporter() {
if (!transporter && EmailSender.isConfigured) {
transporter = nodemailer.createTransport(config.email);
}
return transporter;
}
async send() {
assert(this.email, 'email is required');
assert(this.email.to, 'email.to is required');
@@ -15,7 +24,11 @@ module.exports = class EmailSender {
const htmlContent = await this.email.html();
const transporter = nodemailer.createTransport(this.transportConfig);
const currentTransporter = EmailSender.getTransporter();
if (!currentTransporter) {
console.warn('Email sender not configured, skipping email to:', this.email.to);
return;
}
const mailOptions = {
from: this.from,
@@ -27,7 +40,7 @@ module.exports = class EmailSender {
},
};
return transporter.sendMail(mailOptions);
return currentTransporter.sendMail(mailOptions);
}
static get isConfigured() {
@@ -41,4 +54,4 @@ module.exports = class EmailSender {
get from() {
return config.email.from;
}
};
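Note: two behavioural changes land here. The transporter becomes a lazily created module-level singleton instead of being rebuilt on every send(), which lets nodemailer reuse connections (and pool them when the transport config sets pool: true), and an unconfigured mailer now logs a warning and returns instead of throwing. Sketch of a call site; the email object's shape beyond to/html is partly elided from this hunk, so treat those fields as assumptions:

// With no SMTP config, send() now resolves without sending (after a console
// warning), so import flows need no isConfigured checks of their own.
new EmailSender({ to: 'user@example.com', html: async () => '<p>Bienvenue</p>' })
  .send()
  .catch(console.error);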

View File

@@ -50,6 +50,8 @@ const errors = {
importHashRequired: 'Import hash is required',
importHashExistent: 'Data has already been imported',
userEmailMissing: 'Some items in the CSV do not have an email',
noRowsFound: 'No rows found in the file or header mapping failed',
fileRequired: 'File is required',
},
},
@@ -101,4 +103,4 @@
},
};
module.exports = errors;

View File

@@ -6,6 +6,7 @@ const csv = require('csv-parser');
const axios = require('axios');
const config = require('../config');
const stream = require('stream');
const moment = require('moment');
const InvitationEmail = require('./email/list/invitation');
@@ -52,54 +53,214 @@ module.exports = class UsersService {
}
static async bulkImport(req, res, sendInvitationEmails = true, host) {
const transaction = await db.sequelize.transaction();
let emailsToInvite = [];
console.log('Starting bulk import...');
try {
await processFile(req, res);
const bufferStream = new stream.PassThrough();
const results = [];
await bufferStream.end(Buffer.from(req.file.buffer, "utf-8")); // convert Buffer to Stream
await new Promise((resolve, reject) => {
bufferStream
.pipe(csv())
.on('data', (data) => results.push(data))
.on('end', () => {
console.log('results csv', results);
resolve();
})
.on('error', (error) => reject(error));
});
const hasAllEmails = results.every((result) => result.email);
if (!hasAllEmails) {
throw new ValidationError('importer.errors.userEmailMissing');
if (!req.file || !req.file.buffer) {
throw new ValidationError('importer.errors.fileRequired');
}
await UsersDBApi.bulkImport(results, {
transaction,
ignoreDuplicates: true,
validate: true,
currentUser: req.currentUser
console.log('File received, size:', req.file.size);
// Detect separator
const content = req.file.buffer.toString('utf-8');
const firstLine = content.split('\n')[0];
let separator = ',';
if (firstLine.includes(';')) separator = ';';
console.log(`Detected separator: "${separator}"`);
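// Excel in French locales exports CSV with ';' as the list separator, so
// sniffing the first line lets one endpoint accept both ',' and ';' files.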
// Get all departments to map names to IDs
const departments = await db.departments.findAll();
const departmentMap = {};
departments.forEach(dept => {
if (dept.name) {
departmentMap[dept.name.toLowerCase().trim()] = dept.id;
}
});
emailsToInvite = results.map((result) => result.email);
// Get existing emails to avoid duplicates
const existingUsers = await db.users.findAll({ attributes: ['email'], paranoid: false });
const existingEmails = new Set(existingUsers.map(u => u.email?.toLowerCase().trim()));
// Get the default role
const defaultRoleName = config.roles?.user || 'Employee';
const userRole = await db.roles.findOne({ where: { name: defaultRoleName } });
const headerMapping = {
'email': 'email',
'e-mail': 'email',
'mail professionnel': 'email',
'prénom': 'firstName',
'prenom': 'firstName',
'nom': 'lastName',
'téléphone': 'phoneNumber',
'telephone': 'phoneNumber',
'n° tel': 'phoneNumber',
'matricule': 'matriculePaie',
'matricule paie': 'matriculePaie',
'wd id': 'workdayId',
'workday': 'workdayId',
'site': 'productionSite',
'site de production': 'productionSite',
'télétravail': 'remoteWork',
'teletravail': 'remoteWork',
'date d\'embauche': 'hiringDate',
'embauche': 'hiringDate',
'date d\'entrée': 'positionEntryDate',
'date d\'entrée poste': 'positionEntryDate',
'entrée poste': 'positionEntryDate',
'date de départ': 'departureDate',
'départ': 'departureDate',
'service': 'service',
'poste': 'position',
'équipe': 'team',
'equipe': 'team',
'équipe (n+1)': 'team',
'département': 'department',
'departement': 'department'
};
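// Keys above are matched after lowercasing, trimming and stripping a UTF-8
// BOM (see mapHeaders below), so 'Prénom', 'PRÉNOM' and ' prénom ' all land
// on firstName; unknown headers fall through under their cleaned name.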
const bufferStream = new stream.PassThrough();
let currentBatch = [];
const batchSize = 1000;
let totalProcessed = 0;
let emailsToInvite = [];
const processBatch = async (batch) => {
if (batch.length === 0) return;
const transaction = await db.sequelize.transaction();
try {
await UsersDBApi.bulkImport(batch, {
transaction,
ignoreDuplicates: true,
validate: false, // Disable validation for speed in large imports
currentUser: req.currentUser
});
await transaction.commit();
totalProcessed += batch.length;
console.log(`Processed batch of ${batch.length}. Total processed: ${totalProcessed}`);
} catch (error) {
await transaction.rollback();
console.error('Batch processing error:', error);
// We could continue with the next batch; for now the import stops on the first batch error.
throw error;
}
};
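// Each batch commits in its own transaction: batches that committed before a
// failure stay in the database, and only the failing batch rolls back before
// the error aborts the rest of the import.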
const parsePromise = new Promise((resolve, reject) => {
bufferStream
.pipe(csv({
separator: separator,
mapHeaders: ({ header }) => {
const lowerHeader = header.toLowerCase().trim();
const cleanHeader = lowerHeader.replace(/^\uFEFF/, '');
return headerMapping[cleanHeader] || cleanHeader;
}
}))
.on('data', async (data) => {
// Clean up data
Object.keys(data).forEach(key => {
if (typeof data[key] === 'string') {
data[key] = data[key].trim();
if (data[key] === '') data[key] = null;
}
});
const email = data.email?.toLowerCase().trim();
if (email && email.includes('@') && !existingEmails.has(email)) {
if (data.department) {
const deptName = data.department.toLowerCase().trim();
if (departmentMap[deptName]) {
data.departmentId = departmentMap[deptName];
}
}
if (data.remoteWork) {
const val = data.remoteWork.toLowerCase().trim();
if (['oui', 'yes', 'y', 'true'].includes(val)) data.remoteWork = 'Oui';
else if (['non', 'no', 'n', 'false'].includes(val)) data.remoteWork = 'Non';
}
['hiringDate', 'positionEntryDate', 'departureDate'].forEach(field => {
if (data[field]) {
const parsedDate = moment(data[field], ['DD/MM/YYYY', 'YYYY-MM-DD', 'MM/DD/YYYY'], true);
data[field] = parsedDate.isValid() ? parsedDate.toDate() : null;
}
});
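// The third argument makes moment parse strictly: a cell matching none of
// the three formats yields an invalid moment and is stored as null rather
// than an Invalid Date.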
if (!data.app_roleId && userRole) {
data.app_roleId = userRole.id;
}
currentBatch.push(data);
existingEmails.add(email);
emailsToInvite.push(email);
if (currentBatch.length >= batchSize) {
const batchToProcess = [...currentBatch];
currentBatch = [];
// We need to pause the stream to wait for the batch to be processed
bufferStream.pause();
processBatch(batchToProcess)
.then(() => bufferStream.resume())
.catch(err => {
bufferStream.destroy(err);
reject(err);
});
}
}
})
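// Caveat: pause() throttles the PassThrough source, but csv-parser may still
// emit rows already sitting in its internal buffer; those simply accumulate
// in currentBatch until resume(), so no data is lost.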
.on('end', async () => {
try {
if (currentBatch.length > 0) {
await processBatch(currentBatch);
}
console.log('CSV parsing and batch processing finished. Total:', totalProcessed);
resolve();
} catch (err) {
reject(err);
}
})
.on('error', (error) => {
console.error('CSV parsing error:', error);
reject(error);
});
});
bufferStream.end(req.file.buffer);
await parsePromise;
if (totalProcessed === 0) {
throw new ValidationError('importer.errors.noRowsFound');
}
// Send emails in background to avoid blocking the response further
if (emailsToInvite.length > 0 && sendInvitationEmails) {
console.log(`Starting background email sending for ${emailsToInvite.length} users...`);
// Use a simple background loop with delays to avoid overwhelming SMTP
const sendEmailsInBackground = async (emails) => {
const batchSize = 50;
for (let i = 0; i < emails.length; i += batchSize) {
const batch = emails.slice(i, i + batchSize);
await Promise.all(batch.map(email =>
AuthService.sendPasswordResetEmail(email, 'invitation', host).catch(err => console.error(`Failed to send email to ${email}:`, err))
));
console.log(`Sent email batch ${i / batchSize + 1}. Total sent: ${Math.min(i + batchSize, emails.length)}`);
// Small delay between batches
await new Promise(resolve => setTimeout(resolve, 1000));
}
};
sendEmailsInBackground(emailsToInvite);
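// Deliberately not awaited: the HTTP response returns once rows are imported,
// while invitations drain in batches of 50 with a 1s pause between batches.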
}
await transaction.commit();
} catch (error) {
await transaction.rollback();
console.error('Bulk import error:', error);
throw error;
}
if (emailsToInvite && emailsToInvite.length && !sendInvitationEmails) {
emailsToInvite.forEach((email) => {
AuthService.sendPasswordResetEmail(email, 'invitation', host);
});
}
}
static async update(data, id, currentUser) {
@@ -167,5 +328,3 @@ module.exports = class UsersService {
}
}
};
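Note: the heart of the new bulkImport is the pause/resume backpressure loop — parse rows until a batch fills, stop the source stream, flush the batch inside its own transaction, then resume parsing. A minimal sketch of that pattern in isolation, with processBatch standing in for the Sequelize work:

// Pause/resume backpressure, isolated (processBatch is any async function;
// `source` is a readable stream emitting one parsed row per 'data' event).
function importWithBatches(source, batchSize, processBatch) {
  let batch = [];
  return new Promise((resolve, reject) => {
    source
      .on('data', (row) => {
        batch.push(row);
        if (batch.length >= batchSize) {
          const chunk = batch;
          batch = [];
          source.pause(); // stop 'data' events while the chunk is written
          processBatch(chunk)
            .then(() => source.resume())
            .catch((err) => {
              source.destroy(err);
              reject(err);
            });
        }
      })
      .on('end', () =>
        (batch.length ? processBatch(batch) : Promise.resolve()).then(resolve, reject),
      )
      .on('error', reject);
  });
}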

Binary file not shown (image added; 77 KiB).

Binary file not shown (image added; 50 KiB).

View File

@@ -36,36 +36,6 @@ export const loadColumns = async (
const hasUpdatePermission = hasPermission(user, 'UPDATE_USERS')
return [
{
field: 'firstName',
headerName: 'First Name',
flex: 1,
minWidth: 120,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'lastName',
headerName: 'Last Name',
flex: 1,
minWidth: 120,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'email',
headerName: 'E-Mail',
flex: 1,
minWidth: 150,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'matriculePaie',
headerName: 'Matricule Paie',
@@ -86,6 +56,102 @@ export const loadColumns = async (
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'lastName',
headerName: 'Nom',
flex: 1,
minWidth: 120,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'firstName',
headerName: 'Prénom',
flex: 1,
minWidth: 120,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'phoneNumber',
headerName: 'N° Tel',
flex: 1,
minWidth: 120,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'email',
headerName: 'Mail professionnel',
flex: 1,
minWidth: 180,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'productionSite',
headerName: 'Site de production',
flex: 1,
minWidth: 150,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'remoteWork',
headerName: 'Télétravail',
flex: 1,
minWidth: 100,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
},
{
field: 'hiringDate',
headerName: 'Date d\'embauche',
flex: 1,
minWidth: 150,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
valueGetter: (params: GridValueGetterParams) =>
dataFormatter.dateFormatter(params.value),
},
{
field: 'positionEntryDate',
headerName: 'Date d\'entrée poste',
flex: 1,
minWidth: 150,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
valueGetter: (params: GridValueGetterParams) =>
dataFormatter.dateFormatter(params.value),
},
{
field: 'departureDate',
headerName: 'Date de départ',
flex: 1,
minWidth: 150,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',
editable: hasUpdatePermission,
valueGetter: (params: GridValueGetterParams) =>
dataFormatter.dateFormatter(params.value),
},
{
field: 'department',
headerName: 'Département',
@@ -124,10 +190,10 @@ export const loadColumns = async (
editable: hasUpdatePermission,
},
{
field: 'productionSite',
headerName: 'Site de production',
field: 'team',
headerName: 'Équipe (N+1)',
flex: 1,
minWidth: 150,
minWidth: 120,
filterable: false,
headerClassName: 'datagrid--header',
cellClassName: 'datagrid--cell',

View File

@@ -149,7 +149,7 @@ const Dashboard = () => {
<div className="flex justify-between align-center">
<div>
<div className="text-lg leading-tight text-gray-500 dark:text-gray-400">
Users
Collaborateurs
</div>
<div className="text-3xl leading-tight font-semibold">
{users}
@@ -349,4 +349,4 @@ Dashboard.getLayout = function getLayout(page: ReactElement) {
return <LayoutAuthenticated>{page}</LayoutAuthenticated>
}
export default Dashboard

View File

@@ -5,7 +5,11 @@ events {
}
http {
client_max_body_size 10M;
client_max_body_size 50M;
proxy_read_timeout 300s;
proxy_connect_timeout 300s;
proxy_send_timeout 300s;
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
@@ -93,4 +97,4 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
}
}
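Note: the nginx changes close the loop on the import work above — client_max_body_size rises to 50M to match Multer's new maxSize, and the three proxy timeouts go to 300s so a long-running CSV import can finish before nginx gives up on the upstream and returns a 504.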