First commit after creating login/signup and token storing in DB

This commit is contained in:
yashwin-foxy 2025-09-09 19:49:42 +05:30
commit 036b47235a
33 changed files with 3787 additions and 0 deletions

View File

@ -0,0 +1,140 @@
---
description:
globs:
alwaysApply: true
---
RULE-001: API Design Standards
rule_id: API-001
category: API Design
applies_to: All API endpoints
requirements:
- RESTful design principles
- Consistent response format (status, message, data, timestamp)
- Proper HTTP status codes
- API versioning (v1, v2)
- OpenAPI/Swagger documentation
validation:
- Success responses must follow {status, message, data, timestamp}
- Error responses must include error code and details
- Version must be in the endpoint path (/api/v1/...)
RULE-002: Authentication & Security
rule_id: AUTH-002
category: Security
applies_to: All protected endpoints
requirements:
- JWT validation middleware
- OAuth 2.0 for external service integrations
- Token refresh mechanism
- Encrypted storage of OAuth tokens in MySQL
- Rate limiting by user/IP
validation:
- All routes must check for valid JWT
- OAuth tokens refreshed before expiry
- Failed login attempts logged with user/IP
RULE-003: n8n Integration Standards
rule_id: N8N-003
category: Integration
applies_to: All external system connections via n8n
requirements:
- Standardized n8n workflows for Zoho, QuickBooks, HubSpot, BambooHR
- Webhook verification for data pushed into backend
- Error handling and retry logic in workflows
- Async processing for heavy sync tasks
- Logging of workflow execution status
validation:
- All workflows must use verified webhooks
- Workflow failures must trigger error events
- Retry mechanism configured for transient API failures
RULE-004: Database Operations
rule_id: DB-004
category: Database
applies_to: All MySQL interactions
requirements:
- Use Sequelize ORM (or Prisma) for MySQL operations
- Strong relational schema with constraints
- Data validation at model level
- Indexing for frequently queried fields
- Soft delete for critical data
- Audit trail for token and integration logs
validation:
- All models must define schema + validations
- Foreign keys must enforce data integrity
- Sensitive data encrypted at rest
- Audit tables must log all changes in integrations
RULE-005: Background Jobs & Scheduling
rule_id: JOB-005
category: Background Processing
applies_to: All scheduled tasks
requirements:
- Job queue implementation (Bull/Agenda/Redis Queue)
- Scheduled sync jobs with external services
- Dead letter queue for failed jobs
- Error handling and retry policies
- Monitoring + alerting for failed jobs
validation:
- Jobs must define timeout & retries
- Failed jobs logged with execution context
- DLQ retention period configured
RULE-006: Caching Strategy
rule_id: CACHE-006
category: Performance
applies_to: All cache operations
requirements:
- Redis for session storage and caching API responses
- TTL-based cache expiration
- Cache invalidation strategies for sync jobs
- Cache key naming: {service}:{operation}:{identifier}
validation:
- Cache entries must always include TTL
- Sync workflows must invalidate outdated cache
- Cache hit/miss ratio tracked in monitoring
RULE-007: Error Handling & Logging
rule_id: ERROR-007
category: Reliability
applies_to: All backend services
requirements:
- Centralized error middleware
- Structured logging with correlation IDs
- Error classification: system, integration, validation
- Log levels: ERROR, WARN, INFO, DEBUG
- Log rotation & retention policies
validation:
- Logs must never include sensitive tokens
- All errors logged with workflow ID (if integration-related)
- Error responses must mask internal details
RULE-008: Workflow Integration (n8n Specific)
rule_id: WORKFLOW-008
category: Integration
applies_to: All workflows triggered via n8n
requirements:
- Secure webhook verification
- Async workflow execution for long tasks
- Workflow status tracking in MySQL
- Monitoring for success/failure metrics
- Recovery mechanisms for failed sync
validation:
- Workflows must store run status in DB
- Webhook events verified via signatures
- Failures trigger retry + alert notification
RULE-009: Environment Configuration
rule_id: CONFIG-009
category: Configuration
applies_to: All environments
requirements:
- Environment-specific configs (dev/stage/prod)
- Secrets in Vault/ENV, not in source code
- Feature flagging for experimental services
- Health check endpoints
- Graceful shutdown on service stop
validation:
- Secrets must be injected at runtime
- Health checks must validate DB, Redis, and n8n connectivity
- Feature flags documented per environment

View File

@ -0,0 +1,195 @@
---
description:
globs:
alwaysApply: true
---
RULE-001: Root Structure
rule_id: FS-001
category: Folder Structure
applies_to: Project Root
requirements:
- Must include the following top-level folders:
- /src → All application code
- /config → Configuration files
- /scripts → Utility scripts (migrations, seeds)
- /tests → Unit & integration tests
- /docs → Documentation & API specs
- Must include the following root files:
- package.json
- .env.example
- README.md
- tsconfig.json (if TypeScript)
validation:
- No business logic in root folder
- Config files must not include secrets
RULE-002: API Layer
rule_id: FS-002
category: Folder Structure
applies_to: /src/api
requirements:
- /src/api must contain:
- /routes → API route definitions
- /controllers → Request handlers
- /middlewares → Shared middleware (auth, rate limit, logging)
- /validators → Request validation schemas
validation:
- Routes must only delegate to controllers
- Controllers must not include business logic
- Middleware must be reusable across services
RULE-003: Authentication
rule_id: FS-003
category: Security
applies_to: /src/auth
requirements:
- Must include:
- jwt.service.js (JWT management)
- oauth.service.js (OAuth 2.0 flows)
- session.service.js (session handling)
- Must store provider configs in /config/auth.js
validation:
- Auth services must not contain route logic
- Token utilities must be stateless
RULE-004: Business Logic Layer
rule_id: FS-004
category: Application Logic
applies_to: /src/services
requirements:
- Each domain service in its own folder:
- /dashboard → Dashboard services
- /integration → Integration orchestration
- /reporting → Reporting/aggregation (future BI)
- Business logic implemented as service classes/functions
validation:
- Services must not directly access DB models
- Services must call repository layer
RULE-005: Integration Layer (n8n & APIs)
rule_id: FS-005
category: Integration
applies_to: /src/integrations
requirements:
- Must include subfolders per integration:
- /zoho
- /quickbooks
- /hubspot
- /bamboohr
- /n8n
- Each integration must contain:
- client.js → API client
- mapper.js → Data mapping/transformation
- handler.js → Webhook/event handler
validation:
- No direct DB writes inside client files
- Handlers must go through service layer
RULE-006: Data Persistence Layer
rule_id: FS-006
category: Data Layer
applies_to: /src/data
requirements:
- Must include:
- /models → Sequelize/Prisma models
- /repositories → Data access logic
- /migrations → Database migrations
- /seeds → Initial test/demo data
validation:
- Repositories must be the only layer accessing models
- No raw queries in services (must go through repository)
RULE-007: Background Jobs
rule_id: FS-007
category: Jobs
applies_to: /src/jobs
requirements:
- Must include:
- /workers → Queue processors
- /schedulers → Cron/scheduled tasks
- /queues → Job definitions
validation:
- Jobs must not block main thread
- Workers must log execution status
RULE-008: Utilities & Shared Modules
rule_id: FS-008
category: Utilities
applies_to: /src/utils
requirements:
- Must include:
- logger.js
- error-handler.js
- constants.js
- helpers.js
- Utilities must not depend on services
validation:
- Utilities must be stateless
- Logger must include correlation IDs
RULE-009: Configuration
rule_id: FS-009
category: Configuration
applies_to: /config
requirements:
- Must include:
- database.js (MySQL config)
- redis.js (cache config)
- auth.js (OAuth provider config)
- app.js (app-level configs)
- Must support multiple environments (dev/stage/prod)
validation:
- No hardcoded secrets
- Configs must be environment-driven
RULE-010: Testing
rule_id: FS-010
category: Testing
applies_to: /tests
requirements:
- Must include:
- /unit → Unit tests per service
- /integration → API & DB integration tests
- /mocks → Mock data
- Must use Jest or Mocha
validation:
- All controllers must have unit tests
- Critical integrations must have mock-based tests
/CentralizedReportingBackend
/src
/api
/routes
/controllers
/middlewares
/validators
/auth
/services
/dashboard
/integration
/integrations
/zoho
/quickbooks
/hubspot
/bamboohr
/n8n
/data
/models
/repositories
/migrations
/seeds
/jobs
/workers
/schedulers
/queues
/utils
/config
/scripts
/tests
/unit
/integration
/mocks
/docs
package.json
README.md
.env.example

83
.gitignore vendored Normal file
View File

@ -0,0 +1,83 @@
node_modules/
.env
uploads/
npm-debug.log*
coverage/
.DS_Store
dist/
tmp/
# OSX
#
.DS_Store
# Xcode
#
build/
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata
*.xccheckout
*.moved-aside
DerivedData
*.hmap
*.ipa
*.xcuserstate
**/.xcode.env.local
# Android/IntelliJ
#
build/
.idea
.gradle
local.properties
*.iml
*.hprof
.cxx/
*.keystore
!debug.keystore
.kotlin/
# node.js
#
node_modules/
npm-debug.log
yarn-error.log
# fastlane
#
# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
# screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://docs.fastlane.tools/best-practices/source-control/
**/fastlane/report.xml
**/fastlane/Preview.html
**/fastlane/screenshots
**/fastlane/test_output
# Bundle artifact
*.jsbundle
# Ruby / CocoaPods
**/Pods/
/vendor/bundle/
# Temporary files created by Metro to check the health of the file watcher
.metro-health-check*
# testing
/coverage
# Yarn
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

20
README.md Normal file
View File

@ -0,0 +1,20 @@
# Centralized Reporting Backend
Quick start
1. Copy env
- `cp .env.example .env`
2. Start MySQL and Redis
3. Run migration
- `node src/db/migrate.js`
4. Start dev server
- `npm run dev`
API
- Health: `GET /health`
- Users:
- `POST /api/v1/users/register` { email, password, firstName?, lastName? }
- `GET /api/v1/users/me` (Bearer token required)
- `PUT /api/v1/users/me` (Bearer token, form-data `profilePicture`)
- `DELETE /api/v1/users/me` (Bearer token)

9
config/app.js Normal file
View File

@ -0,0 +1,9 @@
require('dotenv').config();

// Application-level settings. Everything is environment-driven, with
// development-friendly fallbacks when the variable is unset.
const env = process.env.NODE_ENV || 'development';
const port = parseInt(process.env.PORT || '4000', 10);
const apiPrefix = process.env.API_PREFIX || '/api/v1';
const appName = process.env.APP_NAME || 'CentralizedReporting';

module.exports = { env, port, apiPrefix, appName };

11
config/database.js Normal file
View File

@ -0,0 +1,11 @@
require('dotenv').config();
module.exports = {
username: process.env.DB_USER || 'root',
password: process.env.DB_PASSWORD || 'Admin@123',
database: process.env.DB_NAME || 'centralized_reporting',
host: process.env.DB_HOST || '127.0.0.1',
port: parseInt(process.env.DB_PORT || '3306', 10),
dialect: 'mysql',
logging: false,
};

2559
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

35
package.json Normal file
View File

@ -0,0 +1,35 @@
{
"name": "centralized-reporting-backend",
"version": "1.0.0",
"description": "Centralized Reporting Backend - Express + Sequelize + MySQL + Redis",
"main": "src/server.js",
"license": "MIT",
"scripts": {
"start": "node src/server.js",
"dev": "nodemon src/server.js",
"migrate:sync": "node scripts/sync.js"
},
"dependencies": {
"axios": "^1.11.0",
"bcrypt": "^6.0.0",
"cors": "^2.8.5",
"dotenv": "^17.2.2",
"express": "^4.21.2",
"express-async-errors": "^3.1.1",
"express-rate-limit": "^8.1.0",
"helmet": "^8.1.0",
"ioredis": "^5.7.0",
"joi": "^18.0.1",
"jsonwebtoken": "^9.0.2",
"morgan": "^1.10.1",
"multer": "^2.0.2",
"mysql2": "^3.14.5",
"sequelize": "^6.37.7",
"swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.1",
"uuid": "^13.0.0"
},
"devDependencies": {
"nodemon": "^3.1.10"
}
}

View File

@ -0,0 +1,52 @@
const bcrypt = require('bcrypt');
const { success } = require('../../utils/response');
const repo = require('../../data/repositories/userRepository');
const jwtService = require('../../auth/jwt.service');
const session = require('../../auth/session.service');
// POST /auth/login — verify email/password, issue an access token plus a
// 7-day refresh token, and persist the refresh token server-side so it can
// be checked and revoked later.
async function login(req, res) {
  const { email, password } = req.body;

  const rejectCredentials = () =>
    res.status(401).json({ status: 'error', message: 'Invalid credentials', errorCode: 'BAD_CREDENTIALS', timestamp: new Date().toISOString() });

  const user = await repo.findByEmail(email);
  if (!user) return rejectCredentials();

  const passwordMatches = await bcrypt.compare(password, user.passwordHash);
  if (!passwordMatches) return rejectCredentials();

  const accessToken = jwtService.sign({ uuid: user.uuid, role: user.role });
  const refreshToken = jwtService.sign({ uuid: user.uuid, type: 'refresh' }, { expiresIn: '7d' });
  await session.storeRefreshToken(user.uuid, refreshToken);

  const displayName = [user.firstName, user.lastName].filter(Boolean).join(' ');
  res.json(
    success('Logged in', {
      accessToken,
      refreshToken,
      user: {
        id: user.id,
        uuid: user.uuid,
        email: user.email,
        displayName,
        role: user.role
      }
    })
  );
}
// POST /auth/refresh — exchange a valid, still-stored refresh token for a
// new access token. Any verification/lookup failure yields the same 401.
async function refresh(req, res) {
  const { refreshToken } = req.body;
  try {
    const payload = jwtService.verify(refreshToken);
    if (payload.type !== 'refresh') throw new Error('Invalid token');
    const stored = await session.getRefreshToken(payload.uuid);
    if (stored !== refreshToken) return res.status(401).json({ status: 'error', message: 'Invalid refresh token', errorCode: 'INVALID_REFRESH', timestamp: new Date().toISOString() });
    // BUG FIX: refresh tokens are signed with { uuid, type } only (see login),
    // so `payload.role` was always undefined and refreshed access tokens lost
    // the user's role. Reload the user to embed the current role.
    const user = await repo.findByUuid(payload.uuid);
    if (!user) return res.status(401).json({ status: 'error', message: 'Invalid refresh token', errorCode: 'INVALID_REFRESH', timestamp: new Date().toISOString() });
    const accessToken = jwtService.sign({ uuid: user.uuid, role: user.role });
    res.json(success('Token refreshed', { accessToken }));
  } catch (e) {
    return res.status(401).json({ status: 'error', message: 'Invalid refresh token', errorCode: 'INVALID_REFRESH', timestamp: new Date().toISOString() });
  }
}
// POST /auth/logout — revoke the caller's stored refresh token. Requires the
// auth middleware (req.user comes from the verified access token).
async function logout(req, res) {
  const { uuid } = req.user;
  await session.revokeRefreshToken(uuid);
  res.json(success('Logged out'));
}

module.exports = { login, refresh, logout };

View File

@ -0,0 +1,73 @@
const { success, failure } = require('../../utils/response');
const service = require('../../services/userService');
const axios = require('axios');
const userAuthTokenRepo = require('../../data/repositories/userAuthTokenRepository');
const { encrypt } = require('../../utils/crypto');
// POST /users/register — create a new account; responds 201 with the public
// identifiers only (never the password hash).
async function register(req, res) {
  const created = await service.registerUser(req.body);
  res.status(201).json(success('User registered', { uuid: created.uuid, email: created.email }));
}
// GET /users/me — return the authenticated user's profile.
async function me(req, res) {
  const profile = await service.getProfile(req.user.uuid);
  res.json(success('Profile', profile));
}
// PUT /users/me — update the authenticated user's profile. When multer has
// stored an upload (req.file), its public URL under /uploads is recorded as
// the profile picture.
async function updateMe(req, res) {
  const updates = { ...req.body };
  if (req.file) updates.profilePicture = `/uploads/${req.file.filename}`;
  const updated = await service.updateProfile(req.user.uuid, updates);
  res.json(success('Profile updated', updated));
}
// DELETE /users/me — remove the authenticated user's account.
async function removeMe(req, res) {
  await service.removeUser(req.user.uuid);
  res.json(success('Account removed'));
}

module.exports = { register, me, updateMe, removeMe };
// Exchange a Zoho OAuth2 authorization code for tokens and persist them
// (encrypted) against the given user id / service name.
async function exchangeZohoToken(req, res) {
  const { authorization_code, id, service_name } = req.body;
  // TODO(review): `id` comes from the request body and is not verified to
  // belong to the authenticated user — confirm the business rule and enforce
  // ownership here if required.
  const params = new URLSearchParams();
  params.append('code', authorization_code);
  params.append('client_id', process.env.ZOHO_CLIENT_ID);
  params.append('client_secret', process.env.ZOHO_CLIENT_SECRET);
  params.append('redirect_uri', process.env.ZOHO_REDIRECT_URI || 'centralizedreportingsystem://oauth/callback');
  params.append('grant_type', 'authorization_code');
  try {
    const resp = await axios.post('https://accounts.zoho.com/oauth/v2/token', params.toString(), {
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' }
    });
    const data = resp.data || {};
    // Zoho may answer HTTP 200 with an error payload (e.g. { error: 'invalid_code' }).
    if (data.error === 'invalid_code' || !data.access_token) {
      return res.status(400).json(
        failure('Invalid authorization code', 'ZOHO_INVALID_CODE', data)
      );
    }
    const { access_token, refresh_token, expires_in } = data;
    const expiresAt = expires_in ? new Date(Date.now() + expires_in * 1000) : null;
    await userAuthTokenRepo.createToken({
      userId: id,
      serviceName: service_name,
      accessToken: encrypt(access_token),
      refreshToken: refresh_token ? encrypt(refresh_token) : null,
      expiresAt
    });
    // SECURITY FIX: previously the raw Zoho access/refresh tokens were echoed
    // back to the client even though they are stored encrypted server-side.
    // Return non-sensitive metadata only.
    return res.json(success('Zoho tokens stored', { serviceName: service_name, expiresAt }));
  } catch (e) {
    return res.status(400).json(failure('Zoho token exchange failed', 'ZOHO_TOKEN_EXCHANGE_FAILED', e.response?.data || e.message));
  }
}
module.exports.exchangeZohoToken = exchangeZohoToken;

View File

@ -0,0 +1,18 @@
const jwt = require('jsonwebtoken');
const config = require('../../config');
// Express middleware: require a valid "Bearer <JWT>" Authorization header.
// On success the decoded payload is attached as req.user; otherwise a 401 is
// returned (NO_TOKEN when the header is missing/malformed, INVALID_TOKEN when
// verification fails).
module.exports = function auth(req, res, next) {
  const rawHeader = req.headers.authorization || '';
  let token = null;
  if (rawHeader.startsWith('Bearer ')) token = rawHeader.slice(7);
  if (!token) {
    return res.status(401).json({ status: 'error', message: 'Unauthorized', errorCode: 'NO_TOKEN', timestamp: new Date().toISOString() });
  }
  try {
    req.user = jwt.verify(token, config.auth.jwtSecret);
    next();
  } catch (e) {
    return res.status(401).json({ status: 'error', message: 'Invalid token', errorCode: 'INVALID_TOKEN', timestamp: new Date().toISOString() });
  }
};

View File

@ -0,0 +1,20 @@
const { failure } = require('../utils/response');
const logger = require('../utils/logger');
module.exports = function errorHandler(err, req, res, next) {
const correlationId = logger.getCorrelationId(req);
const status = err.status || 500;
const errorCode = err.code || 'INTERNAL_SERVER_ERROR';
const message = status === 500 ? 'Something went wrong' : err.message || 'Error';
logger.error('Request failed', {
correlationId,
path: req.originalUrl,
method: req.method,
status,
errorCode,
stack: status === 500 ? err.stack : undefined
});
res.status(status).json(failure(message, errorCode));
};

View File

@ -0,0 +1,24 @@
const express = require('express');
const Joi = require('joi');
const { login, refresh, logout } = require('../controllers/authController');
const auth = require('../middlewares/auth');
const router = express.Router();

// Build a middleware that validates req.body against a Joi schema. Invalid
// requests get a 400 VALIDATION_ERROR; valid bodies are replaced with the
// sanitized (stripUnknown) value.
function validate(schema) {
  return (req, res, next) => {
    const result = schema.validate(req.body, { abortEarly: false, stripUnknown: true });
    if (result.error) {
      return res.status(400).json({ status: 'error', message: 'Validation failed', errorCode: 'VALIDATION_ERROR', details: result.error.details, timestamp: new Date().toISOString() });
    }
    req.body = result.value;
    next();
  };
}
// Request schemas.
const loginSchema = Joi.object({
  email: Joi.string().email().required(),
  password: Joi.string().required()
});
const refreshSchema = Joi.object({
  refreshToken: Joi.string().required()
});

// login/refresh are public; logout requires a valid access token.
router.post('/login', validate(loginSchema), login);
router.post('/refresh', validate(refreshSchema), refresh);
router.post('/logout', auth, logout);

module.exports = router;

View File

@ -0,0 +1,42 @@
const express = require('express');
const multer = require('multer');
const path = require('path');
const { register, me, updateMe, removeMe, exchangeZohoToken } = require('../controllers/userController');
const auth = require('../middlewares/auth');
const { registerSchema, updateSchema } = require('../validators/userValidator');
const Joi = require('joi');
const router = express.Router();

// Disk storage for profile pictures under <cwd>/uploads. Filenames are
// prefixed with a timestamp to avoid collisions.
// SECURITY FIX: the client-supplied originalname is reduced to its basename
// so a crafted name (e.g. containing "../") cannot escape the uploads
// directory or inject path separators.
const storage = multer.diskStorage({
  destination: (req, file, cb) => cb(null, path.join(process.cwd(), 'uploads')),
  filename: (req, file, cb) => cb(null, `${Date.now()}-${path.basename(file.originalname)}`)
});
const upload = multer({ storage });
// Build validation middleware for a Joi schema. GET requests validate the
// query string; all other methods validate the body. The sanitized
// (stripUnknown) value replaces the original input on success.
function validate(schema) {
  return (req, res, next) => {
    const isGet = req.method === 'GET';
    const input = isGet ? req.query : req.body;
    const { error, value } = schema.validate(input, { abortEarly: false, stripUnknown: true });
    if (error) {
      return res.status(400).json({ status: 'error', message: 'Validation failed', errorCode: 'VALIDATION_ERROR', details: error.details, timestamp: new Date().toISOString() });
    }
    if (isGet) {
      req.query = value;
    } else {
      req.body = value;
    }
    next();
  };
}
// Public endpoint — account creation.
router.post('/register', validate(registerSchema), register);
// The remaining routes require a valid Bearer access token (auth middleware).
router.get('/me', auth, me);
// Multer runs before validation so text fields from multipart form-data are
// parsed into req.body; the uploaded file arrives as req.file.
router.put('/me', auth, upload.single('profilePicture'), validate(updateSchema), updateMe);
router.delete('/me', auth, removeMe);
// OAuth token exchange (Zoho request currently)
const zohoTokenSchema = Joi.object({
authorization_code: Joi.string().required(),
id: Joi.number().required(),
service_name: Joi.string().valid('zoho', 'keka', 'bamboohr', 'hubspot', 'other').required()
});
router.post('/zoho/token', auth, validate(zohoTokenSchema), exchangeZohoToken);
module.exports = router;

View File

@ -0,0 +1,17 @@
const Joi = require('joi');
// Joi schemas for user registration and profile updates. Name fields are
// optional and may be empty or null.
const optionalName = Joi.string().allow('', null);

const registerSchema = Joi.object({
  email: Joi.string().email().required(),
  password: Joi.string().min(6).required(), // minimum password length: 6
  firstName: optionalName,
  lastName: optionalName
});

const updateSchema = Joi.object({
  email: Joi.string().email(),
  firstName: optionalName,
  lastName: optionalName
});

module.exports = { registerSchema, updateSchema };

44
src/app.js Normal file
View File

@ -0,0 +1,44 @@
require('dotenv').config();
// Patches Express so rejected async route handlers propagate to error middleware.
require('express-async-errors');
const express = require('express');
const path = require('path');
const helmet = require('helmet');
const cors = require('cors');
const morgan = require('morgan');
const rateLimit = require('express-rate-limit');
const { success } = require('./utils/response');
const config = require('./config');
const userRoutes = require('./api/routes/userRoutes');
const authRoutes = require('./api/routes/authRoutes');
const sequelize = require('./db/pool');
// Application wiring. Middleware registration order matters: security headers,
// CORS, body parsing, request logging, static uploads, and the global rate
// limit are all installed before routes.
const app = express();
app.use(helmet());
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(morgan('combined'));
// Serve uploaded profile pictures from <cwd>/uploads.
app.use('/uploads', express.static(path.join(process.cwd(), 'uploads')));
// Global rate limit: 100 requests per 15-minute window per client.
const limiter = rateLimit({ windowMs: 15 * 60 * 1000, max: 100 });
app.use(limiter);
app.get('/', (req, res) => {
res.send('Welcome to Centralized Reporting System Backend');
});
// Health probe: verifies the MySQL connection is alive.
app.get('/health', async (req, res) => {
try {
await sequelize.authenticate();
res.json(success('OK', { db: 'up', env: config.app.env }));
} catch (e) {
res.status(500).json({ status: 'error', message: 'DB check failed', errorCode: 'HEALTH_FAIL', timestamp: new Date().toISOString() });
}
});
app.use(`${config.app.apiPrefix}/auth`, authRoutes);
app.use(`${config.app.apiPrefix}/users`, userRoutes);
// NOTE(review): no error-handling middleware is registered here even though an
// errorHandler module exists in the codebase — confirm it should be mounted
// after the routes.
module.exports = app;

13
src/auth/jwt.service.js Normal file
View File

@ -0,0 +1,13 @@
const jwt = require('jsonwebtoken');
const config = require('../config');
// Sign a JWT with the application secret. Callers may pass extra jsonwebtoken
// options via `opts` (e.g. expiresIn), which override the configured default
// expiry.
function sign(payload, opts = {}) {
  const options = { expiresIn: config.auth.jwtExpiresIn, ...opts };
  return jwt.sign(payload, config.auth.jwtSecret, options);
}

// Verify a JWT and return its decoded payload; throws on invalid or expired
// tokens.
function verify(token) {
  return jwt.verify(token, config.auth.jwtSecret);
}

module.exports = { sign, verify };

View File

@ -0,0 +1,69 @@
const Redis = require('ioredis');
const config = require('../config');
const logger = require('../utils/logger');
// Refresh-token session store: Redis-backed when available, with a
// process-local in-memory Map as fallback. Set REDIS_ENABLED=false to skip
// Redis entirely. NOTE: the in-memory fallback does not survive restarts and
// is not shared across instances.
const REDIS_ENABLED = (process.env.REDIS_ENABLED || 'true').toLowerCase() !== 'false';
let redis = null;
let memoryStore = new Map();
if (REDIS_ENABLED) {
redis = new Redis({
host: config.redis.host,
port: config.redis.port,
password: config.redis.password,
// Connect on first use and fail fast rather than queueing commands offline.
lazyConnect: true,
enableOfflineQueue: false,
maxRetriesPerRequest: 1,
retryStrategy: () => null
});
redis.on('error', (err) => {
logger.warn('Redis error (using fallback if needed)', { message: err.message });
});
}
// Return a connected Redis client, or null when Redis is disabled or
// unreachable (callers then fall back to the in-memory store).
async function ensureRedis() {
  if (!REDIS_ENABLED || !redis) return null;
  if (redis.status === 'ready') return redis;
  try {
    await redis.connect();
    return redis;
  } catch (e) {
    logger.warn('Redis connect failed, using in-memory fallback', { message: e.message });
    return null;
  }
}
// Persist a refresh token keyed by user uuid with a TTL (default: 7 days).
async function storeRefreshToken(userUuid, token, ttlSeconds = 60 * 60 * 24 * 7) {
  const key = `auth:refresh:${userUuid}`;
  const client = await ensureRedis();
  if (client) {
    return client.set(key, token, 'EX', ttlSeconds);
  }
  memoryStore.set(key, { token, expiresAt: Date.now() + ttlSeconds * 1000 });
}
// Fetch the stored refresh token for a user. The in-memory fallback enforces
// expiry lazily on read; returns null when absent or expired.
async function getRefreshToken(userUuid) {
  const key = `auth:refresh:${userUuid}`;
  const client = await ensureRedis();
  if (client) return client.get(key);
  const entry = memoryStore.get(key);
  if (!entry) return null;
  if (entry.expiresAt < Date.now()) {
    memoryStore.delete(key);
    return null;
  }
  return entry.token;
}
// Remove the stored refresh token for a user (logout / revocation).
async function revokeRefreshToken(userUuid) {
  const key = `auth:refresh:${userUuid}`;
  const client = await ensureRedis();
  if (client) {
    return client.del(key);
  }
  memoryStore.delete(key);
}
// Health-check helper: pings Redis when available; the in-memory fallback
// always reports 'PONG'.
async function ping() {
  const client = await ensureRedis();
  return client ? client.ping() : 'PONG';
}

module.exports = { storeRefreshToken, getRefreshToken, revokeRefreshToken, ping };

22
src/config/index.js Normal file
View File

@ -0,0 +1,22 @@
require('dotenv').config();
const appConfig = require('../../config/app');
const dbConfig = require('../../config/database');
// Aggregated application configuration. All values are environment-driven;
// the literal fallbacks below are development conveniences only.
module.exports = {
app: appConfig,
db: dbConfig,
auth: {
// SECURITY NOTE(review): 'changeme' is an unsafe default — JWT_SECRET must be
// set in every real environment.
jwtSecret: process.env.JWT_SECRET || 'changeme',
jwtExpiresIn: process.env.JWT_EXPIRES_IN || '1d'
},
redis: {
host: process.env.REDIS_HOST || '127.0.0.1',
port: parseInt(process.env.REDIS_PORT || '6379', 10),
password: process.env.REDIS_PASSWORD || undefined
},
n8n: {
baseUrl: process.env.N8N_BASE_URL || 'http://localhost:5678',
// SECURITY NOTE(review): same concern as jwtSecret — do not ship 'changeme'.
webhookSecret: process.env.N8N_WEBHOOK_SECRET || 'changeme'
}
};

31
src/data/models/user.js Normal file
View File

@ -0,0 +1,31 @@
const { DataTypes, Model } = require('sequelize');
const sequelize = require('../../db/pool');
// User account model mapped to the `users` table (snake_case columns via
// per-attribute `field` mappings). `paranoid: true` turns destroy() into a
// soft delete that sets deleted_at.
class User extends Model {}
User.init(
{
id: { type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true },
// Public identifier used in JWTs and API responses (internal numeric id stays private).
uuid: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, allowNull: false, unique: true },
email: { type: DataTypes.STRING(255), allowNull: false, unique: true, validate: { isEmail: true } },
// bcrypt hash — never the plaintext password.
passwordHash: { field: 'password_hash', type: DataTypes.STRING(255), allowNull: false },
firstName: { field: 'first_name', type: DataTypes.STRING(100), allowNull: true },
lastName: { field: 'last_name', type: DataTypes.STRING(100), allowNull: true },
// Relative URL under /uploads (set by the user controller on upload).
profilePicture: { field: 'profile_picture', type: DataTypes.STRING(512), allowNull: true },
role: { type: DataTypes.ENUM('admin', 'manager', 'user'), defaultValue: 'user', allowNull: false },
isActive: { field: 'is_active', type: DataTypes.BOOLEAN, defaultValue: true },
createdAt: { field: 'created_at', type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
updatedAt: { field: 'updated_at', type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
// Soft-delete timestamp managed by Sequelize's paranoid mode.
deletedAt: { field: 'deleted_at', type: DataTypes.DATE, allowNull: true }
},
{
sequelize,
modelName: 'User',
tableName: 'users',
paranoid: true,
timestamps: true
}
);
module.exports = User;

View File

@ -0,0 +1,45 @@
const { DataTypes, Model } = require('sequelize');
const sequelize = require('../../db/pool');
const User = require('./user');
// Per-user OAuth credential storage for external services. The access_token /
// refresh_token columns hold ciphertext — the user controller encrypts values
// before insert.
class UserAuthToken extends Model {}
UserAuthToken.init(
{
id: { type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true },
// FK to users.id; rows are removed when the owning user is hard-deleted.
userId: {
field: 'user_id',
type: DataTypes.INTEGER,
allowNull: false,
references: { model: 'users', key: 'id' },
onDelete: 'CASCADE'
},
serviceName: {
field: 'service_name',
type: DataTypes.ENUM('zoho', 'keka', 'bamboohr', 'hubspot', 'other'),
allowNull: false
},
// Encrypted OAuth tokens (TEXT because ciphertext length varies).
accessToken: { field: 'access_token', type: DataTypes.TEXT, allowNull: false },
refreshToken: { field: 'refresh_token', type: DataTypes.TEXT, allowNull: true },
// Access-token expiry as reported by the provider; null when unknown.
expiresAt: { field: 'expires_at', type: DataTypes.DATE, allowNull: true },
createdAt: { field: 'created_at', type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
updatedAt: { field: 'updated_at', type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
},
{
sequelize,
modelName: 'UserAuthToken',
tableName: 'user_auth_tokens',
timestamps: true,
paranoid: false,
indexes: [
{ fields: ['user_id'] },
{ fields: ['service_name'] }
]
}
);
UserAuthToken.belongsTo(User, { foreignKey: 'userId', as: 'user' });
module.exports = UserAuthToken;

View File

@ -0,0 +1,13 @@
const UserAuthToken = require('../models/userAuthToken');
// Persist a new OAuth token row (token values are expected to be encrypted by
// the caller before reaching this layer).
const createToken = async (payload) => UserAuthToken.create(payload);

// Look up a token row for a user/service pair; resolves to null when absent.
const findByUserAndService = async (userId, serviceName) =>
  UserAuthToken.findOne({ where: { userId, serviceName } });

module.exports = { createToken, findByUserAndService };

View File

@ -0,0 +1,30 @@
const User = require('../models/user');
// Data-access layer for the User model: all reads/writes go through here so
// services never touch the model directly.

// Insert a new user row.
const createUser = async (payload) => User.create(payload);

// Look up a user by email; resolves to null when not found.
const findByEmail = async (email) => User.findOne({ where: { email } });

// Look up a user by public uuid; resolves to null when not found.
const findByUuid = async (uuid) => User.findOne({ where: { uuid } });

// Apply partial updates to the user identified by uuid. Resolves to the
// updated instance, or null when no such user exists.
const updateByUuid = async (uuid, updates) => {
  const user = await findByUuid(uuid);
  if (!user) return null;
  await user.update(updates);
  return user;
};

// Remove the user identified by uuid (soft delete — the model is paranoid).
// Resolves to true on success, or null when no such user exists.
const deleteByUuid = async (uuid) => {
  const user = await findByUuid(uuid);
  if (!user) return null;
  await user.destroy();
  return true;
};

module.exports = { createUser, findByEmail, findByUuid, updateByUuid, deleteByUuid };

32
src/db/migrate.js Normal file
View File

@ -0,0 +1,32 @@
const fs = require('fs');
const path = require('path');
const { Sequelize } = require('sequelize');
const dbConfig = require('../../config/database');
// Run every .sql file in ./migrations against the configured MySQL database,
// in lexicographic (filename) order.
// NOTE(review): there is no migrations-history table, so every file is
// re-executed on each run — migrations must stay idempotent (IF NOT EXISTS).
async function run() {
  const sequelize = new Sequelize(dbConfig.database, dbConfig.username, dbConfig.password, {
    host: dbConfig.host,
    port: dbConfig.port,
    dialect: 'mysql',
    logging: dbConfig.logging
  });
  const migrationsDir = path.join(__dirname, 'migrations');
  const sqlFiles = fs
    .readdirSync(migrationsDir)
    .filter((name) => name.endsWith('.sql'))
    .sort();
  for (const name of sqlFiles) {
    const sql = fs.readFileSync(path.join(migrationsDir, name), 'utf8');
    await sequelize.query(sql);
  }
  await sequelize.close();
  // eslint-disable-next-line no-console
  console.log('Migrations completed');
}

run().catch((e) => {
  // eslint-disable-next-line no-console
  console.error(e);
  process.exit(1);
});

View File

@ -0,0 +1,14 @@
-- Application user accounts. Mirrors the Sequelize `User` model; deleted_at
-- implements soft delete (NULL = active row).
CREATE TABLE IF NOT EXISTS users (
id INT AUTO_INCREMENT PRIMARY KEY,
uuid CHAR(36) NOT NULL UNIQUE,
email VARCHAR(255) NOT NULL UNIQUE,
password_hash VARCHAR(255) NOT NULL, -- bcrypt hash, never plaintext
first_name VARCHAR(100) NULL,
last_name VARCHAR(100) NULL,
profile_picture VARCHAR(512) NULL, -- relative URL under /uploads
role ENUM('admin','manager','user') NOT NULL DEFAULT 'user',
is_active TINYINT(1) NOT NULL DEFAULT 1,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
deleted_at DATETIME NULL -- soft-delete marker
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@ -0,0 +1,13 @@
-- Per-user OAuth tokens for external services. access_token/refresh_token
-- hold ciphertext (values are encrypted at the application layer).
CREATE TABLE IF NOT EXISTS user_auth_tokens (
id INT AUTO_INCREMENT PRIMARY KEY,
user_id INT NOT NULL,
service_name ENUM('zoho','keka','bamboohr','hubspot','other') NOT NULL,
access_token TEXT NOT NULL,
refresh_token TEXT NULL,
expires_at DATETIME NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
CONSTRAINT fk_user_auth_tokens_user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
-- Matches the index declared on the Sequelize model; user_id is already
-- indexed implicitly by the foreign key.
KEY idx_user_auth_tokens_service_name (service_name)
-- CONSISTENCY FIX: explicit engine/charset, matching the users table
-- (previously omitted, leaving it to server defaults).
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

12
src/db/pool.js Normal file
View File

@ -0,0 +1,12 @@
const { Sequelize } = require('sequelize');
const dbConfig = require('../../config/database');
// Single shared Sequelize instance (and connection pool) for the whole app.
const poolSettings = { max: 10, min: 0, acquire: 30000, idle: 10000 };
const connectionOptions = {
  host: dbConfig.host,
  port: dbConfig.port,
  dialect: 'mysql',
  logging: dbConfig.logging,
  pool: poolSettings
};
const sequelize = new Sequelize(
  dbConfig.database,
  dbConfig.username,
  dbConfig.password,
  connectionOptions
);
module.exports = sequelize;

View File

@ -0,0 +1,21 @@
const { failure } = require('../utils/response');
const logger = require('../utils/logger');
module.exports = function errorHandler(err, req, res, next) {
const correlationId = logger.getCorrelationId(req);
const status = err.status || 500;
const errorCode = err.code || 'INTERNAL_SERVER_ERROR';
const message = status === 500 ? 'Something went wrong' : err.message || 'Error';
logger.error('Request failed', {
correlationId,
path: req.originalUrl,
method: req.method,
status,
errorCode,
stack: status === 500 ? err.stack : undefined
});
res.status(status).json(failure(message, errorCode));
};

15
src/server.js Normal file
View File

@ -0,0 +1,15 @@
const app = require('./app');
const config = require('./config');
const server = app.listen(config.app.port, () => {
  // eslint-disable-next-line no-console
  console.log(`Server listening on port ${config.app.port}`);
});

/**
 * Graceful shutdown: stop accepting new connections, then exit once in-flight
 * requests finish. Shared by SIGINT (Ctrl-C) and SIGTERM (container stop)
 * instead of duplicating the handler per signal.
 */
function shutdown() {
  server.close(() => process.exit(0));
}
process.on('SIGINT', shutdown);
process.on('SIGTERM', shutdown);

View File

@ -0,0 +1,52 @@
const bcrypt = require('bcrypt');
const repo = require('../data/repositories/userRepository');
/**
 * Creates a new user account.
 * @param {{email: string, password: string, firstName?: string, lastName?: string}} input
 * @returns the created user record from the repository
 * @throws 409 EMAIL_TAKEN when the email is already registered
 */
async function registerUser({ email, password, firstName, lastName }) {
  if (await repo.findByEmail(email)) {
    throw Object.assign(new Error('Email already in use'), {
      status: 409,
      code: 'EMAIL_TAKEN'
    });
  }
  // bcrypt cost factor 10, same work factor as before.
  const passwordHash = await bcrypt.hash(password, 10);
  return repo.createUser({ email, passwordHash, firstName, lastName });
}
/**
 * Fetches a user by public uuid.
 * @throws 404 USER_NOT_FOUND when no matching user exists
 */
async function getProfile(uuid) {
  const user = await repo.findByUuid(uuid);
  if (user) return user;
  throw Object.assign(new Error('User not found'), {
    status: 404,
    code: 'USER_NOT_FOUND'
  });
}
/**
 * Updates a user's mutable profile fields, silently ignoring any keys not in
 * the whitelist (email, firstName, lastName, profilePicture).
 * @throws 409 EMAIL_TAKEN when changing email to one owned by another user
 * @throws 404 USER_NOT_FOUND when no user matches `uuid`
 * @returns the updated user record
 */
async function updateProfile(uuid, updates) {
  const allowed = ['email', 'firstName', 'lastName', 'profilePicture'];
  const filtered = Object.fromEntries(Object.entries(updates).filter(([k]) => allowed.includes(k)));
  // Surface an email conflict as 409 (matching registerUser) instead of
  // letting the DB unique constraint bubble up as an opaque 500.
  if (filtered.email) {
    const existing = await repo.findByEmail(filtered.email);
    // Comparing uuids lets a user "change" email to their current address.
    if (existing && existing.uuid !== uuid) {
      const err = new Error('Email already in use');
      err.status = 409;
      err.code = 'EMAIL_TAKEN';
      throw err;
    }
  }
  const user = await repo.updateByUuid(uuid, filtered);
  if (!user) {
    const err = new Error('User not found');
    err.status = 404;
    err.code = 'USER_NOT_FOUND';
    throw err;
  }
  return user;
}
/**
 * Deletes a user by public uuid (deletion semantics are delegated to the
 * repository layer).
 * @throws 404 USER_NOT_FOUND when no user matches `uuid`
 */
async function removeUser(uuid) {
  const deleted = await repo.deleteByUuid(uuid);
  if (deleted) return;
  throw Object.assign(new Error('User not found'), {
    status: 404,
    code: 'USER_NOT_FOUND'
  });
}
module.exports = { registerUser, getProfile, updateProfile, removeUser };

27
src/utils/crypto.js Normal file
View File

@ -0,0 +1,27 @@
const crypto = require('crypto');
const algorithm = 'aes-256-gcm';
// AES-256-GCM needs a 32-byte key; derive one by hashing ENCRYPTION_KEY.
// Fail fast in production rather than silently encrypting every OAuth token
// with the well-known 'changeme' fallback (AUTH-002 requires real encryption).
if (!process.env.ENCRYPTION_KEY && process.env.NODE_ENV === 'production') {
  throw new Error('ENCRYPTION_KEY environment variable must be set in production');
}
const key = crypto.createHash('sha256').update(process.env.ENCRYPTION_KEY || 'changeme').digest();

/**
 * Encrypts a UTF-8 string with AES-256-GCM.
 * Output layout (base64-encoded): 12-byte IV || 16-byte auth tag || ciphertext.
 */
function encrypt(plaintext) {
  const iv = crypto.randomBytes(12); // 96-bit IV, the recommended size for GCM
  const cipher = crypto.createCipheriv(algorithm, key, iv);
  const encrypted = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]);
  const authTag = cipher.getAuthTag();
  return Buffer.concat([iv, authTag, encrypted]).toString('base64');
}

/**
 * Reverses encrypt(). Throws on a truncated payload or when the GCM auth tag
 * does not verify (tampered/garbled ciphertext).
 */
function decrypt(ciphertext) {
  const buf = Buffer.from(ciphertext, 'base64');
  // 12-byte IV + 16-byte tag is the minimum valid payload; previously a short
  // input produced a confusing low-level OpenSSL error.
  if (buf.length < 28) {
    throw new Error('Invalid ciphertext: payload too short');
  }
  const iv = buf.subarray(0, 12);
  const authTag = buf.subarray(12, 28);
  const encrypted = buf.subarray(28);
  const decipher = crypto.createDecipheriv(algorithm, key, iv);
  decipher.setAuthTag(authTag);
  const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
  return decrypted.toString('utf8');
}
module.exports = { encrypt, decrypt };

27
src/utils/logger.js Normal file
View File

@ -0,0 +1,27 @@
const { randomUUID } = require('crypto');
/**
 * Returns a correlation id for a request, generating and caching one on `req`
 * when absent. Precedence: incoming `x-correlation-id` header, then a
 * previously cached id on the request, then a fresh UUID.
 * May also be called with no request, yielding a one-off UUID.
 */
function getCorrelationId(req) {
  // BUG FIX: this was `uuidv4()`, which is not defined anywhere in this module
  // (only randomUUID is imported) and threw a ReferenceError when req was absent.
  if (!req) return randomUUID();
  const headerId = req.headers['x-correlation-id'];
  if (headerId) return headerId;
  if (req.correlationId) return req.correlationId;
  const generated = randomUUID();
  req.correlationId = generated;
  return generated;
}
// Emits one structured JSON log line per call:
// { level, message, timestamp, ...meta } (meta keys can override the base ones).
function log(level, message, meta = {}) {
  const entry = {
    level,
    message,
    timestamp: new Date().toISOString(),
    ...meta
  };
  // eslint-disable-next-line no-console
  console.log(JSON.stringify(entry));
}
// Public logging API: one helper per severity level plus the correlation-id
// accessor used by middleware to tag requests.
module.exports = {
  info: (msg, meta) => log('INFO', msg, meta),
  warn: (msg, meta) => log('WARN', msg, meta),
  error: (msg, meta) => log('ERROR', msg, meta),
  debug: (msg, meta) => log('DEBUG', msg, meta),
  getCorrelationId
};

9
src/utils/response.js Normal file
View File

@ -0,0 +1,9 @@
/**
 * Builds the standard success envelope required by the API contract:
 * { status, message, data, timestamp }.
 */
function success(message, data = null) {
  return {
    status: 'success',
    message,
    data,
    timestamp: new Date().toISOString()
  };
}
/**
 * Builds the standard error envelope required by the API contract:
 * { status, message, errorCode, details, timestamp }.
 */
function failure(message, errorCode = 'GENERIC_ERROR', details = null) {
  return {
    status: 'error',
    message,
    errorCode,
    details,
    timestamp: new Date().toISOString()
  };
}
module.exports = { success, failure };