commit a00d81a3fd

    first commit -m
24  .env.example  Normal file
@@ -0,0 +1,24 @@
NODE_ENV=development
PORT=3000

DATABASE_URL="postgresql://postgres:Admin%40123@localhost:5434/Yaseen123"

REDIS_URL=redis://localhost:6379

JWT_SECRET=your-secret-key
API_KEY_PREFIX=vf_live_

GST_PROVIDER_URL=
GST_PROVIDER_KEY=

PAN_PROVIDER_URL=
PAN_PROVIDER_KEY=

BANK_PROVIDER_URL=
BANK_PROVIDER_KEY=

RAZORPAY_KEY_ID=
RAZORPAY_KEY_SECRET=

RESEND_API_KEY=
FROM_EMAIL=

3  .gitignore  vendored  Normal file
@@ -0,0 +1,3 @@
node_modules/
.env
data/

92  README.md  Normal file
@@ -0,0 +1,92 @@
# VerifyIndia API

REST APIs for Indian data verification:
- IFSC Lookup
- Pincode Lookup
- GST Verification
- PAN Verification
- Bank Account Verification

## Tech Stack

- **Runtime:** Node.js v20+
- **Framework:** Express.js
- **Database:** PostgreSQL
- **Cache:** Redis
- **Auth:** API Keys + JWT

## Setup

1. Install dependencies:

   ```bash
   npm install
   ```

2. Create a `.env` file with your settings (replace `<PASSWORD>`):

   ```
   PORT=3000
   NODE_ENV=development
   DATABASE_URL=postgres://india_api_2025:<PASSWORD>@localhost:5434/india-api-tech4biz
   # optional: JWT_SECRET, REDIS_URL, etc.
   ```

3. Start the server:

   ```bash
   npm run dev
   ```

## Project Structure

```
verify-india-api/
├── src/
│   ├── index.js
│   ├── routes/
│   ├── middleware/
│   ├── services/
│   ├── database/
│   └── cache/
├── data/
├── package.json
├── .env.example
└── README.md
```

## API Key Format

```
vf_live_a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6
```

Authentication via header:

```
X-API-Key: vf_live_xxx
```

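For example, once the server is running you can call an endpoint from Node 18+ (a minimal sketch: the key below is a placeholder, and `HDFC0000001` is one of the seeded IFSC codes):

```js
// Look up a seeded IFSC code using a placeholder API key.
fetch('http://localhost:3000/v1/ifsc/HDFC0000001', {
  headers: { 'X-API-Key': 'vf_live_xxx' },
})
  .then((res) => res.json())
  .then(console.log);
```
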
## Response Format

**Success:**

```json
{
  "success": true,
  "data": {},
  "meta": {
    "request_id": "req_xxx",
    "credits_used": 1,
    "credits_remaining": 999
  }
}
```

**Error:**

```json
{
  "success": false,
  "error": {
    "code": "ERROR_CODE",
    "message": "Description"
  }
}
```

35  convert-pincode-csv-to-json.js  Normal file
@@ -0,0 +1,35 @@
const fs = require('fs');
const path = require('path');
const { parse } = require('csv-parse');

const CSV_FILE_PATH = path.join(__dirname, 'data', 'pincode.csv');
const JSON_FILE_PATH = path.join(__dirname, 'data', 'pincodes.json');

async function convertCsvToJson() {
  const records = [];
  const parser = fs
    .createReadStream(CSV_FILE_PATH)
    .pipe(parse({ columns: true, skip_empty_lines: true }));

  for await (const record of parser) {
    records.push({
      pincode: record.pincode,
      office_name: record.officename,
      office_type: record.officetype,
      district: record.district,
      division: record.divisionname,
      region: record.regionname,
      state: record.statename,
      latitude: record.latitude === 'NA' ? null : parseFloat(record.latitude),
      longitude: record.longitude === 'NA' ? null : parseFloat(record.longitude),
    });
  }

  fs.writeFileSync(JSON_FILE_PATH, JSON.stringify(records, null, 2));
  console.log(`Converted ${records.length} records from CSV to JSON.`);
}

convertCsvToJson().catch(console.error);

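For reference, each record written to `pincodes.json` has this shape (values shown match the Mumbai G.P.O. row used in the seed data later in this commit):

```json
{
  "pincode": "400001",
  "office_name": "G.P.O.",
  "office_type": "Head Office",
  "district": "Mumbai",
  "division": "Mumbai",
  "region": "Mumbai",
  "state": "Maharashtra",
  "latitude": 18.9398,
  "longitude": 72.8355
}
```
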
2019  package-lock.json  generated  Normal file
File diff suppressed because it is too large.
40  package.json  Normal file
@@ -0,0 +1,40 @@
{
  "name": "verify-india-api",
  "version": "1.0.0",
  "description": "REST APIs for Indian data verification",
  "main": "src/index.js",
  "scripts": {
    "start": "node src/index.js",
    "dev": "nodemon src/index.js",
    "migrate": "node src/database/migrate.js up",
    "migrate:down": "node src/database/migrate.js down",
    "migrate:status": "node src/database/migrate.js status",
    "migrate:pincodes": "node src/database/migrate-pincodes.js",
    "create-test-key": "node scripts/create-test-api-key.js"
  },
  "keywords": [
    "api",
    "verification",
    "india"
  ],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "axios": "^1.6.2",
    "bcryptjs": "^2.4.3",
    "cors": "^2.8.5",
    "csv-parse": "^5.5.0",
    "csv-parser": "^3.2.0",
    "dotenv": "^16.3.1",
    "express": "^4.18.2",
    "helmet": "^7.1.0",
    "jsonwebtoken": "^9.0.2",
    "morgan": "^1.10.0",
    "pg": "^8.16.3",
    "redis": "^4.6.10",
    "sequelize": "^6.37.7",
    "uuid": "^9.0.0"
  },
  "devDependencies": {
    "nodemon": "^3.0.2"
  }
}

100  scripts/create-test-api-key.js  Normal file
@@ -0,0 +1,100 @@
/**
 * Helper script to create a test API key for development
 * Usage: node scripts/create-test-api-key.js
 */

require('dotenv').config();
const crypto = require('crypto');
const { query, connectDB } = require('../src/database/connection');

function generateApiKey(type = 'test') {
  const prefix = type === 'test' ? 'vf_test_' : 'vf_live_';
  return prefix + crypto.randomBytes(24).toString('hex');
}

async function createTestApiKey() {
  try {
    await connectDB();
    console.log('✅ Connected to database\n');

    // Check if test user exists
    const testUser = await query(
      'SELECT * FROM users WHERE email = $1',
      ['test@example.com']
    );

    let userId;

    if (testUser.rows.length === 0) {
      // Create test user
      console.log('Creating test user...');
      const bcrypt = require('bcryptjs');
      const passwordHash = await bcrypt.hash('testpassword123', 10);

      const userResult = await query(
        `INSERT INTO users (email, password_hash, company_name, plan, monthly_quota, quota_reset_date, is_active)
         VALUES ($1, $2, $3, $4, $5, DATE(NOW() + INTERVAL '1 month'), true)
         RETURNING id, email, plan`,
        ['test@example.com', passwordHash, 'Test Company', 'free', 10000]
      );

      userId = userResult.rows[0].id;
      console.log(`✅ Created test user (ID: ${userId})`);
    } else {
      userId = testUser.rows[0].id;
      console.log(`✅ Using existing test user (ID: ${userId})`);
    }

    // Check for existing test API key
    const existingKeys = await query(
      `SELECT ak.* FROM api_keys ak
       WHERE ak.user_id = $1 AND ak.is_test_key = true AND ak.is_active = true`,
      [userId]
    );

    if (existingKeys.rows.length > 0) {
      // Only the hash is stored, so the original key cannot be shown again;
      // deactivate the old keys and issue a fresh one instead.
      console.log('⚠️ Test API key already exists. Creating a new one...');

      await query(
        'UPDATE api_keys SET is_active = false WHERE user_id = $1 AND is_test_key = true',
        [userId]
      );
    }

    // Generate new API key
    const apiKey = generateApiKey('test');
    const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');

    await query(
      `INSERT INTO api_keys (user_id, key_prefix, key_hash, key_hint, name, is_test_key, is_active)
       VALUES ($1, $2, $3, $4, $5, true, true)`,
      [userId, 'vf_test_', keyHash, apiKey.slice(-4), 'Test Key']
    );

    console.log('\n✅ Test API Key Created Successfully!\n');
    console.log('='.repeat(60));
    console.log('API Key:');
    console.log(apiKey);
    console.log('='.repeat(60));
    console.log('\nUsage:');
    console.log('curl -H "x-api-key: ' + apiKey + '" http://localhost:3000/v1/gst/verify/27AAACM1234A1Z5');
    console.log('\nOr use in Postman/Thunder Client:');
    console.log('Header: x-api-key');
    console.log('Value: ' + apiKey);
    console.log('\n');

    process.exit(0);
  } catch (error) {
    console.error('❌ Error:', error.message);
    console.error(error);
    process.exit(1);
  }
}

createTestApiKey();

88  src/cache/redis.js  vendored  Normal file
@@ -0,0 +1,88 @@
// src/cache/redis.js
const dummyCache = new Map();
const dummyExpiryTimers = new Map();
let useDummy = !process.env.REDIS_URL;
let redisClient = null;

async function connectRedis() {
  if (useDummy) {
    console.log('📦 Using dummy in-memory cache (no REDIS_URL set)');
    return null;
  }

  try {
    const { createClient } = require('redis');
    redisClient = createClient({ url: process.env.REDIS_URL });
    redisClient.on('error', (err) => console.error('Redis Error:', err));
    await redisClient.connect();
    console.log('✅ Redis connected');
    return redisClient;
  } catch (err) {
    console.error('❌ Redis connect failed, falling back to in-memory cache:', err.message);
    useDummy = true;
    redisClient = null;
    return null;
  }
}

function getRedisClient() {
  return useDummy ? null : redisClient;
}

function isDummyCache() {
  return useDummy;
}

async function cacheGet(key) {
  if (useDummy) {
    return dummyCache.has(key) ? dummyCache.get(key) : null;
  }
  if (!redisClient) return null;

  const data = await redisClient.get(key);
  if (!data) return null;

  try {
    return JSON.parse(data);
  } catch (err) {
    console.error('Redis parse error:', err.message);
    return null;
  }
}

async function cacheSet(key, value, expirySeconds = 3600) {
  if (useDummy) {
    dummyCache.set(key, value);

    // Reset the expiry timer on every write so the entry lives a full TTL.
    if (dummyExpiryTimers.has(key)) {
      clearTimeout(dummyExpiryTimers.get(key));
    }
    const timer = setTimeout(() => {
      dummyCache.delete(key);
      dummyExpiryTimers.delete(key);
    }, expirySeconds * 1000);
    dummyExpiryTimers.set(key, timer);
    return true;
  }

  if (!redisClient) return false;

  await redisClient.setEx(key, expirySeconds, JSON.stringify(value));
  return true;
}

async function cacheDelete(key) {
  if (useDummy) {
    if (dummyExpiryTimers.has(key)) {
      clearTimeout(dummyExpiryTimers.get(key));
      dummyExpiryTimers.delete(key);
    }
    return dummyCache.delete(key);
  }

  if (!redisClient) return false;
  await redisClient.del(key);
  return true;
}

module.exports = { connectRedis, getRedisClient, cacheGet, cacheSet, cacheDelete, isDummyCache };

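A minimal usage sketch of this module, run from the project root (it works with or without `REDIS_URL`, since the dummy-cache path needs no connection):

```js
const { connectRedis, cacheSet, cacheGet } = require('./src/cache/redis');

(async () => {
  await connectRedis(); // falls back to the in-memory Map when Redis is absent
  await cacheSet('greeting', { hello: 'world' }, 60); // 60-second TTL
  console.log(await cacheGet('greeting')); // → { hello: 'world' }
})();
```
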
26  src/database/connection.js  Normal file
@@ -0,0 +1,26 @@
const { Pool } = require('pg');

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: process.env.NODE_ENV === 'production' ? { rejectUnauthorized: false } : false,
  max: 20,
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 2000,
});

async function connectDB() {
  const client = await pool.connect();
  await client.query('SELECT NOW()');
  client.release();
  return true;
}

async function query(text, params) {
  return await pool.query(text, params);
}

async function getClient() {
  return await pool.connect();
}

module.exports = { connectDB, query, getClient, pool };

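Callers use the exported `query` helper with parameterized SQL; a sketch (the IFSC value comes from the seed data in this commit):

```js
const { query } = require('./src/database/connection');

(async () => {
  const { rows } = await query(
    'SELECT bank_name, branch FROM ifsc_codes WHERE ifsc = $1',
    ['HDFC0000001']
  );
  console.log(rows[0]); // → { bank_name: 'HDFC Bank', branch: 'HDFC BANK LTD' }
})();
```
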
105  src/database/migrate-pincodes.js  Normal file
@@ -0,0 +1,105 @@
const fs = require('fs');
const path = require('path');
const { query, connectDB } = require('./connection');

async function migratePincodes() {
  try {
    await connectDB();
    console.log('Database connected for migration.');

    const csvFilePath = path.join(__dirname, '../../data/pincode.csv');
    const fileContent = fs.readFileSync(csvFilePath, 'utf8');
    const lines = fileContent.split('\n').filter(line => line.trim() !== '');

    if (lines.length === 0) {
      console.log('No data found in pincode.csv');
      return;
    }

    const headers = lines[0].split(',').map(header => header.trim().toLowerCase());
    const dataRows = lines.slice(1);

    console.log(`Starting migration of ${dataRows.length} pincode records.`);

    for (const row of dataRows) {
      const values = parseCsvLine(row);

      if (values.length !== headers.length) {
        console.warn('Skipping row due to column mismatch:', row);
        continue;
      }

      const record = {};
      headers.forEach((header, index) => {
        record[header] = values[index];
      });

      const pincode = record.pincode;
      const office_name = record.officename;
      const office_type = record.officetype;
      const district = record.district;
      const division = record.divisionname;
      const region = record.regionname;
      const state = record.statename;
      const latitude = record.latitude && record.latitude.toLowerCase() !== 'na' ? parseFloat(record.latitude) : null;
      const longitude = record.longitude && record.longitude.toLowerCase() !== 'na' ? parseFloat(record.longitude) : null;

      // The pincodes table has no unique constraint on pincode (one pincode
      // can cover several post offices), so rows are inserted as-is; an
      // ON CONFLICT (pincode) clause would fail without such a constraint.
      const insertQuery = `
        INSERT INTO pincodes (pincode, office_name, office_type, district, division, region, state, latitude, longitude)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9);
      `;
      const insertValues = [
        pincode,
        office_name,
        office_type,
        district,
        division,
        region,
        state,
        latitude,
        longitude
      ];

      await query(insertQuery, insertValues);
    }

    console.log('Pincode migration completed successfully.');
    process.exit(0);
  } catch (error) {
    console.error('Error during pincode migration:', error);
    process.exit(1);
  }
}

// Basic CSV parser to handle commas within quoted strings
function parseCsvLine(line) {
  const values = [];
  let inQuote = false;
  let currentVal = '';
  for (let i = 0; i < line.length; i++) {
    const char = line[i];
    if (char === '"') {
      // Toggle quoted state; the quote characters themselves are not kept.
      inQuote = !inQuote;
    } else if (char === ',' && !inQuote) {
      values.push(currentVal.trim());
      currentVal = '';
    } else {
      currentVal += char;
    }
  }
  values.push(currentVal.trim());
  return values;
}

migratePincodes();

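With the quote handling above, fields containing commas parse as expected (values illustrative):

```js
parseCsvLine('110001,"Baroda House, New Delhi",Head Office');
// → ['110001', 'Baroda House, New Delhi', 'Head Office']
```
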
27  src/database/migrate.js  Normal file
@@ -0,0 +1,27 @@
// Load environment variables before anything else so the connection
// pool picks up DATABASE_URL.
require('dotenv').config();
const { runMigrations, migrationStatus } = require('./migrationRunner');

const direction = (process.argv[2] || 'up').toLowerCase();

async function main() {
  try {
    if (direction === 'status') {
      const status = await migrationStatus();
      console.log('Migration status:');
      status.forEach((row) => {
        console.log(`${row.applied ? '✅' : '⬜'} ${row.id} - ${row.name}`);
      });
    } else {
      console.log(`Starting migrations (${direction})...`);
      await runMigrations(direction);
      console.log(`Migrations ${direction} completed`);
    }
    process.exit(0);
  } catch (error) {
    console.error(`Migration ${direction} failed:`, error.message);
    process.exit(1);
  }
}

main();

104  src/database/migrationRunner.js  Normal file
@@ -0,0 +1,104 @@
const fs = require('fs');
const path = require('path');
const { pool } = require('./connection');

const MIGRATIONS_DIR = path.join(__dirname, 'migrations');
const MIGRATIONS_TABLE = 'schema_migrations';

async function ensureMigrationsTable(client) {
  await client.query(`
    CREATE TABLE IF NOT EXISTS ${MIGRATIONS_TABLE} (
      id SERIAL PRIMARY KEY,
      migration_id TEXT UNIQUE NOT NULL,
      name TEXT NOT NULL,
      run_on TIMESTAMP DEFAULT NOW()
    );
  `);
}

function loadMigrations() {
  const files = fs
    .readdirSync(MIGRATIONS_DIR)
    .filter((file) => file.endsWith('.js'))
    .sort();

  return files.map((file) => {
    // eslint-disable-next-line import/no-dynamic-require, global-require
    const migration = require(path.join(MIGRATIONS_DIR, file));
    if (!migration.id || typeof migration.up !== 'function' || typeof migration.down !== 'function') {
      throw new Error(`Migration ${file} is missing required exports`);
    }
    return { ...migration, file };
  });
}

async function getAppliedMigrations(client) {
  const { rows } = await client.query(`SELECT migration_id FROM ${MIGRATIONS_TABLE} ORDER BY run_on ASC`);
  return new Set(rows.map((row) => row.migration_id));
}

async function runMigrations(direction = 'up') {
  const client = await pool.connect();

  try {
    await client.query('BEGIN');
    await ensureMigrationsTable(client);

    const migrations = loadMigrations();
    const applied = await getAppliedMigrations(client);

    if (direction === 'up') {
      for (const migration of migrations) {
        if (applied.has(migration.id)) continue;
        await migration.up(client);
        await client.query(
          `INSERT INTO ${MIGRATIONS_TABLE} (migration_id, name) VALUES ($1, $2)`,
          [migration.id, migration.name || migration.file]
        );
        console.log(`⬆️ Applied migration ${migration.id}`);
      }
    } else if (direction === 'down') {
      const appliedList = Array.from(applied).reverse();
      const map = migrations.reduce((acc, m) => acc.set(m.id, m), new Map());

      for (const migrationId of appliedList) {
        const migration = map.get(migrationId);
        if (!migration) continue;
        await migration.down(client);
        await client.query(`DELETE FROM ${MIGRATIONS_TABLE} WHERE migration_id = $1`, [migrationId]);
        console.log(`⬇️ Reverted migration ${migration.id}`);
      }
    } else {
      throw new Error(`Unknown migration direction: ${direction}`);
    }

    await client.query('COMMIT');
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}

async function migrationStatus() {
  const client = await pool.connect();

  try {
    await ensureMigrationsTable(client);
    const migrations = loadMigrations();
    const applied = await getAppliedMigrations(client);

    return migrations.map((migration) => ({
      id: migration.id,
      name: migration.name || migration.file,
      applied: applied.has(migration.id),
    }));
  } finally {
    client.release();
  }
}

module.exports = { runMigrations, migrationStatus };

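Each file in `src/database/migrations/` must export the shape that `loadMigrations()` checks for: an `id` plus `up`/`down` functions that receive a `pg` client. A minimal skeleton (filename, name, and SQL are placeholders):

```js
// src/database/migrations/20251218000000_example.js (hypothetical)
async function up(client) {
  await client.query('ALTER TABLE users ADD COLUMN IF NOT EXISTS notes TEXT;');
}

async function down(client) {
  await client.query('ALTER TABLE users DROP COLUMN IF EXISTS notes;');
}

module.exports = {
  id: '20251218000000_example',
  name: 'example migration',
  up,
  down,
};
```
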
203  src/database/migrations/20231216000000_initial_schema.js  Normal file
@@ -0,0 +1,203 @@
const initialSchema = `
  -- Table: users
  CREATE TABLE IF NOT EXISTS users (
    id SERIAL PRIMARY KEY,
    email VARCHAR(255) UNIQUE NOT NULL,
    password_hash VARCHAR(255) NOT NULL,
    company_name VARCHAR(255),
    phone VARCHAR(20),
    email_verified BOOLEAN DEFAULT FALSE,
    verification_token VARCHAR(255),
    plan VARCHAR(50) DEFAULT 'free',
    plan_started_at TIMESTAMP,
    plan_expires_at TIMESTAMP,
    monthly_quota INTEGER DEFAULT 100,
    calls_this_month INTEGER DEFAULT 0,
    quota_reset_date DATE,
    razorpay_customer_id VARCHAR(100),
    razorpay_subscription_id VARCHAR(100),
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW(),
    last_login_at TIMESTAMP,
    is_active BOOLEAN DEFAULT TRUE
  );

  CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

  -- Table: api_keys
  CREATE TABLE IF NOT EXISTS api_keys (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    key_prefix VARCHAR(20) NOT NULL,
    key_hash VARCHAR(255) NOT NULL,
    key_hint VARCHAR(10),
    name VARCHAR(100) DEFAULT 'Default',
    is_test_key BOOLEAN DEFAULT FALSE,
    is_active BOOLEAN DEFAULT TRUE,
    last_used_at TIMESTAMP,
    total_calls INTEGER DEFAULT 0,
    created_at TIMESTAMP DEFAULT NOW(),
    expires_at TIMESTAMP
  );

  CREATE INDEX IF NOT EXISTS idx_api_keys_user ON api_keys(user_id);
  CREATE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash);

  -- Table: api_calls
  CREATE TABLE IF NOT EXISTS api_calls (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id),
    api_key_id INTEGER REFERENCES api_keys(id),
    endpoint VARCHAR(100) NOT NULL,
    method VARCHAR(10) NOT NULL,
    request_params JSONB,
    response_status INTEGER,
    response_time_ms INTEGER,
    success BOOLEAN,
    error_message VARCHAR(500),
    credits_used INTEGER DEFAULT 1,
    is_billable BOOLEAN DEFAULT TRUE,
    ip_address VARCHAR(45),
    user_agent VARCHAR(500),
    called_at TIMESTAMP DEFAULT NOW()
  );

  CREATE INDEX IF NOT EXISTS idx_api_calls_user ON api_calls(user_id);
  CREATE INDEX IF NOT EXISTS idx_api_calls_date ON api_calls(called_at);
  CREATE INDEX IF NOT EXISTS idx_api_calls_endpoint ON api_calls(endpoint);

  -- Table: ifsc_codes
  CREATE TABLE IF NOT EXISTS ifsc_codes (
    id SERIAL PRIMARY KEY,
    ifsc VARCHAR(11) UNIQUE NOT NULL,
    bank_name VARCHAR(255) NOT NULL,
    branch VARCHAR(255),
    address TEXT,
    city VARCHAR(100),
    district VARCHAR(100),
    state VARCHAR(100),
    contact VARCHAR(100),
    upi_enabled BOOLEAN DEFAULT FALSE,
    rtgs_enabled BOOLEAN DEFAULT TRUE,
    neft_enabled BOOLEAN DEFAULT TRUE,
    imps_enabled BOOLEAN DEFAULT TRUE,
    micr_code VARCHAR(20),
    swift_code VARCHAR(20),
    updated_at TIMESTAMP DEFAULT NOW()
  );

  CREATE INDEX IF NOT EXISTS idx_ifsc ON ifsc_codes(ifsc);

  -- Table: pincodes
  CREATE TABLE IF NOT EXISTS pincodes (
    id SERIAL PRIMARY KEY,
    pincode VARCHAR(6) NOT NULL,
    office_name VARCHAR(255),
    office_type VARCHAR(50),
    district VARCHAR(100),
    division VARCHAR(100),
    region VARCHAR(100),
    state VARCHAR(100),
    latitude DECIMAL(10, 8),
    longitude DECIMAL(11, 8),
    updated_at TIMESTAMP DEFAULT NOW()
  );

  CREATE INDEX IF NOT EXISTS idx_pincode ON pincodes(pincode);

  -- Table: subscriptions
  CREATE TABLE IF NOT EXISTS subscriptions (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id),
    razorpay_subscription_id VARCHAR(100),
    razorpay_payment_id VARCHAR(100),
    razorpay_plan_id VARCHAR(100),
    plan_name VARCHAR(50),
    amount DECIMAL(10, 2),
    currency VARCHAR(3) DEFAULT 'INR',
    status VARCHAR(50),
    current_period_start TIMESTAMP,
    current_period_end TIMESTAMP,
    created_at TIMESTAMP DEFAULT NOW(),
    cancelled_at TIMESTAMP
  );

  CREATE INDEX IF NOT EXISTS idx_subscriptions_user ON subscriptions(user_id);

  -- Table: invoices
  CREATE TABLE IF NOT EXISTS invoices (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id),
    subscription_id INTEGER REFERENCES subscriptions(id),
    invoice_number VARCHAR(50) UNIQUE,
    amount DECIMAL(10, 2),
    tax_amount DECIMAL(10, 2),
    total_amount DECIMAL(10, 2),
    currency VARCHAR(3) DEFAULT 'INR',
    status VARCHAR(50),
    razorpay_invoice_id VARCHAR(100),
    razorpay_payment_id VARCHAR(100),
    invoice_date DATE,
    due_date DATE,
    paid_at TIMESTAMP,
    pdf_url VARCHAR(500),
    created_at TIMESTAMP DEFAULT NOW()
  );

  -- Table: gst_registrations
  CREATE TABLE IF NOT EXISTS gst_registrations (
    id SERIAL PRIMARY KEY,
    gstin VARCHAR(15) UNIQUE NOT NULL,
    legal_name VARCHAR(255),
    trade_name VARCHAR(255),
    status VARCHAR(50),
    registration_date DATE,
    last_updated TIMESTAMP,
    business_type VARCHAR(100),
    constitution VARCHAR(100),
    state VARCHAR(100),
    state_code VARCHAR(2),
    pan VARCHAR(10),
    address_building VARCHAR(255),
    address_floor VARCHAR(100),
    address_street VARCHAR(255),
    address_locality VARCHAR(255),
    address_city VARCHAR(100),
    address_district VARCHAR(100),
    address_state_code VARCHAR(2),
    address_pincode VARCHAR(10),
    nature_of_business TEXT,
    filing_status_gstr1 VARCHAR(50),
    filing_status_gstr3b VARCHAR(50),
    filing_last_filed_date DATE,
    created_at TIMESTAMP DEFAULT NOW()
  );
  CREATE INDEX IF NOT EXISTS idx_gst_gstin ON gst_registrations(gstin);
`;

async function up(client) {
  await client.query(initialSchema);
}

async function down(client) {
  await client.query(`
    DROP TABLE IF EXISTS invoices;
    DROP TABLE IF EXISTS subscriptions;
    DROP TABLE IF EXISTS api_calls;
    DROP TABLE IF EXISTS api_keys;
    DROP TABLE IF EXISTS ifsc_codes;
    DROP TABLE IF EXISTS pincodes;
    DROP TABLE IF EXISTS users;
    DROP TABLE IF EXISTS gst_registrations;
  `);
}

module.exports = {
  id: '20231216000000_initial_schema',
  name: 'initial schema',
  up,
  down,
};

72  src/database/migrations/20231217000000_seed_ifsc_data.js  Normal file
@@ -0,0 +1,72 @@
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');

async function up(client) {
  // IFSC.csv lives in the top-level `data` folder of the project.
  // This resolves from: src/database/migrations -> project root -> data/IFSC.csv
  const csvFilePath = path.join(__dirname, '../../../data/IFSC.csv');

  // Check if file exists before proceeding
  if (!fs.existsSync(csvFilePath)) {
    console.warn(`⚠️ IFSC.csv not found at ${csvFilePath}. Skipping seed.`);
    return;
  }

  console.log('⏳ Starting IFSC data import...');

  return new Promise((resolve, reject) => {
    const promises = [];

    fs.createReadStream(csvFilePath)
      .pipe(csv())
      .on('data', (row) => {
        // Map CSV columns to database columns
        const query = `
          INSERT INTO ifsc_codes (
            ifsc, bank_name, branch, address, city, district, state,
            upi_enabled, rtgs_enabled, neft_enabled, imps_enabled
          ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
          ON CONFLICT (ifsc) DO NOTHING;
        `;

        const values = [
          row.IFSC || row.ifsc,
          row.BANK || row.bank_name,
          row.BRANCH || row.branch,
          row.ADDRESS || row.address,
          row.CITY || row.city,
          row.DISTRICT || row.district,
          row.STATE || row.state,
          row.UPI === 'true', // Convert string to boolean if necessary
          row.RTGS === 'true',
          row.NEFT === 'true',
          row.IMPS === 'true'
        ];

        promises.push(client.query(query, values));
      })
      .on('end', async () => {
        try {
          await Promise.all(promises);
          console.log('✅ IFSC data import completed.');
          resolve();
        } catch (err) {
          reject(err);
        }
      })
      .on('error', reject);
  });
}

async function down(client) {
  // Optional: Clear the table on rollback
  await client.query('TRUNCATE TABLE ifsc_codes;');
}

module.exports = {
  id: '20231217000000_seed_ifsc_data',
  name: 'seed ifsc data from csv',
  up,
  down,
};

135  src/database/migrations/20251216000000_seed_reference_data.js  Normal file
@@ -0,0 +1,135 @@
const IFSC_DATA = [
  {
    ifsc: 'HDFC0000001',
    bank_name: 'HDFC Bank',
    branch: 'HDFC BANK LTD',
    address: 'SANDOZ HOUSE, SHIVSAGAR ESTATE, WORLI, MUMBAI - 400018',
    city: 'MUMBAI',
    district: 'MUMBAI',
    state: 'MAHARASHTRA',
    contact: '022-24910409',
    upi_enabled: true,
    rtgs_enabled: true,
    neft_enabled: true,
    imps_enabled: true,
    micr_code: '400240001',
    swift_code: 'HDFCINBB',
  },
  {
    ifsc: 'SBIN0000001',
    bank_name: 'State Bank of India',
    branch: 'Main Branch',
    address: 'MUMBAI MAIN BRANCH, FORT, MUMBAI - 400001',
    city: 'MUMBAI',
    district: 'MUMBAI',
    state: 'MAHARASHTRA',
    contact: '022-22621111',
    upi_enabled: true,
    rtgs_enabled: true,
    neft_enabled: true,
    imps_enabled: true,
    micr_code: '400002000',
    swift_code: 'SBININBB',
  },
  {
    ifsc: 'ICIC0000001',
    bank_name: 'ICICI Bank',
    branch: 'Corporate Office',
    address: 'ICICI BANK TOWERS, BANDRA-KURLA COMPLEX, MUMBAI - 400051',
    city: 'MUMBAI',
    district: 'MUMBAI',
    state: 'MAHARASHTRA',
    contact: '022-33667777',
    upi_enabled: true,
    rtgs_enabled: true,
    neft_enabled: true,
    imps_enabled: true,
    micr_code: '400229002',
    swift_code: 'ICICINBB',
  },
];

const PINCODE_DATA = [
  {
    pincode: '400001',
    office_name: 'G.P.O.',
    office_type: 'Head Office',
    district: 'Mumbai',
    division: 'Mumbai',
    region: 'Mumbai',
    state: 'Maharashtra',
    latitude: 18.9398,
    longitude: 72.8355,
  },
];

async function up(client) {
  for (const row of IFSC_DATA) {
    await client.query(
      `
      INSERT INTO ifsc_codes
        (ifsc, bank_name, branch, address, city, district, state, contact, upi_enabled, rtgs_enabled, neft_enabled, imps_enabled, micr_code, swift_code)
      VALUES
        ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
      ON CONFLICT (ifsc) DO NOTHING;
      `,
      [
        row.ifsc,
        row.bank_name,
        row.branch,
        row.address,
        row.city,
        row.district,
        row.state,
        row.contact,
        row.upi_enabled,
        row.rtgs_enabled,
        row.neft_enabled,
        row.imps_enabled,
        row.micr_code,
        row.swift_code,
      ]
    );
  }

  for (const row of PINCODE_DATA) {
    await client.query(
      `
      INSERT INTO pincodes
        (pincode, office_name, office_type, district, division, region, state, latitude, longitude)
      VALUES
        ($1, $2, $3, $4, $5, $6, $7, $8, $9)
      ON CONFLICT DO NOTHING;
      `,
      [
        row.pincode,
        row.office_name,
        row.office_type,
        row.district,
        row.division,
        row.region,
        row.state,
        row.latitude,
        row.longitude,
      ]
    );
  }
}

async function down(client) {
  await client.query('DELETE FROM ifsc_codes WHERE ifsc = ANY($1)', [
    IFSC_DATA.map((r) => r.ifsc),
  ]);

  await client.query('DELETE FROM pincodes WHERE pincode = ANY($1)', [
    PINCODE_DATA.map((r) => r.pincode),
  ]);
}

module.exports = {
  id: '20251216000000_seed_reference_data',
  name: 'seed reference data',
  up,
  down,
};

100  src/database/migrations/20251217010000_seed_pincode_data.js  Normal file
@@ -0,0 +1,100 @@
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');

// Helper to safely parse and clamp decimal values to a given scale and range
function cleanDecimal(value, scale = 8, maxAbs) {
  if (!value || value === 'NA') return null;

  const num = parseFloat(value);
  if (Number.isNaN(num)) return null;

  // If we expect a reasonable geographic coordinate, drop clearly invalid values
  if (typeof maxAbs === 'number' && Math.abs(num) > maxAbs) {
    return null;
  }

  // Clamp to the allowed scale (e.g. DECIMAL(10, 8) or DECIMAL(11, 8))
  return Number(num.toFixed(scale));
}

async function up(client) {
  // pincode.csv lives in the top-level `data` folder of the project.
  // This resolves from: src/database/migrations -> project root -> data/pincode.csv
  const csvFilePath = path.join(__dirname, '../../../data/pincode.csv');

  // Check if file exists before proceeding
  if (!fs.existsSync(csvFilePath)) {
    console.warn(`⚠️ pincode.csv not found at ${csvFilePath}. Skipping seed.`);
    return;
  }

  console.log('⏳ Starting pincode data import...');

  return new Promise((resolve, reject) => {
    const promises = [];

    fs.createReadStream(csvFilePath)
      .pipe(csv())
      .on('data', (row) => {
        // Convert/clean values from CSV
        const pincode = String(row.pincode || '').padStart(6, '0');

        // Latitude expected roughly between -90 and 90, longitude between -180 and 180
        const latitude = cleanDecimal(row.latitude, 8, 90);
        const longitude = cleanDecimal(row.longitude, 8, 180);

        const query = `
          INSERT INTO pincodes (
            pincode,
            office_name,
            office_type,
            district,
            division,
            region,
            state,
            latitude,
            longitude
          ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9);
        `;

        const values = [
          pincode,
          row.officename || null,
          row.officetype || null,
          row.district || null,
          row.divisionname || null,
          // Prefer regionname, fall back to circlename if needed
          row.regionname || row.circlename || null,
          row.statename || null,
          latitude,
          longitude,
        ];

        promises.push(client.query(query, values));
      })
      .on('end', async () => {
        try {
          await Promise.all(promises);
          console.log('✅ Pincode data import completed.');
          resolve();
        } catch (err) {
          reject(err);
        }
      })
      .on('error', reject);
  });
}

async function down(client) {
  // Optional: Clear the table on rollback
  await client.query('TRUNCATE TABLE pincodes;');
}

module.exports = {
  id: '20251217010000_seed_pincode_data',
  name: 'seed pincode data from csv',
  up,
  down,
};

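A few illustrative `cleanDecimal` calls (inputs hypothetical):

```js
cleanDecimal('18.9398', 8, 90); // → 18.9398
cleanDecimal('NA', 8, 90);      // → null (explicit "not available" marker)
cleanDecimal('abc', 8, 90);     // → null (not a number)
cleanDecimal('123.45', 8, 90);  // → null (outside the ±90 latitude range)
```
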
117  src/database/migrations/20251217020000_seed_gst_data.js  Normal file
@@ -0,0 +1,117 @@
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');

async function up(client) {
  // gst.csv lives in the top-level `data` folder of the project.
  // This resolves from: src/database/migrations -> project root -> data/gst.csv
  const csvFilePath = path.join(__dirname, '../../../data/gst.csv');

  if (!fs.existsSync(csvFilePath)) {
    console.warn(`⚠️ gst.csv not found at ${csvFilePath}. Skipping GST seed.`);
    return;
  }

  console.log('⏳ Starting GST data import...');

  return new Promise((resolve, reject) => {
    const promises = [];

    fs.createReadStream(csvFilePath)
      .pipe(csv())
      .on('data', (row) => {
        // Map CSV columns to database columns
        // CSV header:
        // gstin,legal_name,trade_name,status,registration_date,last_updated,
        // business_type,constitution,state,state_code,pan,
        // address_building,address_floor,address_street,address_locality,
        // address_city,address_district,address_state_code,address_pincode,
        // nature_of_business,filing_status_gstr1,filing_status_gstr3b,filing_last_filed_date

        const query = `
          INSERT INTO gst_registrations (
            gstin,
            legal_name,
            trade_name,
            status,
            registration_date,
            last_updated,
            business_type,
            constitution,
            state,
            state_code,
            pan,
            address_building,
            address_floor,
            address_street,
            address_locality,
            address_city,
            address_district,
            address_state_code,
            address_pincode,
            nature_of_business,
            filing_status_gstr1,
            filing_status_gstr3b,
            filing_last_filed_date
          ) VALUES (
            $1, $2, $3, $4, $5, $6,
            $7, $8, $9, $10, $11,
            $12, $13, $14, $15, $16,
            $17, $18, $19, $20, $21,
            $22, $23
          )
          ON CONFLICT (gstin) DO NOTHING;
        `;

        const values = [
          row.gstin || null,
          row.legal_name || null,
          row.trade_name || null,
          row.status || null,
          row.registration_date || null, // 'YYYY-MM-DD' string; Postgres will cast to DATE
          row.last_updated || null, // ISO string; Postgres will cast to TIMESTAMP
          row.business_type || null,
          row.constitution || null,
          row.state || null,
          row.state_code || null,
          row.pan || null,
          row.address_building || null,
          row.address_floor || null,
          row.address_street || null,
          row.address_locality || null,
          row.address_city || null,
          row.address_district || null,
          row.address_state_code || null,
          row.address_pincode || null,
          row.nature_of_business || null, // e.g. "Manufacturing|Services"
          row.filing_status_gstr1 || null,
          row.filing_status_gstr3b || null,
          row.filing_last_filed_date || null // 'YYYY-MM-DD'; Postgres will cast to DATE
        ];

        promises.push(client.query(query, values));
      })
      .on('end', async () => {
        try {
          await Promise.all(promises);
          console.log('✅ GST data import completed.');
          resolve();
        } catch (err) {
          reject(err);
        }
      })
      .on('error', reject);
  });
}

async function down(client) {
  // Optional: Clear the table on rollback
  await client.query('TRUNCATE TABLE gst_registrations;');
}

module.exports = {
  id: '20251217020000_seed_gst_data',
  name: 'seed gst data from csv',
  up,
  down,
};

55  src/database/migrations/20251217030000_add_pan_bank_verifications.js  Normal file
@@ -0,0 +1,55 @@
const panBankSchema = `
  -- Table: pan_verifications
  CREATE TABLE IF NOT EXISTS pan_verifications (
    id SERIAL PRIMARY KEY,
    pan VARCHAR(10) NOT NULL,
    name VARCHAR(255),
    status VARCHAR(50),
    pan_type VARCHAR(50),
    name_match BOOLEAN,
    name_match_score INTEGER,
    requested_by INTEGER REFERENCES users(id),
    requested_at TIMESTAMP DEFAULT NOW()
  );

  CREATE INDEX IF NOT EXISTS idx_pan_verifications_pan ON pan_verifications(pan);

  -- Table: bank_verifications
  CREATE TABLE IF NOT EXISTS bank_verifications (
    id SERIAL PRIMARY KEY,
    account_number VARCHAR(34) NOT NULL,
    ifsc VARCHAR(11) NOT NULL,
    name VARCHAR(255),
    account_exists BOOLEAN,
    name_match BOOLEAN,
    name_match_score INTEGER,
    bank_name VARCHAR(255),
    branch VARCHAR(255),
    requested_by INTEGER REFERENCES users(id),
    requested_at TIMESTAMP DEFAULT NOW()
  );

  CREATE INDEX IF NOT EXISTS idx_bank_verifications_ifsc_account
    ON bank_verifications(ifsc, account_number);
`;

async function up(client) {
  await client.query(panBankSchema);
}

async function down(client) {
  await client.query(`
    DROP TABLE IF EXISTS bank_verifications;
    DROP TABLE IF EXISTS pan_verifications;
  `);
}

module.exports = {
  id: '20251217030000_add_pan_bank_verifications',
  name: 'add pan and bank verification tables',
  up,
  down,
};

14  src/database/setup.js  Normal file
@@ -0,0 +1,14 @@
const { runMigrations } = require('./migrationRunner');

async function setupDatabase() {
  try {
    await runMigrations('up');
    console.log('✅ Database migrations executed');
    return true;
  } catch (error) {
    console.error('❌ Database setup failed:', error);
    throw error;
  }
}

module.exports = { setupDatabase };

87  src/index.js  Normal file
@@ -0,0 +1,87 @@
// Main entry point for VerifyIndia API

require('dotenv').config();

const express = require('express');
const helmet = require('helmet');
const cors = require('cors');
const morgan = require('morgan');

const { connectDB } = require('./database/connection');

const authRoutes = require('./routes/auth');
const ifscRoutes = require('./routes/ifsc');
const pincodeRoutes = require('./routes/pincode');
const gstRoutes = require('./routes/gst');
const panRoutes = require('./routes/pan');
const bankRoutes = require('./routes/bank');
const userRoutes = require('./routes/user');

const { errorHandler } = require('./middleware/errorHandler');

const app = express();

app.use(helmet());
app.use(cors());
app.use(express.json());
app.use(morgan('combined'));

app.get('/health', (req, res) => {
  res.json({ status: 'healthy', timestamp: new Date().toISOString() });
});

app.get('/', (req, res) => {
  res.json({
    message: 'VerifyIndia API',
    version: 'v1',
    endpoints: {
      ifsc: '/v1/ifsc/:ifsc_code',
      pincode: '/v1/pincode/:pincode',
      gst: '/v1/gst/verify/:gstin',
      pan: '/v1/pan/verify',
      bank: '/v1/bank/verify'
    }
  });
});

app.use('/v1/auth', authRoutes);
app.use('/v1/user', userRoutes);
app.use('/v1/ifsc', ifscRoutes);
app.use('/v1/pincode', pincodeRoutes);
app.use('/v1/gst', gstRoutes);
app.use('/v1/pan', panRoutes);
app.use('/v1/bank', bankRoutes);

app.use('*', (req, res) => {
  res.status(404).json({
    success: false,
    error: { code: 'NOT_FOUND', message: `Route ${req.originalUrl} not found` }
  });
});

app.use(errorHandler);

const PORT = process.env.PORT || 3000;

async function startServer() {
  try {
    await connectDB();
    console.log('✅ PostgreSQL connected');
    // Redis is optional; the cache module falls back to an in-memory Map.
    // await connectRedis();
    // console.log('✅ Redis connected', isDummyCache() ? 'using dummy cache' : 'using real cache');

    app.listen(PORT, () => {
      console.log(`✅ Server running on port ${PORT}`);
    });
  } catch (error) {
    console.error('❌ Failed to start:', error);
    process.exit(1);
  }
}

startServer();

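Once `npm run dev` reports the server is listening, a quick smoke test from Node 18+ (or any HTTP client) against the unauthenticated health route:

```js
fetch('http://localhost:3000/health')
  .then((res) => res.json())
  .then(console.log); // → { status: 'healthy', timestamp: '…' }
```
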
86  src/middleware/auth.js  Normal file
@@ -0,0 +1,86 @@
const crypto = require('crypto');
const { query } = require('../database/connection');
const { cacheGet, cacheSet } = require('../cache/redis');

async function authenticateApiKey(req, res, next) {
  try {
    const apiKey = req.headers['x-api-key'];

    if (!apiKey) {
      return res.status(401).json({
        success: false,
        error: { code: 'MISSING_API_KEY', message: 'API key required' }
      });
    }

    if (!apiKey.startsWith('vf_live_') && !apiKey.startsWith('vf_test_')) {
      return res.status(401).json({
        success: false,
        error: { code: 'INVALID_API_KEY_FORMAT', message: 'Invalid API key format' }
      });
    }

    const cacheKey = `apikey:${apiKey}`;
    let keyData = await cacheGet(cacheKey);

    if (!keyData) {
      const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');

      const result = await query(
        `SELECT ak.*, u.plan, u.monthly_quota, u.calls_this_month, u.is_active as user_active
         FROM api_keys ak
         JOIN users u ON ak.user_id = u.id
         WHERE ak.key_hash = $1 AND ak.is_active = true`,
        [keyHash]
      );

      if (result.rows.length === 0) {
        return res.status(401).json({
          success: false,
          error: { code: 'INVALID_API_KEY', message: 'Invalid or inactive API key' }
        });
      }

      keyData = result.rows[0];
      await cacheSet(cacheKey, keyData, 300);
    }

    if (!keyData.user_active) {
      return res.status(403).json({
        success: false,
        error: { code: 'ACCOUNT_INACTIVE', message: 'Account inactive' }
      });
    }

    if (keyData.calls_this_month >= keyData.monthly_quota) {
      return res.status(429).json({
        success: false,
        error: {
          code: 'QUOTA_EXCEEDED',
          message: 'Monthly quota exceeded',
          details: { used: keyData.calls_this_month, limit: keyData.monthly_quota }
        }
      });
    }

    req.user = {
      id: keyData.user_id,
      plan: keyData.plan,
      apiKeyId: keyData.id,
      isTestKey: keyData.is_test_key,
      quota: keyData.monthly_quota,
      used: keyData.calls_this_month,
      remaining: keyData.monthly_quota - keyData.calls_this_month
    };

    next();
  } catch (error) {
    console.error('Auth error:', error);
    return res.status(500).json({
      success: false,
      error: { code: 'AUTH_ERROR', message: 'Authentication failed' }
    });
  }
}

module.exports = { authenticateApiKey };

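Since only the SHA-256 hash of each key is stored, validating a presented key never requires plaintext in the database; the lookup reduces to this (a sketch with a placeholder key):

```js
const crypto = require('crypto');

const presentedKey = 'vf_test_0123456789abcdef'; // placeholder
const keyHash = crypto.createHash('sha256').update(presentedKey).digest('hex');
// The middleware then runs:
//   SELECT ... FROM api_keys ak JOIN users u ... WHERE ak.key_hash = $1
// with keyHash as $1, so a leaked api_keys table never exposes usable keys.
```
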
23  src/middleware/errorHandler.js  Normal file
@@ -0,0 +1,23 @@
function errorHandler(err, req, res, next) {
  console.error('Error:', err.message);

  const statusCode = err.statusCode || 500;
  const message = process.env.NODE_ENV === 'production' && statusCode === 500
    ? 'Internal server error'
    : err.message;

  res.status(statusCode).json({
    success: false,
    error: { code: err.code || 'INTERNAL_ERROR', message }
  });
}

class ApiError extends Error {
  constructor(statusCode, code, message) {
    super(message);
    this.statusCode = statusCode;
    this.code = code;
  }
}

module.exports = { errorHandler, ApiError };

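Route handlers signal failures by throwing (or passing to `next`) an `ApiError`, which the handler above turns into the standard error envelope; this is the same pattern the auth routes use:

```js
const { ApiError } = require('./src/middleware/errorHandler');

// Inside an async route handler:
//   throw new ApiError(404, 'IFSC_NOT_FOUND', 'IFSC not found');
// ...produces HTTP 404 with:
//   { "success": false, "error": { "code": "IFSC_NOT_FOUND", "message": "IFSC not found" } }
```
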
45  src/middleware/rateLimit.js  Normal file
@@ -0,0 +1,45 @@
const { getRedisClient } = require('../cache/redis');

// Requests allowed per minute, by plan (fixed one-minute window).
const RATE_LIMITS = {
  free: 10,
  starter: 60,
  growth: 120,
  business: 300,
  enterprise: 1000
};

async function rateLimit(req, res, next) {
  const redis = getRedisClient();
  if (!redis) return next(); // no Redis → rate limiting is skipped

  try {
    const userId = req.user?.id || req.ip;
    const plan = req.user?.plan || 'free';
    const limit = RATE_LIMITS[plan] || RATE_LIMITS.free;
    const key = `ratelimit:${userId}`;

    const current = await redis.incr(key);
    if (current === 1) await redis.expire(key, 60); // first hit opens the window

    const ttl = await redis.ttl(key);

    res.set({
      'X-RateLimit-Limit': limit,
      'X-RateLimit-Remaining': Math.max(0, limit - current),
      'X-RateLimit-Reset': Math.floor(Date.now() / 1000) + ttl
    });

    if (current > limit) {
      return res.status(429).json({
        success: false,
        error: { code: 'RATE_LIMIT_EXCEEDED', message: `Limit: ${limit}/minute`, retry_after: ttl }
      });
    }

    next();
  } catch (error) {
    // Fail open: a cache error should not block API traffic.
    next();
  }
}

module.exports = { rateLimit };

111  src/routes/auth.js  Normal file
@@ -0,0 +1,111 @@
const express = require('express');
const router = express.Router();
const bcrypt = require('bcryptjs');
const jwt = require('jsonwebtoken');
const crypto = require('crypto');
const { query } = require('../database/connection');
const { ApiError } = require('../middleware/errorHandler');

function generateApiKey(type = 'live') {
  const prefix = type === 'test' ? 'vf_test_' : 'vf_live_';
  return prefix + crypto.randomBytes(24).toString('hex');
}

router.post('/signup', async (req, res, next) => {
  try {
    const { email, password, company_name, phone } = req.body;

    if (!email || !password) {
      throw new ApiError(400, 'MISSING_FIELDS', 'Email and password required');
    }

    if (password.length < 8) {
      throw new ApiError(400, 'WEAK_PASSWORD', 'Password must be 8+ characters');
    }

    const existing = await query('SELECT id FROM users WHERE email = $1', [email.toLowerCase()]);
    if (existing.rows.length > 0) {
      throw new ApiError(409, 'EMAIL_EXISTS', 'Email already registered');
    }

    const passwordHash = await bcrypt.hash(password, 10);

    const result = await query(
      `INSERT INTO users (email, password_hash, company_name, phone, plan, monthly_quota, quota_reset_date)
       VALUES ($1, $2, $3, $4, 'free', 100, DATE(NOW() + INTERVAL '1 month'))
       RETURNING id, email, company_name, plan`,
      [email.toLowerCase(), passwordHash, company_name, phone]
    );

    const user = result.rows[0];
    const apiKey = generateApiKey('live');
    const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');

    await query(
      `INSERT INTO api_keys (user_id, key_prefix, key_hash, key_hint, name)
       VALUES ($1, $2, $3, $4, 'Default')`,
      [user.id, 'vf_live_', keyHash, apiKey.slice(-4)]
    );

    const token = jwt.sign({ userId: user.id, email: user.email }, process.env.JWT_SECRET, { expiresIn: '7d' });

    res.status(201).json({
      success: true,
      data: {
        user: { id: user.id, email: user.email, company_name: user.company_name, plan: user.plan },
        api_key: apiKey,
        token
      }
    });
  } catch (error) {
    next(error);
  }
});

router.post('/login', async (req, res, next) => {
  try {
    const { email, password } = req.body;

    if (!email || !password) {
      throw new ApiError(400, 'MISSING_FIELDS', 'Email and password required');
    }

    const result = await query('SELECT * FROM users WHERE email = $1 AND is_active = true', [email.toLowerCase()]);

    if (result.rows.length === 0) {
      throw new ApiError(401, 'INVALID_CREDENTIALS', 'Invalid email or password');
    }

    const user = result.rows[0];
    const validPassword = await bcrypt.compare(password, user.password_hash);

    if (!validPassword) {
      throw new ApiError(401, 'INVALID_CREDENTIALS', 'Invalid email or password');
    }

    await query('UPDATE users SET last_login_at = NOW() WHERE id = $1', [user.id]);

    const token = jwt.sign({ userId: user.id, email: user.email }, process.env.JWT_SECRET, { expiresIn: '7d' });

    res.json({
      success: true,
      data: {
        user: {
          id: user.id,
          email: user.email,
          company_name: user.company_name,
          plan: user.plan,
          quota: user.monthly_quota,
          used: user.calls_this_month
        },
        token
      }
    });
  } catch (error) {
    next(error);
  }
});

module.exports = router;

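An example signup call from Node 18+ (credentials are illustrative). The response includes the one-time plaintext `api_key`, so it should be saved immediately — only its hash is kept server-side:

```js
fetch('http://localhost:3000/v1/auth/signup', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    email: 'dev@example.com',
    password: 'longpassword1', // must be 8+ characters
    company_name: 'Acme Ltd',
  }),
})
  .then((res) => res.json())
  .then(({ data }) => console.log(data.api_key, data.token));
```
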
63  src/routes/bank.js  Normal file
@@ -0,0 +1,63 @@
const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { verifyBankAccount } = require('../services/bankService');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

router.post('/verify', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { account_number, ifsc, name } = req.body;

    if (!account_number || !ifsc) {
      return res.status(400).json({
        success: false,
        error: { code: 'MISSING_FIELDS', message: 'Account number and IFSC are required' }
      });
    }

    const result = await verifyBankAccount(account_number, ifsc, name);

    if (!result.success) {
      return res.status(result.statusCode || 404).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_bank_${Date.now()}`,
        credits_used: 2,
        credits_remaining: req.user.remaining - 2
      }
    });
  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/bank/verify',
      method: 'POST',
      params: { account_number: req.body.account_number, ifsc: req.body.ifsc },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;

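A bank verification call might look like this (all values are placeholders except the seeded IFSC; note this endpoint bills 2 credits rather than 1):

```js
fetch('http://localhost:3000/v1/bank/verify', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'x-api-key': 'vf_test_xxx', // key printed by scripts/create-test-api-key.js
  },
  body: JSON.stringify({
    account_number: '000123456789', // placeholder
    ifsc: 'HDFC0000001',            // seeded IFSC code
    name: 'Jane Doe',               // optional; enables name matching
  }),
})
  .then((res) => res.json())
  .then(console.log);
```
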
65  src/routes/gst.js  Normal file
@@ -0,0 +1,65 @@
|
||||
const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { verifyGSTIN } = require('../services/gstService');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

router.get('/verify/:gstin', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { gstin } = req.params;
    const gstinRegex = /^[0-9]{2}[A-Z]{5}[0-9]{4}[A-Z]{1}[1-9A-Z]{1}Z[0-9A-Z]{1}$/;

    if (!gstinRegex.test(gstin.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_GSTIN', message: 'Invalid GSTIN format' }
      });
    }

    const result = await verifyGSTIN(gstin.toUpperCase());

    if (!result.success) {
      return res.status(result.statusCode || 404).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_gst_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1,
        source: 'gstn'
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/gst/verify',
      method: 'GET',
      params: { gstin: req.params.gstin },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;
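A lookup sketch against this route; the `/v1/gst` mount point and port are assumptions, and the GSTIN shown is a format-valid placeholder rather than a real registration:

```bash
curl http://localhost:3000/v1/gst/verify/27AAPFU0939F1ZV \
  -H "X-API-Key: vf_live_xxx"
```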
89
src/routes/ifsc.js
Normal file
@ -0,0 +1,89 @@
const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { query } = require('../database/connection');
const { cacheGet, cacheSet } = require('../cache/redis');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

router.get('/:ifsc_code', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { ifsc_code } = req.params;
    const ifscRegex = /^[A-Z]{4}0[A-Z0-9]{6}$/;

    if (!ifscRegex.test(ifsc_code.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_IFSC', message: 'Invalid IFSC format' }
      });
    }

    const ifsc = ifsc_code.toUpperCase();
    const cacheKey = `ifsc:${ifsc}`;
    let data = await cacheGet(cacheKey);

    if (!data) {
      const result = await query('SELECT * FROM ifsc_codes WHERE ifsc = $1', [ifsc]);

      if (result.rows.length === 0) {
        return res.status(404).json({
          success: false,
          error: { code: 'IFSC_NOT_FOUND', message: 'IFSC not found' }
        });
      }

      data = result.rows[0];
      // Cache database hits for 24 hours (86400 seconds)
      await cacheSet(cacheKey, data, 86400);
    }

    success = true;

    res.json({
      success: true,
      data: {
        ifsc: data.ifsc,
        bank: data.bank_name,
        branch: data.branch,
        address: data.address,
        city: data.city,
        district: data.district,
        state: data.state,
        contact: data.contact,
        upi: data.upi_enabled,
        rtgs: data.rtgs_enabled,
        neft: data.neft_enabled,
        imps: data.imps_enabled,
        micr: data.micr_code,
        swift: data.swift_code
      },
      meta: {
        request_id: `req_ifsc_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/ifsc',
      method: 'GET',
      params: { ifsc: req.params.ifsc_code },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;
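A lookup sketch; the `/v1/ifsc` mount point and port are assumptions, and the IFSC is a format-valid placeholder:

```bash
curl http://localhost:3000/v1/ifsc/SBIN0000001 \
  -H "X-API-Key: vf_live_xxx"
```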
71
src/routes/pan.js
Normal file
@ -0,0 +1,71 @@
const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { verifyPAN } = require('../services/panService');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

router.post('/verify', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { pan, name, dob } = req.body;
    const panRegex = /^[A-Z]{5}[0-9]{4}[A-Z]{1}$/;

    if (!pan) {
      return res.status(400).json({
        success: false,
        error: { code: 'MISSING_PAN', message: 'PAN is required' }
      });
    }

    if (!panRegex.test(pan.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_PAN', message: 'Invalid PAN format' }
      });
    }

    const result = await verifyPAN(pan.toUpperCase(), name, dob);

    if (!result.success) {
      return res.status(result.statusCode || 404).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_pan_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pan/verify',
      method: 'POST',
      params: { pan: req.body.pan },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;
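A sketch of a verification call; the mount point and values are placeholders, with the PAN matching the `[A-Z]{5}[0-9]{4}[A-Z]` format the route enforces:

```bash
curl -X POST http://localhost:3000/v1/pan/verify \
  -H "X-API-Key: vf_live_xxx" \
  -H "Content-Type: application/json" \
  -d '{"pan": "ABCDE1234F", "name": "JOHN DOE"}'
```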
190
src/routes/pincode.js
Normal file
@ -0,0 +1,190 @@
const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { query } = require('../database/connection');
const { cacheGet, cacheSet, cacheDelete } = require('../cache/redis');
const { logApiCall } = require('../services/analytics');

const STATE_CODES = {
  'Andhra Pradesh': 'AP', 'Arunachal Pradesh': 'AR', 'Assam': 'AS', 'Bihar': 'BR',
  'Chhattisgarh': 'CG', 'Delhi': 'DL', 'Goa': 'GA', 'Gujarat': 'GJ', 'Haryana': 'HR',
  'Himachal Pradesh': 'HP', 'Jharkhand': 'JH', 'Karnataka': 'KA', 'Kerala': 'KL',
  'Madhya Pradesh': 'MP', 'Maharashtra': 'MH', 'Manipur': 'MN', 'Meghalaya': 'ML',
  'Mizoram': 'MZ', 'Nagaland': 'NL', 'Odisha': 'OD', 'Punjab': 'PB', 'Rajasthan': 'RJ',
  'Sikkim': 'SK', 'Tamil Nadu': 'TN', 'Telangana': 'TS', 'Tripura': 'TR',
  'Uttar Pradesh': 'UP', 'Uttarakhand': 'UK', 'West Bengal': 'WB'
};

router.use(authenticateApiKey);
router.use(rateLimit);

router.get('/:pincode', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { pincode } = req.params;

    if (!/^\d{6}$/.test(pincode)) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_PINCODE', message: 'Pincode must be 6 digits' }
      });
    }

    const cacheKey = `pincode:${pincode}`;
    let data = await cacheGet(cacheKey);

    if (!data) {
      const result = await query('SELECT * FROM pincodes WHERE pincode = $1', [pincode]);

      if (result.rows.length === 0) {
        return res.status(404).json({
          success: false,
          error: { code: 'PINCODE_NOT_FOUND', message: 'Pincode not found' }
        });
      }

      data = result.rows;
      // Pincode data changes rarely; cache for 7 days (604800 seconds)
      await cacheSet(cacheKey, data, 604800);
    }

    success = true;
    const primary = data[0];

    res.json({
      success: true,
      data: {
        pincode,
        locations: data.map(row => ({
          office_name: row.office_name,
          office_type: row.office_type,
          district: row.district,
          state: row.state,
          latitude: parseFloat(row.latitude) || null,
          longitude: parseFloat(row.longitude) || null
        })),
        primary: {
          district: primary.district,
          state: primary.state,
          state_code: STATE_CODES[primary.state] || ''
        }
      },
      meta: {
        request_id: `req_pin_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pincode',
      method: 'GET',
      params: { pincode: req.params.pincode },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

router.post('/', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { pincode, office_name, office_type, district, division, region, state, latitude, longitude } = req.body;

    if (!pincode || !/^\d{6}$/.test(pincode)) {
      return res.status(400).json({ success: false, error: { code: 'INVALID_PINCODE', message: 'Pincode must be a 6-digit number.' } });
    }
    if (!office_name || office_name.trim() === '') {
      return res.status(400).json({ success: false, error: { code: 'INVALID_OFFICE_NAME', message: 'Office name is required.' } });
    }
    if (!office_type || office_type.trim() === '') {
      return res.status(400).json({ success: false, error: { code: 'INVALID_OFFICE_TYPE', message: 'Office type is required.' } });
    }
    if (!district || district.trim() === '') {
      return res.status(400).json({ success: false, error: { code: 'INVALID_DISTRICT', message: 'District is required.' } });
    }
    if (!division || division.trim() === '') {
      return res.status(400).json({ success: false, error: { code: 'INVALID_DIVISION', message: 'Division is required.' } });
    }
    if (!region || region.trim() === '') {
      return res.status(400).json({ success: false, error: { code: 'INVALID_REGION', message: 'Region is required.' } });
    }
    if (!state || state.trim() === '') {
      return res.status(400).json({ success: false, error: { code: 'INVALID_STATE', message: 'State is required.' } });
    }
    if (latitude !== undefined && (isNaN(parseFloat(latitude)) || parseFloat(latitude) < -90 || parseFloat(latitude) > 90)) {
      return res.status(400).json({ success: false, error: { code: 'INVALID_LATITUDE', message: 'Latitude must be a number between -90 and 90.' } });
    }
    if (longitude !== undefined && (isNaN(parseFloat(longitude)) || parseFloat(longitude) < -180 || parseFloat(longitude) > 180)) {
      return res.status(400).json({ success: false, error: { code: 'INVALID_LONGITUDE', message: 'Longitude must be a number between -180 and 180.' } });
    }

    // Upsert keyed on pincode; this assumes a UNIQUE constraint on
    // pincodes.pincode, i.e. one office row per pincode.
    const insertQuery = `
      INSERT INTO pincodes (pincode, office_name, office_type, district, division, region, state, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
      ON CONFLICT (pincode) DO UPDATE SET
        office_name = EXCLUDED.office_name,
        office_type = EXCLUDED.office_type,
        district = EXCLUDED.district,
        division = EXCLUDED.division,
        region = EXCLUDED.region,
        state = EXCLUDED.state,
        latitude = EXCLUDED.latitude,
        longitude = EXCLUDED.longitude,
        updated_at = NOW()
      RETURNING *;
    `;

    const values = [
      pincode,
      office_name,
      office_type,
      district,
      division,
      region,
      state,
      latitude ? parseFloat(latitude) : null,
      longitude ? parseFloat(longitude) : null
    ];

    const result = await query(insertQuery, values);

    // Invalidate cache for this pincode
    await cacheDelete(`pincode:${pincode}`);

    success = true;
    res.status(201).json({
      success: true,
      message: 'Pincode data stored successfully',
      data: result.rows[0]
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pincode',
      method: 'POST',
      params: req.body,
      status: success ? 201 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;
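Two illustrative calls against this router, assuming a `/v1/pincode` mount point (matching the endpoint string logged above) and the default port; the key and payload values are placeholders:

```bash
# Look up all post offices for a pincode
curl http://localhost:3000/v1/pincode/110001 \
  -H "X-API-Key: vf_live_xxx"

# Upsert a pincode record
curl -X POST http://localhost:3000/v1/pincode \
  -H "X-API-Key: vf_live_xxx" \
  -H "Content-Type: application/json" \
  -d '{"pincode": "110001", "office_name": "New Delhi GPO", "office_type": "HO", "district": "New Delhi", "division": "New Delhi Central", "region": "Delhi", "state": "Delhi"}'
```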
86
src/routes/user.js
Normal file
@ -0,0 +1,86 @@
const express = require('express');
const router = express.Router();
const jwt = require('jsonwebtoken');
const { query } = require('../database/connection');
const { ApiError } = require('../middleware/errorHandler');

function authenticateToken(req, res, next) {
  const authHeader = req.headers['authorization'];
  const token = authHeader && authHeader.split(' ')[1];

  if (!token) {
    return res.status(401).json({
      success: false,
      error: { code: 'MISSING_TOKEN', message: 'Authorization token required' }
    });
  }

  jwt.verify(token, process.env.JWT_SECRET, (err, user) => {
    if (err) {
      return res.status(403).json({
        success: false,
        error: { code: 'INVALID_TOKEN', message: 'Invalid or expired token' }
      });
    }
    req.userId = user.userId;
    next();
  });
}

router.use(authenticateToken);

router.get('/usage', async (req, res, next) => {
  try {
    const userId = req.userId;

    const usageResult = await query(
      `SELECT
        COUNT(*) as total_calls,
        COUNT(*) FILTER (WHERE success = true) as successful_calls,
        COUNT(*) FILTER (WHERE success = false) as failed_calls,
        SUM(credits_used) as credits_used
      FROM api_calls
      WHERE user_id = $1 AND called_at >= DATE_TRUNC('month', NOW())`,
      [userId]
    );

    const endpointResult = await query(
      `SELECT endpoint, COUNT(*) as count
      FROM api_calls
      WHERE user_id = $1 AND called_at >= DATE_TRUNC('month', NOW())
      GROUP BY endpoint`,
      [userId]
    );

    const userResult = await query(
      'SELECT monthly_quota, calls_this_month FROM users WHERE id = $1',
      [userId]
    );

    const user = userResult.rows[0];
    const usage = usageResult.rows[0];
    const byEndpoint = {};
    endpointResult.rows.forEach(row => {
      byEndpoint[row.endpoint] = parseInt(row.count);
    });

    res.json({
      success: true,
      data: {
        period: 'month',
        total_calls: parseInt(usage.total_calls) || 0,
        successful_calls: parseInt(usage.successful_calls) || 0,
        failed_calls: parseInt(usage.failed_calls) || 0,
        credits_used: parseInt(usage.credits_used) || 0,
        quota: user.monthly_quota,
        remaining: Math.max(0, user.monthly_quota - user.calls_this_month),
        by_endpoint: byEndpoint
      }
    });

  } catch (error) {
    next(error);
  }
});

module.exports = router;
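A sketch of fetching the monthly usage report; the `/v1/user` mount point and port are assumptions, and the token placeholder stands for the JWT issued by the login handler earlier in this commit:

```bash
curl http://localhost:3000/v1/user/usage \
  -H "Authorization: Bearer <jwt-from-login>"
```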
21
src/services/analytics.js
Normal file
@ -0,0 +1,21 @@
const { query } = require('../database/connection');

// Best-effort request logging: any failure here is swallowed (logged to the
// console) so analytics can never break the API call being recorded.
async function logApiCall({ userId, apiKeyId, endpoint, method, params, status, duration, success, isTestKey, errorMessage = null }) {
  try {
    await query(
      `INSERT INTO api_calls
       (user_id, api_key_id, endpoint, method, request_params, response_status, response_time_ms, success, error_message, credits_used, is_billable)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)`,
      [userId, apiKeyId, endpoint, method, JSON.stringify(params || {}), status, duration, success, errorMessage, success ? 1 : 0, !isTestKey && success]
    );

    if (success && !isTestKey) {
      await query('UPDATE users SET calls_this_month = calls_this_month + 1 WHERE id = $1', [userId]);
      await query('UPDATE api_keys SET last_used_at = NOW(), total_calls = total_calls + 1 WHERE id = $1', [apiKeyId]);
    }
  } catch (error) {
    console.error('Log error:', error.message);
  }
}

module.exports = { logApiCall };
131
src/services/bankService.js
Normal file
@ -0,0 +1,131 @@
const axios = require('axios');

// NOTE: these defaults mutate the shared axios instance for the whole process;
// they are redundant while the provider call below is mocked out.
axios.defaults.headers.common['Authorization'] = `Bearer ${process.env.BANK_PROVIDER_KEY}`;
axios.defaults.headers.post['Content-Type'] = 'application/json';

const { cacheGet, cacheSet } = require('../cache/redis');
const { query } = require('../database/connection');

async function verifyBankAccount(accountNumber, ifsc, name = null) {
  try {
    const cacheKey = `bank:${ifsc}:${accountNumber}`;
    const cached = await cacheGet(cacheKey);
    if (cached) {
      if (name) {
        cached.name_match = cached.name_at_bank === name.toUpperCase();
        cached.name_match_score = cached.name_match ? 100 : 0;
      }
      return { success: true, data: cached };
    }

    // Get bank details from IFSC
    const ifscResult = await query('SELECT bank_name, branch FROM ifsc_codes WHERE ifsc = $1', [ifsc.toUpperCase()]);

    if (ifscResult.rows.length === 0) {
      return { success: false, statusCode: 400, errorCode: 'INVALID_IFSC', message: 'IFSC not found' };
    }

    const bankInfo = ifscResult.rows[0];

    // Temporarily mock the external bank provider response for testing
    const mockResponseData = {
      status_code: 200,
      data: {
        account_exists: true,
        name_at_bank: name || 'DUMMY ACCOUNT HOLDER',
        account_holder_name: name || 'DUMMY ACCOUNT HOLDER',
        branch: bankInfo.branch
      }
    };
    const response = { data: mockResponseData };

    // Actual external API call, disabled while the mock above is in use:
    // const response = await axios.post(
    //   process.env.BANK_PROVIDER_URL,
    //   {
    //     account_number: accountNumber,
    //     ifsc: ifsc.toUpperCase(),
    //     name: name
    //   },
    //   {
    //     headers: {
    //       'Authorization': `Bearer ${process.env.BANK_PROVIDER_KEY}`,
    //       'Content-Type': 'application/json'
    //     },
    //     timeout: 30000
    //   }
    // );

    if (!response.data || response.data.status_code !== 200) {
      return { success: false, statusCode: 404, errorCode: 'ACCOUNT_NOT_FOUND', message: 'Account not found' };
    }

    const d = response.data.data;

    const data = {
      account_number: accountNumber,
      ifsc: ifsc.toUpperCase(),
      account_exists: d.account_exists !== false,
      name_at_bank: d.name_at_bank || d.account_holder_name || '',
      name_match: name ? (d.name_at_bank || d.account_holder_name || '').toUpperCase() === name.toUpperCase() : null,
      name_match_score: name ? ((d.name_at_bank || d.account_holder_name || '').toUpperCase() === name.toUpperCase() ? 100 : 0) : null,
      bank_name: bankInfo.bank_name,
      branch: bankInfo.branch || d.branch || ''
    };

    await cacheSet(cacheKey, data, 86400);

    // Persist bank verification result to Postgres (best-effort, non-blocking)
    try {
      await query(
        `INSERT INTO bank_verifications (
          account_number,
          ifsc,
          name,
          account_exists,
          name_match,
          name_match_score,
          bank_name,
          branch,
          requested_by
        ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`,
        [
          data.account_number,
          data.ifsc,
          data.name_at_bank,
          data.account_exists,
          data.name_match,
          data.name_match_score,
          data.bank_name,
          data.branch,
          null // requested_by (user id) - can be wired from the route later if needed
        ]
      );
    } catch (dbError) {
      console.error('Failed to store bank verification in database:', dbError.message || dbError);
    }

    return { success: true, data };

  } catch (error) {
    // Surface provider details when available to avoid generic 500s
    if (error.code === 'ECONNABORTED') {
      return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
    }

    const providerStatus = error.response?.status || error.response?.data?.status_code;
    const providerMessage = error.response?.data?.message || error.message;

    if (providerStatus) {
      return {
        success: false,
        statusCode: providerStatus,
        errorCode: error.response?.data?.error_code || 'PROVIDER_ERROR',
        message: providerMessage || 'Provider error'
      };
    }

    return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
  }
}

module.exports = { verifyBankAccount };
191
src/services/gstService.js
Normal file
@ -0,0 +1,191 @@
// Previous provider-based implementation, retained for reference:
//
// const axios = require('axios');
// const { cacheGet, cacheSet } = require('../cache/redis');
//
// const STATE_NAMES = {
//   '01': 'Jammu & Kashmir', '02': 'Himachal Pradesh', '03': 'Punjab',
//   '04': 'Chandigarh', '05': 'Uttarakhand', '06': 'Haryana', '07': 'Delhi',
//   '08': 'Rajasthan', '09': 'Uttar Pradesh', '10': 'Bihar', '11': 'Sikkim',
//   '12': 'Arunachal Pradesh', '13': 'Nagaland', '14': 'Manipur', '15': 'Mizoram',
//   '16': 'Tripura', '17': 'Meghalaya', '18': 'Assam', '19': 'West Bengal',
//   '20': 'Jharkhand', '21': 'Odisha', '22': 'Chhattisgarh', '23': 'Madhya Pradesh',
//   '24': 'Gujarat', '26': 'Dadra & Nagar Haveli', '27': 'Maharashtra',
//   '29': 'Karnataka', '30': 'Goa', '31': 'Lakshadweep', '32': 'Kerala',
//   '33': 'Tamil Nadu', '34': 'Puducherry', '35': 'Andaman & Nicobar',
//   '36': 'Telangana', '37': 'Andhra Pradesh', '38': 'Ladakh'
// };
//
// async function verifyGSTIN(gstin) {
//   try {
//     const cacheKey = `gst:${gstin}`;
//     const cached = await cacheGet(cacheKey);
//     if (cached) return { success: true, data: cached };
//
//     const response = await axios.post(
//       process.env.GST_PROVIDER_URL,
//       { id_number: gstin },
//       {
//         headers: {
//           'Authorization': `Bearer ${process.env.GST_PROVIDER_KEY}`,
//           'Content-Type': 'application/json'
//         },
//         timeout: 30000
//       }
//     );
//
//     if (!response.data || response.data.status_code !== 200) {
//       return { success: false, statusCode: 404, errorCode: 'GSTIN_NOT_FOUND', message: 'GSTIN not found' };
//     }
//
//     const d = response.data.data;
//
//     const data = {
//       gstin,
//       legal_name: d.legal_name || d.lgnm,
//       trade_name: d.trade_name || d.tradeNam,
//       status: d.status || d.sts,
//       registration_date: d.registration_date || d.rgdt,
//       last_updated: d.last_update || d.lstupdt,
//       business_type: d.business_type || d.ctb,
//       constitution: d.constitution || d.ctj,
//       state: d.state || STATE_NAMES[gstin.substring(0, 2)],
//       state_code: gstin.substring(0, 2),
//       pan: gstin.substring(2, 12),
//       address: {
//         building: d.address?.bno || d.pradr?.addr?.bno || '',
//         floor: d.address?.flno || d.pradr?.addr?.flno || '',
//         street: d.address?.st || d.pradr?.addr?.st || '',
//         locality: d.address?.loc || d.pradr?.addr?.loc || '',
//         city: d.address?.city || d.pradr?.addr?.city || '',
//         district: d.address?.dst || d.pradr?.addr?.dst || '',
//         state: d.address?.stcd || d.pradr?.addr?.stcd || '',
//         pincode: d.address?.pncd || d.pradr?.addr?.pncd || ''
//       },
//       nature_of_business: d.nature_of_business || d.nba || [],
//       filing_status: {
//         gstr1: d.filing_status?.gstr1 || 'Unknown',
//         gstr3b: d.filing_status?.gstr3b || 'Unknown',
//         last_filed_date: d.filing_status?.last_filed || null
//       }
//     };
//
//     await cacheSet(cacheKey, data, 86400);
//     return { success: true, data };
//
//   } catch (error) {
//     // Surface provider details when available to avoid generic 500s
//     if (error.code === 'ECONNABORTED') {
//       return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
//     }
//
//     const providerStatus = error.response?.status || error.response?.data?.status_code;
//     const providerMessage = error.response?.data?.message || error.message;
//
//     if (providerStatus) {
//       return {
//         success: false,
//         statusCode: providerStatus,
//         errorCode: error.response?.data?.error_code || 'PROVIDER_ERROR',
//         message: providerMessage || 'Provider error'
//       };
//     }
//
//     return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
//   }
// }
//
// module.exports = { verifyGSTIN };

const { cacheGet, cacheSet } = require('../cache/redis');
const { query } = require('../database/connection');

const STATE_NAMES = {
  '01': 'Jammu & Kashmir', '02': 'Himachal Pradesh', '03': 'Punjab',
  '04': 'Chandigarh', '05': 'Uttarakhand', '06': 'Haryana', '07': 'Delhi',
  '08': 'Rajasthan', '09': 'Uttar Pradesh', '10': 'Bihar', '11': 'Sikkim',
  '12': 'Arunachal Pradesh', '13': 'Nagaland', '14': 'Manipur', '15': 'Mizoram',
  '16': 'Tripura', '17': 'Meghalaya', '18': 'Assam', '19': 'West Bengal',
  '20': 'Jharkhand', '21': 'Odisha', '22': 'Chhattisgarh', '23': 'Madhya Pradesh',
  '24': 'Gujarat', '26': 'Dadra & Nagar Haveli', '27': 'Maharashtra',
  '29': 'Karnataka', '30': 'Goa', '31': 'Lakshadweep', '32': 'Kerala',
  '33': 'Tamil Nadu', '34': 'Puducherry', '35': 'Andaman & Nicobar',
  '36': 'Telangana', '37': 'Andhra Pradesh', '38': 'Ladakh'
};

async function verifyGSTIN(gstin) {
  try {
    const cacheKey = `gst:${gstin}`;
    const cached = await cacheGet(cacheKey);
    if (cached) return { success: true, data: cached };

    // Look up GSTIN in the local database seeded from gst.csv
    const result = await query(
      'SELECT * FROM gst_registrations WHERE gstin = $1',
      [gstin]
    );

    if (!result.rows.length) {
      return { success: false, statusCode: 404, errorCode: 'GSTIN_NOT_FOUND', message: 'GSTIN not found' };
    }

    const d = result.rows[0];

    // nature_of_business in the CSV is stored as a single string like "Manufacturing|Services"
    const natureOfBusinessArray = d.nature_of_business
      ? String(d.nature_of_business).split('|').map((v) => v.trim()).filter(Boolean)
      : [];

    const data = {
      gstin,
      legal_name: d.legal_name,
      trade_name: d.trade_name,
      status: d.status,
      registration_date: d.registration_date,
      last_updated: d.last_updated,
      business_type: d.business_type,
      constitution: d.constitution,
      state: d.state || STATE_NAMES[gstin.substring(0, 2)],
      state_code: gstin.substring(0, 2),
      pan: gstin.substring(2, 12),
      address: {
        building: d.address_building || '',
        floor: d.address_floor || '',
        street: d.address_street || '',
        locality: d.address_locality || '',
        city: d.address_city || '',
        district: d.address_district || '',
        state: d.address_state_code || '',
        pincode: d.address_pincode || ''
      },
      nature_of_business: natureOfBusinessArray,
      filing_status: {
        gstr1: d.filing_status_gstr1 || 'Unknown',
        gstr3b: d.filing_status_gstr3b || 'Unknown',
        last_filed_date: d.filing_last_filed_date || null
      }
    };

    await cacheSet(cacheKey, data, 86400);
    return { success: true, data };

  } catch (error) {
    // Surface provider details when available to avoid generic 500s
    if (error.code === 'ECONNABORTED') {
      return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
    }

    const providerStatus = error.response?.status || error.response?.data?.status_code;
    const providerMessage = error.response?.data?.message || error.message;

    if (providerStatus) {
      return {
        success: false,
        statusCode: providerStatus,
        errorCode: error.response?.data?.error_code || 'PROVIDER_ERROR',
        message: providerMessage || 'Provider error'
      };
    }

    return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
  }
}

module.exports = { verifyGSTIN };
140
src/services/panService.js
Normal file
@ -0,0 +1,140 @@
const axios = require('axios');
const { cacheGet, cacheSet } = require('../cache/redis');
const { query } = require('../database/connection');

const PAN_TYPES = {
  'P': 'Individual',
  'C': 'Company',
  'H': 'HUF',
  'A': 'AOP',
  'B': 'BOI',
  'G': 'Government',
  'J': 'Artificial Juridical Person',
  'L': 'Local Authority',
  'F': 'Firm/Partnership',
  'T': 'Trust'
};

async function verifyPAN(pan, name = null, dob = null) {
  try {
    const cacheKey = `pan:${pan}`;
    const cached = await cacheGet(cacheKey);
    if (cached) {
      if (name) {
        cached.name_match = cached.name === name.toUpperCase();
        cached.name_match_score = cached.name_match ? 100 : 0;
      }
      return { success: true, data: cached };
    }

    // Guard against a missing name (it defaults to null) before splitting;
    // the original `name.trim()` threw a TypeError when no name was supplied.
    const parts = (name || 'DUMMY NAME').trim().split(' ');

    const firstName = parts[0] || 'DUMMY';
    const lastName = parts.slice(1).join(' ') || 'NAME';

    // Temporarily mock the external PAN provider response for testing
    const mockResponseData = {
      status_code: 200,
      data: {
        name: name || 'DUMMY NAME',
        status: 'ACTIVE',
        type: PAN_TYPES[pan[3]] || 'Individual',
        full_name: name || 'DUMMY NAME',
        last_name: lastName,
        first_name: firstName
      }
    };
    const response = { data: mockResponseData };

    // Actual external API call, disabled while the mock above is in use:
    // const response = await axios.post(
    //   process.env.PAN_PROVIDER_URL,
    //   {
    //     id_number: pan,
    //     name: name,
    //     dob: dob
    //   },
    //   {
    //     headers: {
    //       'Authorization': `Bearer ${process.env.PAN_PROVIDER_KEY}`,
    //       'Content-Type': 'application/json'
    //     },
    //     timeout: 30000
    //   }
    // );

    if (!response.data || response.data.status_code !== 200) {
      return { success: false, statusCode: 404, errorCode: 'PAN_NOT_FOUND', message: 'PAN not found' };
    }

    const d = response.data.data;
    const panType = PAN_TYPES[pan[3]] || 'Unknown';

    const data = {
      pan,
      name: d.name || d.full_name || '',
      status: d.status || 'Valid',
      type: d.type || panType,
      name_match: name ? (d.name || d.full_name || '').toUpperCase() === name.toUpperCase() : null,
      name_match_score: name ? ((d.name || d.full_name || '').toUpperCase() === name.toUpperCase() ? 100 : 0) : null,
      last_name: d.last_name || d.surname || '',
      first_name: d.first_name || '',
      middle_name: d.middle_name || '',
      title: d.title || ''
    };

    await cacheSet(cacheKey, data, 86400);

    // Persist PAN verification result to Postgres (best-effort, non-blocking)
    try {
      await query(
        `INSERT INTO pan_verifications (
          pan,
          name,
          status,
          pan_type,
          name_match,
          name_match_score,
          requested_by
        ) VALUES ($1, $2, $3, $4, $5, $6, $7)`,
        [
          data.pan,
          data.name,
          data.status,
          data.type,
          data.name_match,
          data.name_match_score,
          null // requested_by (user id) - can be wired from the route later if needed
        ]
      );
    } catch (dbError) {
      console.error('Failed to store PAN verification in database:', dbError.message || dbError);
    }

    return { success: true, data };

  } catch (error) {
    // Surface provider details when available to avoid generic 500s
    if (error.code === 'ECONNABORTED') {
      return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
    }

    const providerStatus = error.response?.status || error.response?.data?.status_code;
    const providerMessage = error.response?.data?.message || error.message;

    if (providerStatus) {
      return {
        success: false,
        statusCode: providerStatus,
        errorCode: error.response?.data?.error_code || 'PROVIDER_ERROR',
        message: providerMessage || 'Provider error'
      };
    }

    return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
  }
}

module.exports = { verifyPAN };