Final Version - Ready for Deployment
commit dc49d6f432

.env.example (new file, 23 lines)
@@ -0,0 +1,23 @@
NODE_ENV=development
PORT=3000

DATABASE_URL=postgresql://user:pass@host:5432/verify_india
REDIS_URL=redis://localhost:6379

JWT_SECRET=your-secret-key
API_KEY_PREFIX=vf_live_

GST_PROVIDER_URL=
GST_PROVIDER_KEY=

PAN_PROVIDER_URL=
PAN_PROVIDER_KEY=

BANK_PROVIDER_URL=
BANK_PROVIDER_KEY=

RAZORPAY_KEY_ID=
RAZORPAY_KEY_SECRET=

RESEND_API_KEY=
FROM_EMAIL=
.gitignore (new file, 35 lines)
@@ -0,0 +1,35 @@
# Dependencies
node_modules/

# Environment variables (NEVER upload secrets)
.env
.env.local
.env.production

# IDE/Editor
.vscode/
.idea/
*.swp
*.swo

# OS files
.DS_Store
Thumbs.db

# Logs
logs/
*.log
npm-debug.log*

# Testing
coverage/
.nyc_output/

# Build outputs
dist/
build/

# Temporary files
tmp/
temp/
*.tmp
README.md (new file, 95 lines)
@@ -0,0 +1,95 @@
# VerifyIndia API

A comprehensive verification API for Indian documents and data, including IFSC codes, PIN codes, GST numbers, PAN cards, and bank account verification.

## Tech Stack

- **Runtime**: Node.js
- **Framework**: Express.js
- **Database**: PostgreSQL
- **Cache**: Redis
- **Authentication**: JWT (JSON Web Tokens)

## Project Structure

```
verify-india-api/
├── src/
│   ├── index.js              # Application entry point
│   ├── routes/               # API route handlers
│   │   ├── auth.js           # Authentication routes
│   │   ├── ifsc.js           # IFSC code lookup
│   │   ├── pincode.js        # PIN code lookup
│   │   ├── gst.js            # GST verification
│   │   ├── pan.js            # PAN verification
│   │   ├── bank.js           # Bank account verification
│   │   └── user.js           # User management
│   ├── middleware/           # Express middleware
│   │   ├── auth.js           # Authentication middleware
│   │   ├── rateLimit.js      # Rate limiting
│   │   └── errorHandler.js   # Error handling
│   ├── services/             # Business logic services
│   │   ├── gstService.js     # GST verification service
│   │   ├── panService.js     # PAN verification service
│   │   ├── bankService.js    # Bank verification service
│   │   └── analytics.js      # Analytics service
│   ├── database/             # Database configuration
│   │   ├── connection.js     # Database connection
│   │   └── setup.js          # Database setup scripts
│   └── cache/                # Caching layer
│       └── redis.js          # Redis configuration
├── data/                     # Static data files
│   ├── ifsc.json             # IFSC code database
│   └── pincodes.json         # PIN code database
├── package.json
├── .env.example
└── README.md
```

## Getting Started

### Prerequisites

- Node.js >= 20.0.0
- PostgreSQL
- Redis

### Installation

1. Clone the repository
2. Install dependencies:
```bash
npm install
```
3. Copy `.env.example` to `.env` and configure your environment variables:
```bash
cp .env.example .env
```
4. Create the database tables and load the CSV data: run `npm run db:setup`, then `npm run db:import-csv`.
5. Start the development server:
```bash
npm run dev
```

## Environment Variables

| Variable | Description | Example |
|----------|-------------|---------|
| `PORT` | Server port | `3000` |
| `DATABASE_URL` | PostgreSQL connection string | `postgresql://user:pass@localhost:5432/db` |
| `REDIS_URL` | Redis connection string | `redis://localhost:6379` |
| `JWT_SECRET` | Secret key for JWT signing | `your_secret_key` |
| `API_KEY_PREFIX` | Prefix for API keys | `vi_` |

## API Endpoints

- `/v1/auth` - Authentication endpoints
- `/v1/ifsc` - IFSC code lookup
- `/v1/pincode` - PIN code lookup
- `/v1/gst` - GST verification
- `/v1/pan` - PAN verification
- `/v1/bank` - Bank account verification
- `/v1/user` - User management
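
For example, a minimal IFSC lookup from Node. This is an illustrative sketch only: it assumes a locally running server on port 3000, an API key issued by `POST /v1/auth/signup`, and the example IFSC value is a placeholder.

```js
// Illustrative only: look up an IFSC code with the X-API-Key header.
const apiKey = 'YOUR_API_KEY';

async function lookupIfsc(ifsc) {
  const res = await fetch(`http://localhost:3000/v1/ifsc/${ifsc}`, {
    headers: { 'X-API-Key': apiKey }
  });
  return res.json();
}

lookupIfsc('SBIN0000001').then(console.log);
```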

## License

ISC
data/ifsc.csv (new file, 164837 lines)
File diff suppressed because it is too large.
data/ifsc.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "_comment": "TODO: Add IFSC code database"
}
data/pan.csv (new file, 1001 lines)
File diff suppressed because it is too large.
data/pincode.csv (new file, 165628 lines)
File diff suppressed because it is too large.
data/pincodes.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "_comment": "TODO: Add PIN code database"
}
package-lock.json (generated, new file, 1810 lines)
File diff suppressed because it is too large.
package.json (new file, 44 lines)
@@ -0,0 +1,44 @@
{
  "name": "verify-india-api",
  "version": "1.0.0",
  "description": "VerifyIndia API - A comprehensive verification API for Indian documents and data",
  "main": "src/index.js",
  "scripts": {
    "start": "node src/index.js",
    "dev": "nodemon src/index.js",
    "db:setup": "node src/database/setup.js",
    "db:import-csv": "node src/database/importCSV.js"
  },
  "keywords": [
    "verification",
    "india",
    "api",
    "ifsc",
    "pincode",
    "gst",
    "pan",
    "bank"
  ],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "axios": "^1.6.2",
    "bcryptjs": "^2.4.3",
    "cors": "^2.8.5",
    "csv-parser": "^3.2.0",
    "dotenv": "^16.6.1",
    "express": "^4.18.2",
    "helmet": "^7.1.0",
    "jsonwebtoken": "^9.0.2",
    "morgan": "^1.10.0",
    "pg": "^8.16.3",
    "redis": "^4.6.12",
    "uuid": "^9.0.1"
  },
  "devDependencies": {
    "nodemon": "^3.0.2"
  },
  "engines": {
    "node": ">=20.0.0"
  }
}
setup.ps1 (new file, 62 lines)
@@ -0,0 +1,62 @@
# VerifyIndia API - Project Setup Script (PowerShell)
# This script creates the complete folder structure for the project

Write-Host "🚀 Creating VerifyIndia API project structure..." -ForegroundColor Cyan

# Create directories
New-Item -ItemType Directory -Force -Path "src/routes" | Out-Null
New-Item -ItemType Directory -Force -Path "src/middleware" | Out-Null
New-Item -ItemType Directory -Force -Path "src/services" | Out-Null
New-Item -ItemType Directory -Force -Path "src/database" | Out-Null
New-Item -ItemType Directory -Force -Path "src/cache" | Out-Null
New-Item -ItemType Directory -Force -Path "data" | Out-Null

# Create source files
New-Item -ItemType File -Force -Path "src/index.js" | Out-Null

# Create route files
New-Item -ItemType File -Force -Path "src/routes/auth.js" | Out-Null
New-Item -ItemType File -Force -Path "src/routes/ifsc.js" | Out-Null
New-Item -ItemType File -Force -Path "src/routes/pincode.js" | Out-Null
New-Item -ItemType File -Force -Path "src/routes/gst.js" | Out-Null
New-Item -ItemType File -Force -Path "src/routes/pan.js" | Out-Null
New-Item -ItemType File -Force -Path "src/routes/bank.js" | Out-Null
New-Item -ItemType File -Force -Path "src/routes/user.js" | Out-Null

# Create middleware files
New-Item -ItemType File -Force -Path "src/middleware/auth.js" | Out-Null
New-Item -ItemType File -Force -Path "src/middleware/rateLimit.js" | Out-Null
New-Item -ItemType File -Force -Path "src/middleware/errorHandler.js" | Out-Null

# Create service files
New-Item -ItemType File -Force -Path "src/services/gstService.js" | Out-Null
New-Item -ItemType File -Force -Path "src/services/panService.js" | Out-Null
New-Item -ItemType File -Force -Path "src/services/bankService.js" | Out-Null
New-Item -ItemType File -Force -Path "src/services/analytics.js" | Out-Null

# Create database files
New-Item -ItemType File -Force -Path "src/database/connection.js" | Out-Null
New-Item -ItemType File -Force -Path "src/database/setup.js" | Out-Null

# Create cache files
New-Item -ItemType File -Force -Path "src/cache/redis.js" | Out-Null

# Create data files
New-Item -ItemType File -Force -Path "data/ifsc.json" | Out-Null
New-Item -ItemType File -Force -Path "data/pincodes.json" | Out-Null

# Create root config files
New-Item -ItemType File -Force -Path "package.json" | Out-Null
New-Item -ItemType File -Force -Path ".env.example" | Out-Null
New-Item -ItemType File -Force -Path "README.md" | Out-Null

Write-Host "✅ Project structure created successfully!" -ForegroundColor Green
Write-Host ""
Write-Host "📁 Directory structure:" -ForegroundColor Yellow
Get-ChildItem -Recurse -File | Where-Object { $_.Extension -in ".js", ".json", ".md" -or $_.Name -like ".env*" } | ForEach-Object { Write-Host $_.FullName.Replace((Get-Location).Path + "\", "") }
Write-Host ""
Write-Host "📦 Next steps:" -ForegroundColor Yellow
Write-Host "  1. Edit package.json with your dependencies"
Write-Host "  2. Copy .env.example to .env and configure variables"
Write-Host "  3. Run 'npm install' to install dependencies"
Write-Host "  4. Run 'npm start' to start the development server"
setup.sh (new file, 64 lines)
@@ -0,0 +1,64 @@
#!/bin/bash

# VerifyIndia API - Project Setup Script
# This script creates the complete folder structure for the project

echo "🚀 Creating VerifyIndia API project structure..."

# Create directories
mkdir -p src/routes
mkdir -p src/middleware
mkdir -p src/services
mkdir -p src/database
mkdir -p src/cache
mkdir -p data

# Create source files
touch src/index.js

# Create route files
touch src/routes/auth.js
touch src/routes/ifsc.js
touch src/routes/pincode.js
touch src/routes/gst.js
touch src/routes/pan.js
touch src/routes/bank.js
touch src/routes/user.js

# Create middleware files
touch src/middleware/auth.js
touch src/middleware/rateLimit.js
touch src/middleware/errorHandler.js

# Create service files
touch src/services/gstService.js
touch src/services/panService.js
touch src/services/bankService.js
touch src/services/analytics.js

# Create database files
touch src/database/connection.js
touch src/database/setup.js

# Create cache files
touch src/cache/redis.js

# Create data files
touch data/ifsc.json
touch data/pincodes.json

# Create root config files
touch package.json
touch .env.example
touch README.md

echo "✅ Project structure created successfully!"
echo ""
echo "📁 Directory structure:"
find . -type f \( -name "*.js" -o -name "*.json" -o -name ".env*" -o -name "*.md" \) | sort
echo ""
echo "📦 Next steps:"
echo "  1. Edit package.json with your dependencies"
echo "  2. Copy .env.example to .env and configure variables"
echo "  3. Run 'npm install' to install dependencies"
echo "  4. Run 'npm start' to start the development server"
src/cache/redis.js (new file, 39 lines)
@@ -0,0 +1,39 @@
/**
 * Redis Cache Module
 * Handles Redis connection and caching operations
 */

const { createClient } = require('redis');

let redisClient = null;

async function connectRedis() {
  redisClient = createClient({ url: process.env.REDIS_URL });
  redisClient.on('error', (err) => console.error('Redis Error:', err));
  await redisClient.connect();
  return redisClient;
}

function getRedisClient() {
  return redisClient;
}

async function cacheGet(key) {
  if (!redisClient) return null;
  const data = await redisClient.get(key);
  return data ? JSON.parse(data) : null;
}

async function cacheSet(key, value, expirySeconds = 3600) {
  if (!redisClient) return false;
  await redisClient.setEx(key, expirySeconds, JSON.stringify(value));
  return true;
}

async function cacheDelete(key) {
  if (!redisClient) return false;
  await redisClient.del(key);
  return true;
}

module.exports = { connectRedis, getRedisClient, cacheGet, cacheSet, cacheDelete };
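
A minimal usage sketch for this cache module. It assumes `REDIS_URL` is set, that values are JSON-serializable, and that it runs from the project root; the key name is illustrative.

```js
// Illustrative only: connect once at startup, then read/write JSON values.
const { connectRedis, cacheGet, cacheSet } = require('./src/cache/redis');

async function demo() {
  await connectRedis();                                        // uses process.env.REDIS_URL
  await cacheSet('ifsc:SBIN0000001', { bank: 'SBI' }, 300);    // cache for 5 minutes
  const hit = await cacheGet('ifsc:SBIN0000001');              // -> { bank: 'SBI' } or null
  console.log(hit);
}

demo();
```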
src/database/connection.js (new file, 31 lines)
@@ -0,0 +1,31 @@
/**
 * Database Connection Module
 * Handles PostgreSQL connection using pg library
 */

const { Pool } = require('pg');

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: process.env.NODE_ENV === 'production' ? { rejectUnauthorized: false } : false,
  max: 20,
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 2000,
});

async function connectDB() {
  const client = await pool.connect();
  await client.query('SELECT NOW()');
  client.release();
  return true;
}

async function query(text, params) {
  return await pool.query(text, params);
}

async function getClient() {
  return await pool.connect();
}

module.exports = { connectDB, query, getClient, pool };
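
A short usage sketch for the pooled `query` helper. The table and columns are the ones created by `src/database/setup.js`; the require path assumes the project root.

```js
// Illustrative only: a parameterized query through the shared pool.
const { connectDB, query } = require('./src/database/connection');

async function findBranch(ifsc) {
  await connectDB();   // sanity-checks the connection once
  const result = await query(
    'SELECT bank_name, branch, city FROM ifsc_codes WHERE ifsc = $1',
    [ifsc]
  );
  return result.rows[0] || null;
}

findBranch('SBIN0000001').then(console.log);
```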
src/database/importCSV.js (new file, 394 lines)
@@ -0,0 +1,394 @@
/**
 * CSV Data Import Script
 * Imports IFSC and Pincode data from CSV files into PostgreSQL
 * Usage: npm run db:import-csv
 */

require('dotenv').config();
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');
const { pool } = require('./connection');

const BATCH_SIZE = 500;

// Helper function to truncate decimal places and avoid numeric overflow
function truncateDecimal(value, decimals) {
  if (!value || value === '') return null;
  const num = parseFloat(value);
  if (isNaN(num)) return null;
  return Math.round(num * Math.pow(10, decimals)) / Math.pow(10, decimals);
}

// ============================================
// IFSC IMPORT
// ============================================
async function importIFSC() {
  const filePath = path.join(__dirname, '../../data/ifsc.csv');

  if (!fs.existsSync(filePath)) {
    console.log('⚠️ IFSC CSV file not found at:', filePath);
    return;
  }

  console.log('📦 Starting IFSC import...');

  let batch = [];
  let totalInserted = 0;
  let isFirstRow = true;

  return new Promise((resolve, reject) => {
    fs.createReadStream(filePath)
      .pipe(csv())
      .on('data', async (row) => {
        // DEBUG: Log first row to see CSV column names
        if (isFirstRow) {
          console.log('📋 IFSC CSV Headers:', Object.keys(row));
          console.log('📋 First Row Sample:', row);
          isFirstRow = false;
        }

        // ============================================
        // COLUMN MAPPING - CUSTOMIZE HERE!
        // Map your CSV column names to database columns
        // Left side = DB column, Right side = CSV column name
        // ============================================
        const mappedRow = {
          ifsc: row['IFSC'] || row['ifsc'] || row['IFSC_CODE'] || row['ifsc_code'],
          bank_name: row['BANK'] || row['bank'] || row['BANK_NAME'] || row['bank_name'],
          branch: row['BRANCH'] || row['branch'] || row['BRANCH_NAME'] || row['branch_name'],
          address: row['ADDRESS'] || row['address'] || row['FULL_ADDRESS'],
          city: row['CITY'] || row['city'] || row['CITY_NAME'],
          district: row['DISTRICT'] || row['district'] || row['DISTRICT_NAME'],
          state: row['STATE'] || row['state'] || row['STATE_NAME'],
          contact: row['CONTACT'] || row['contact'] || row['PHONE'] || row['phone'],
          // Flags fall back to the schema defaults (UPI false; RTGS/NEFT/IMPS true) when the column is missing
          upi_enabled: row['UPI'] === 'true' || row['UPI'] === 'TRUE' || row['UPI'] === '1',
          rtgs_enabled: row['RTGS'] ? (row['RTGS'] === 'true' || row['RTGS'] === 'TRUE' || row['RTGS'] === '1') : true,
          neft_enabled: row['NEFT'] ? (row['NEFT'] === 'true' || row['NEFT'] === 'TRUE' || row['NEFT'] === '1') : true,
          imps_enabled: row['IMPS'] ? (row['IMPS'] === 'true' || row['IMPS'] === 'TRUE' || row['IMPS'] === '1') : true,
          micr_code: row['MICR'] || row['micr'] || row['MICR_CODE'] || null,
          swift_code: row['SWIFT'] || row['swift'] || row['SWIFT_CODE'] || null
        };

        // Skip if no IFSC code
        if (!mappedRow.ifsc) return;

        batch.push(mappedRow);

        // Insert batch when full
        if (batch.length >= BATCH_SIZE) {
          const currentBatch = [...batch];
          batch = [];
          await insertIFSCBatch(currentBatch);
          totalInserted += currentBatch.length;
          process.stdout.write(`\r Inserted: ${totalInserted} records`);
        }
      })
      .on('end', async () => {
        // Insert remaining rows
        if (batch.length > 0) {
          await insertIFSCBatch(batch);
          totalInserted += batch.length;
        }
        console.log(`\n✅ IFSC import complete! Total records: ${totalInserted}`);
        resolve(totalInserted);
      })
      .on('error', (error) => {
        console.error('❌ IFSC import error:', error.message);
        reject(error);
      });
  });
}

async function insertIFSCBatch(rows) {
  if (rows.length === 0) return;

  const values = [];
  const placeholders = [];
  let paramIndex = 1;

  for (const row of rows) {
    placeholders.push(`($${paramIndex}, $${paramIndex + 1}, $${paramIndex + 2}, $${paramIndex + 3}, $${paramIndex + 4}, $${paramIndex + 5}, $${paramIndex + 6}, $${paramIndex + 7}, $${paramIndex + 8}, $${paramIndex + 9}, $${paramIndex + 10}, $${paramIndex + 11}, $${paramIndex + 12}, $${paramIndex + 13})`);
    values.push(
      row.ifsc,
      row.bank_name,
      row.branch,
      row.address,
      row.city,
      row.district,
      row.state,
      row.contact,
      row.upi_enabled,
      row.rtgs_enabled,
      row.neft_enabled,
      row.imps_enabled,
      row.micr_code,
      row.swift_code
    );
    paramIndex += 14;
  }

  const query = `
    INSERT INTO ifsc_codes (ifsc, bank_name, branch, address, city, district, state, contact, upi_enabled, rtgs_enabled, neft_enabled, imps_enabled, micr_code, swift_code)
    VALUES ${placeholders.join(', ')}
    ON CONFLICT (ifsc) DO NOTHING
  `;

  try {
    await pool.query(query, values);
  } catch (error) {
    console.error('\n❌ Batch insert error:', error.message);
  }
}

// ============================================
// PINCODE IMPORT
// ============================================
async function importPincode() {
  const filePath = path.join(__dirname, '../../data/pincode.csv');

  if (!fs.existsSync(filePath)) {
    console.log('⚠️ Pincode CSV file not found at:', filePath);
    return;
  }

  console.log('📦 Starting Pincode import...');

  let batch = [];
  let totalInserted = 0;
  let isFirstRow = true;

  return new Promise((resolve, reject) => {
    fs.createReadStream(filePath)
      .pipe(csv())
      .on('data', async (row) => {
        // DEBUG: Log first row to see CSV column names
        if (isFirstRow) {
          console.log('📋 Pincode CSV Headers:', Object.keys(row));
          console.log('📋 First Row Sample:', row);
          isFirstRow = false;
        }

        // ============================================
        // COLUMN MAPPING - CUSTOMIZE HERE!
        // Map your CSV column names to database columns
        // Left side = DB column, Right side = CSV column name
        // ============================================
        const mappedRow = {
          pincode: row['pincode'] || row['Pincode'] || row['PINCODE'] || row['PIN_CODE'],
          office_name: row['officename'] || row['OfficeName'] || row['OFFICE_NAME'] || row['office_name'] || row['PostOfficeName'],
          office_type: row['officetype'] || row['officeType'] || row['OfficeType'] || row['OFFICE_TYPE'] || row['office_type'],
          district: row['district'] || row['Districtname'] || row['District'] || row['DISTRICT'] || row['district_name'],
          division: row['divisionname'] || row['Division'] || row['DIVISION'] || row['division'],
          region: row['regionname'] || row['Region'] || row['REGION'] || row['region'],
          state: row['statename'] || row['state'] || row['State'] || row['STATE'] || row['state_name'],
          // Truncate lat/long to 6 decimal places to prevent numeric overflow
          latitude: truncateDecimal(row['latitude'] || row['Latitude'] || row['LAT'], 6),
          longitude: truncateDecimal(row['longitude'] || row['Longitude'] || row['LNG'] || row['LONG'], 6)
        };

        // Skip if no pincode
        if (!mappedRow.pincode) return;

        batch.push(mappedRow);

        // Insert batch when full
        if (batch.length >= BATCH_SIZE) {
          const currentBatch = [...batch];
          batch = [];
          await insertPincodeBatch(currentBatch);
          totalInserted += currentBatch.length;
          process.stdout.write(`\r Inserted: ${totalInserted} records`);
        }
      })
      .on('end', async () => {
        // Insert remaining rows
        if (batch.length > 0) {
          await insertPincodeBatch(batch);
          totalInserted += batch.length;
        }
        console.log(`\n✅ Pincode import complete! Total records: ${totalInserted}`);
        resolve(totalInserted);
      })
      .on('error', (error) => {
        console.error('❌ Pincode import error:', error.message);
        reject(error);
      });
  });
}

async function insertPincodeBatch(rows) {
  if (rows.length === 0) return;

  const values = [];
  const placeholders = [];
  let paramIndex = 1;

  for (const row of rows) {
    placeholders.push(`($${paramIndex}, $${paramIndex + 1}, $${paramIndex + 2}, $${paramIndex + 3}, $${paramIndex + 4}, $${paramIndex + 5}, $${paramIndex + 6}, $${paramIndex + 7}, $${paramIndex + 8})`);
    values.push(
      row.pincode,
      row.office_name,
      row.office_type,
      row.district,
      row.division,
      row.region,
      row.state,
      row.latitude,
      row.longitude
    );
    paramIndex += 9;
  }

  const query = `
    INSERT INTO pincodes (pincode, office_name, office_type, district, division, region, state, latitude, longitude)
    VALUES ${placeholders.join(', ')}
    ON CONFLICT DO NOTHING
  `;

  try {
    await pool.query(query, values);
  } catch (error) {
    console.error('\n❌ Batch insert error:', error.message);
  }
}

// ============================================
// PAN IMPORT
// ============================================
async function importPAN() {
  const filePath = path.join(__dirname, '../../data/pan.csv');

  if (!fs.existsSync(filePath)) {
    console.log('⚠️ PAN CSV file not found at:', filePath);
    return;
  }

  console.log('📦 Starting PAN import...');

  let batch = [];
  let totalInserted = 0;
  let isFirstRow = true;

  try {
    const stream = fs.createReadStream(filePath).pipe(csv());

    for await (const row of stream) {
      // DEBUG: Log first row to see CSV column names
      if (isFirstRow) {
        console.log('📋 PAN CSV Headers:', Object.keys(row));
        console.log('📋 First Row Sample:', row);
        isFirstRow = false;
      }

      const mappedRow = {
        pan_number: row['pan_number'] || row['PAN_NUMBER'],
        full_name: row['full_name'] || row['FULL_NAME'],
        father_name: row['father_name'] || row['FATHER_NAME'],
        date_of_birth: row['date_of_birth'] || row['DATE_OF_BIRTH'] || row['dob'],
        gender: row['gender'] || row['GENDER'],
        category: row['category'] || row['CATEGORY'] || null,
        status: row['status'] || row['STATUS'] || 'Active'
      };

      // Skip if no PAN number
      if (!mappedRow.pan_number) continue;

      batch.push(mappedRow);

      // Insert batch when full
      if (batch.length >= BATCH_SIZE) {
        const currentBatch = [...batch];
        batch = [];
        await insertPANBatch(currentBatch);
        totalInserted += currentBatch.length;
        process.stdout.write(`\r Inserted: ${totalInserted} records`);
      }
    }

    // Insert remaining rows
    if (batch.length > 0) {
      await insertPANBatch(batch);
      totalInserted += batch.length;
    }

    console.log(`\n✅ PAN import complete! Total records: ${totalInserted}`);
    return totalInserted;

  } catch (error) {
    console.error('❌ PAN import error:', error.message);
    throw error;
  }
}

async function insertPANBatch(rows) {
  if (rows.length === 0) return;

  const values = [];
  const placeholders = [];
  let paramIndex = 1;

  for (const row of rows) {
    placeholders.push(`($${paramIndex}, $${paramIndex + 1}, $${paramIndex + 2}, $${paramIndex + 3}, $${paramIndex + 4}, $${paramIndex + 5}, $${paramIndex + 6})`);
    values.push(
      row.pan_number,
      row.full_name,
      row.father_name,
      row.date_of_birth,
      row.gender,
      row.category,
      row.status
    );
    paramIndex += 7;
  }

  const query = `
    INSERT INTO pan_data (pan_number, full_name, father_name, date_of_birth, gender, category, status)
    VALUES ${placeholders.join(', ')}
    ON CONFLICT (pan_number) DO NOTHING
  `;

  try {
    await pool.query(query, values);
  } catch (error) {
    console.error('\n❌ Batch insert error:', error.message);
  }
}

// ============================================
// MAIN EXECUTION
// ============================================
async function runImport() {
  console.log('🚀 Starting CSV Data Import...\n');
  console.log('='.repeat(50));

  try {
    // Import IFSC codes first
    await importIFSC();

    console.log('');

    // Then import Pincodes
    await importPincode();

    console.log('');

    // Import PAN Data
    await importPAN();

    console.log('\n' + '='.repeat(50));
    console.log('✅ All imports completed successfully!');

  } catch (error) {
    console.error('\n❌ Import failed:', error.message);
    process.exit(1);
  } finally {
    await pool.end();
    process.exit(0);
  }
}

runImport();
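
The PAN importer above streams rows with `for await`, which applies back-pressure and keeps at most one batch insert in flight. A standalone sketch of that pattern (file name and the insert handler are illustrative only, not part of this commit):

```js
// Illustrative pattern only: stream a CSV and insert in fixed-size batches,
// awaiting each insert before reading further rows.
const fs = require('fs');
const csv = require('csv-parser');

async function importInBatches(filePath, insertBatch, batchSize = 500) {
  let batch = [];
  let total = 0;

  for await (const row of fs.createReadStream(filePath).pipe(csv())) {
    batch.push(row);
    if (batch.length >= batchSize) {
      await insertBatch(batch);   // the loop pauses here until the batch is written
      total += batch.length;
      batch = [];
    }
  }
  if (batch.length > 0) {
    await insertBatch(batch);
    total += batch.length;
  }
  return total;
}

// Example call with a hypothetical handler that only counts rows:
importInBatches('data/sample.csv', async (rows) => console.log(rows.length))
  .then((n) => console.log(`done, ${n} rows`));
```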
src/database/setup.js (new file, 244 lines)
@@ -0,0 +1,244 @@
/**
 * Database Schema Setup Script
 * Run this script once to create all required tables
 * Usage: npm run db:setup
 */

require('dotenv').config();
const { pool } = require('./connection');

// SQL statements to create tables (matching Developer Guide Part 2)
const createTablesSQL = `
-- ============================================
-- TABLE: users
-- ============================================
CREATE TABLE IF NOT EXISTS users (
  id SERIAL PRIMARY KEY,
  email VARCHAR(255) UNIQUE NOT NULL,
  password_hash VARCHAR(255) NOT NULL,
  company_name VARCHAR(255),
  phone VARCHAR(20),
  email_verified BOOLEAN DEFAULT FALSE,
  verification_token VARCHAR(255),
  plan VARCHAR(50) DEFAULT 'free',
  plan_started_at TIMESTAMP,
  plan_expires_at TIMESTAMP,
  monthly_quota INTEGER DEFAULT 100,
  calls_this_month INTEGER DEFAULT 0,
  quota_reset_date DATE,
  razorpay_customer_id VARCHAR(100),
  razorpay_subscription_id VARCHAR(100),
  created_at TIMESTAMP DEFAULT NOW(),
  updated_at TIMESTAMP DEFAULT NOW(),
  last_login_at TIMESTAMP,
  is_active BOOLEAN DEFAULT TRUE
);

CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

-- ============================================
-- TABLE: api_keys
-- ============================================
CREATE TABLE IF NOT EXISTS api_keys (
  id SERIAL PRIMARY KEY,
  user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
  key_prefix VARCHAR(20) NOT NULL,
  key_hash VARCHAR(255) NOT NULL,
  key_hint VARCHAR(10),
  name VARCHAR(100) DEFAULT 'Default',
  is_test_key BOOLEAN DEFAULT FALSE,
  is_active BOOLEAN DEFAULT TRUE,
  last_used_at TIMESTAMP,
  total_calls INTEGER DEFAULT 0,
  created_at TIMESTAMP DEFAULT NOW(),
  expires_at TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_api_keys_user ON api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash);

-- ============================================
-- TABLE: api_calls
-- ============================================
CREATE TABLE IF NOT EXISTS api_calls (
  id SERIAL PRIMARY KEY,
  user_id INTEGER REFERENCES users(id),
  api_key_id INTEGER REFERENCES api_keys(id),
  endpoint VARCHAR(100) NOT NULL,
  method VARCHAR(10) NOT NULL,
  request_params JSONB,
  response_status INTEGER,
  response_time_ms INTEGER,
  success BOOLEAN,
  error_message VARCHAR(500),
  credits_used INTEGER DEFAULT 1,
  is_billable BOOLEAN DEFAULT TRUE,
  ip_address VARCHAR(45),
  user_agent VARCHAR(500),
  called_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_api_calls_user ON api_calls(user_id);
CREATE INDEX IF NOT EXISTS idx_api_calls_date ON api_calls(called_at);
CREATE INDEX IF NOT EXISTS idx_api_calls_endpoint ON api_calls(endpoint);

-- ============================================
-- TABLE: ifsc_codes
-- ============================================
CREATE TABLE IF NOT EXISTS ifsc_codes (
  id SERIAL PRIMARY KEY,
  ifsc VARCHAR(11) UNIQUE NOT NULL,
  bank_name VARCHAR(255) NOT NULL,
  branch VARCHAR(255),
  address TEXT,
  city VARCHAR(100),
  district VARCHAR(100),
  state VARCHAR(100),
  contact VARCHAR(100),
  upi_enabled BOOLEAN DEFAULT FALSE,
  rtgs_enabled BOOLEAN DEFAULT TRUE,
  neft_enabled BOOLEAN DEFAULT TRUE,
  imps_enabled BOOLEAN DEFAULT TRUE,
  micr_code VARCHAR(20),
  swift_code VARCHAR(20),
  updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_ifsc ON ifsc_codes(ifsc);

-- ============================================
-- TABLE: pincodes
-- ============================================
CREATE TABLE IF NOT EXISTS pincodes (
  id SERIAL PRIMARY KEY,
  pincode VARCHAR(6) NOT NULL,
  office_name VARCHAR(255),
  office_type VARCHAR(50),
  district VARCHAR(100),
  division VARCHAR(100),
  region VARCHAR(100),
  state VARCHAR(100),
  latitude DECIMAL(10, 8),
  longitude DECIMAL(11, 8),
  updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_pincode ON pincodes(pincode);

-- ============================================
-- TABLE: pan_data
-- ============================================
CREATE TABLE IF NOT EXISTS pan_data (
  pan_number VARCHAR(10) PRIMARY KEY,
  full_name VARCHAR(255),
  father_name VARCHAR(255),
  date_of_birth DATE,
  gender VARCHAR(10),
  category VARCHAR(50),
  status VARCHAR(50),
  updated_at TIMESTAMP DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_pan_number ON pan_data(pan_number);

-- ============================================
-- TABLE: subscriptions
-- ============================================
CREATE TABLE IF NOT EXISTS subscriptions (
  id SERIAL PRIMARY KEY,
  user_id INTEGER REFERENCES users(id),
  razorpay_subscription_id VARCHAR(100),
  razorpay_payment_id VARCHAR(100),
  razorpay_plan_id VARCHAR(100),
  plan_name VARCHAR(50),
  amount DECIMAL(10, 2),
  currency VARCHAR(3) DEFAULT 'INR',
  status VARCHAR(50),
  current_period_start TIMESTAMP,
  current_period_end TIMESTAMP,
  created_at TIMESTAMP DEFAULT NOW(),
  cancelled_at TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_subscriptions_user ON subscriptions(user_id);

-- ============================================
-- TABLE: invoices
-- ============================================
CREATE TABLE IF NOT EXISTS invoices (
  id SERIAL PRIMARY KEY,
  user_id INTEGER REFERENCES users(id),
  subscription_id INTEGER REFERENCES subscriptions(id),
  invoice_number VARCHAR(50) UNIQUE,
  amount DECIMAL(10, 2),
  tax_amount DECIMAL(10, 2),
  total_amount DECIMAL(10, 2),
  currency VARCHAR(3) DEFAULT 'INR',
  status VARCHAR(50),
  razorpay_invoice_id VARCHAR(100),
  razorpay_payment_id VARCHAR(100),
  invoice_date DATE,
  due_date DATE,
  paid_at TIMESTAMP,
  pdf_url VARCHAR(500),
  created_at TIMESTAMP DEFAULT NOW()
);
`;

/**
 * Run database setup
 */
const setupDatabase = async () => {
  console.log('🚀 Starting database setup...\n');

  const client = await pool.connect();

  try {
    // Start transaction
    await client.query('BEGIN');

    // Create tables
    console.log('📦 Creating tables...');
    await client.query(createTablesSQL);
    console.log('✅ Tables created successfully\n');

    // Commit transaction
    await client.query('COMMIT');

    // Display created tables
    const tablesResult = await client.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
        AND table_type = 'BASE TABLE'
      ORDER BY table_name;
    `);

    console.log('📋 Created tables:');
    tablesResult.rows.forEach((row, index) => {
      console.log(`   ${index + 1}. ${row.table_name}`);
    });

    console.log('\n✅ Database setup completed successfully!');

  } catch (error) {
    // Rollback on error
    await client.query('ROLLBACK');
    console.error('\n❌ Database setup failed:', error.message);
    console.error('🔄 Transaction rolled back');
    throw error;
  } finally {
    client.release();
  }
};

// Run setup
setupDatabase()
  .then(() => {
    console.log('\n👋 Exiting...');
    process.exit(0);
  })
  .catch((error) => {
    console.error('\n💥 Fatal error:', error);
    process.exit(1);
  });
src/index.js (new file, 126 lines)
@@ -0,0 +1,126 @@
/**
 * VerifyIndia API - Main Server Entry Point
 */

require('dotenv').config();

const express = require('express');
const helmet = require('helmet');
const cors = require('cors');
const morgan = require('morgan');

const { connectDB } = require('./database/connection');
const { connectRedis } = require('./cache/redis');

// Routes
const authRoutes = require('./routes/auth');
const userRoutes = require('./routes/user');
const ifscRoutes = require('./routes/ifsc');
const pincodeRoutes = require('./routes/pincode');
const gstRoutes = require('./routes/gst');
const panRoutes = require('./routes/pan');
const bankRoutes = require('./routes/bank');

const { errorHandler } = require('./middleware/errorHandler');

const app = express();

// Middleware
app.use(helmet());
app.use(cors());
app.use(express.json());
app.use(morgan('combined'));

// Health check route
app.get('/health', (req, res) => {
  res.json({ status: 'healthy', timestamp: new Date().toISOString() });
});

// API info route
app.get('/', (req, res) => {
  res.json({
    message: 'VerifyIndia API',
    version: 'v1',
    endpoints: {
      auth: {
        signup: 'POST /v1/auth/signup',
        login: 'POST /v1/auth/login'
      },
      user: {
        usage: 'GET /v1/user/usage',
        profile: 'GET /v1/user/profile',
        apiKeys: 'GET /v1/user/api-keys'
      },
      verification: {
        ifsc: 'GET /v1/ifsc/:ifsc_code',
        pincode: 'GET /v1/pincode/:pincode',
        gst: 'GET /v1/gst/verify/:gstin',
        pan: 'POST /v1/pan/verify',
        bank: 'POST /v1/bank/verify'
      }
    }
  });
});

// API Routes
app.use('/v1/auth', authRoutes);
app.use('/v1/user', userRoutes);
app.use('/v1/ifsc', ifscRoutes);
app.use('/v1/pincode', pincodeRoutes);
app.use('/v1/gst', gstRoutes);
app.use('/v1/pan', panRoutes);
app.use('/v1/bank', bankRoutes);

// 404 handler
app.use('*', (req, res) => {
  res.status(404).json({
    success: false,
    error: { code: 'NOT_FOUND', message: `Route ${req.originalUrl} not found` }
  });
});

// Error handler
app.use(errorHandler);

const PORT = process.env.PORT || 3000;

async function startServer() {
  try {
    await connectDB();
    console.log('✅ PostgreSQL connected');

    await connectRedis();
    console.log('✅ Redis connected');

    app.listen(PORT, () => {
      // ASCII "VerifyIndia" startup banner
      console.log(`
\x1b[36m
 __ __ _ __ _ _ _
 \\ \\ / / (_)/ _| (_) | (_)
 \\ \\ / /__ _ __ _| |_ _ _ _ __ __| |_ __ _
 \\ \\/ / _ \ '__|| | _| | | | '_ \\ / _\` | |/ _\` |
 \\ / __/ | | | | | |_| | | | | (_| | | (_| |
 \\/ \\___|_| |_|_| \\__, |_| |_|\\__,_|_|\\__,_|
 __/ |
 |___/
\x1b[0m`);
      console.log(`✅ Server running on port ${PORT}`);
      console.log(`📍 API Key Prefix: ${process.env.API_KEY_PREFIX || 'vi_'}`);
      console.log('');
      console.log('🚀 All API endpoints active:');
      console.log('   • POST /v1/auth/signup');
      console.log('   • POST /v1/auth/login');
      console.log('   • GET  /v1/user/usage');
      console.log('   • GET  /v1/ifsc/:ifsc_code');
      console.log('   • GET  /v1/pincode/:pincode');
      console.log('   • GET  /v1/gst/verify/:gstin');
      console.log('   • POST /v1/pan/verify');
      console.log('   • POST /v1/bank/verify');
    });
  } catch (error) {
    console.error('❌ Failed to start:', error);
    process.exit(1);
  }
}

startServer();
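
A quick smoke test once the server is up. It assumes the default port 3000; `/health` and `/` are the two unauthenticated routes defined above, and the logged shapes are illustrative.

```js
// Illustrative only: hit the health and info routes of a locally running server.
async function smokeTest() {
  const health = await fetch('http://localhost:3000/health');
  console.log(await health.json());            // { status: 'healthy', timestamp: ... }

  const info = await fetch('http://localhost:3000/');
  console.log((await info.json()).endpoints);  // the endpoint map returned by GET /
}

smokeTest();
```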
src/middleware/auth.js (new file, 154 lines)
@@ -0,0 +1,154 @@
/**
 * API Key Authentication Middleware
 * Validates API keys and attaches user info to request
 */

const crypto = require('crypto');
const jwt = require('jsonwebtoken');
const { query } = require('../database/connection');
const { cacheGet, cacheSet } = require('../cache/redis');

/**
 * Authenticate requests using X-API-Key header
 */
async function authenticateApiKey(req, res, next) {
  try {
    const apiKey = req.headers['x-api-key'];

    // Check if API key is provided
    if (!apiKey) {
      return res.status(401).json({
        success: false,
        error: { code: 'MISSING_API_KEY', message: 'API key required' }
      });
    }

    // Validate API key format using env prefix
    const expectedPrefix = process.env.API_KEY_PREFIX || 'vi_';
    if (!apiKey.startsWith(expectedPrefix)) {
      return res.status(401).json({
        success: false,
        error: { code: 'INVALID_API_KEY_FORMAT', message: 'Invalid API key format' }
      });
    }

    // Try to get from Redis cache first
    const cacheKey = `apikey:${apiKey}`;
    let keyData = await cacheGet(cacheKey);

    if (!keyData) {
      // Cache miss - query database
      const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');

      const result = await query(
        `SELECT ak.*, u.plan, u.monthly_quota, u.calls_this_month, u.is_active as user_active
         FROM api_keys ak
         JOIN users u ON ak.user_id = u.id
         WHERE ak.key_hash = $1 AND ak.is_active = true`,
        [keyHash]
      );

      if (result.rows.length === 0) {
        return res.status(401).json({
          success: false,
          error: { code: 'INVALID_API_KEY', message: 'Invalid or inactive API key' }
        });
      }

      keyData = result.rows[0];

      // Cache for 5 minutes
      await cacheSet(cacheKey, keyData, 300);
    }

    // Check if user account is active
    if (!keyData.user_active) {
      return res.status(403).json({
        success: false,
        error: { code: 'ACCOUNT_INACTIVE', message: 'Account inactive' }
      });
    }

    // Check quota
    if (keyData.calls_this_month >= keyData.monthly_quota) {
      return res.status(429).json({
        success: false,
        error: {
          code: 'QUOTA_EXCEEDED',
          message: 'Monthly quota exceeded',
          details: { used: keyData.calls_this_month, limit: keyData.monthly_quota }
        }
      });
    }

    // Attach user info to request
    req.user = {
      id: keyData.user_id,
      plan: keyData.plan,
      apiKeyId: keyData.id,
      isTestKey: keyData.is_test_key,
      quota: keyData.monthly_quota,
      used: keyData.calls_this_month,
      remaining: keyData.monthly_quota - keyData.calls_this_month
    };

    next();
  } catch (error) {
    console.error('Auth error:', error);
    return res.status(500).json({
      success: false,
      error: { code: 'AUTH_ERROR', message: 'Authentication failed' }
    });
  }
}

/**
 * Authenticate requests using JWT Bearer token
 */
async function authenticateJWT(req, res, next) {
  try {
    const authHeader = req.headers['authorization'];

    if (!authHeader || !authHeader.startsWith('Bearer ')) {
      return res.status(401).json({
        success: false,
        error: { code: 'MISSING_TOKEN', message: 'Bearer token required' }
      });
    }

    const token = authHeader.split(' ')[1];

    const decoded = jwt.verify(token, process.env.JWT_SECRET);

    // Get user from database
    const result = await query(
      'SELECT id, email, company_name, plan, monthly_quota, calls_this_month FROM users WHERE id = $1 AND is_active = true',
      [decoded.userId]
    );

    if (result.rows.length === 0) {
      return res.status(401).json({
        success: false,
        error: { code: 'USER_NOT_FOUND', message: 'User not found or inactive' }
      });
    }

    req.user = result.rows[0];
    next();
  } catch (error) {
    if (error.name === 'JsonWebTokenError' || error.name === 'TokenExpiredError') {
      return res.status(401).json({
        success: false,
        error: { code: 'INVALID_TOKEN', message: 'Invalid or expired token' }
      });
    }

    return res.status(500).json({
      success: false,
      error: { code: 'AUTH_ERROR', message: 'Authentication failed' }
    });
  }
}

module.exports = { authenticateApiKey, authenticateJWT };
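
A sketch of how the two guards are mounted. The API-key variant is what `src/routes/bank.js` below actually uses; the JWT variant protecting user routes is an assumption, as is the require path.

```js
// Illustrative only: verification routes behind the API key guard,
// account routes behind the JWT guard.
const express = require('express');
const { authenticateApiKey, authenticateJWT } = require('./src/middleware/auth');

const verifyRouter = express.Router();
verifyRouter.use(authenticateApiKey);        // requires the X-API-Key header
verifyRouter.get('/example', (req, res) => {
  res.json({ plan: req.user.plan, remaining: req.user.remaining });
});

const userRouter = express.Router();
userRouter.use(authenticateJWT);             // requires Authorization: Bearer <token>
```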
src/middleware/errorHandler.js (new file, 28 lines)
@@ -0,0 +1,28 @@
/**
 * Error Handler Middleware
 * Handles errors and provides consistent error responses
 */

function errorHandler(err, req, res, next) {
  console.error('Error:', err.message);

  const statusCode = err.statusCode || 500;
  const message = process.env.NODE_ENV === 'production' && statusCode === 500
    ? 'Internal server error'
    : err.message;

  res.status(statusCode).json({
    success: false,
    error: { code: err.code || 'INTERNAL_ERROR', message }
  });
}

class ApiError extends Error {
  constructor(statusCode, code, message) {
    super(message);
    this.statusCode = statusCode;
    this.code = code;
  }
}

module.exports = { errorHandler, ApiError };
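
`ApiError` is how the route handlers below signal expected failures; they reach `errorHandler` through `next(error)`. A minimal sketch (the `/demo` route and require path are illustrative):

```js
// Illustrative only: a route throws ApiError and lets errorHandler format the response.
const express = require('express');
const { ApiError, errorHandler } = require('./src/middleware/errorHandler');

const app = express();

app.get('/demo', (req, res, next) => {
  try {
    throw new ApiError(400, 'MISSING_FIELDS', 'Email and password required');
  } catch (error) {
    next(error);  // becomes status 400, { success: false, error: { code: 'MISSING_FIELDS', ... } }
  }
});

app.use(errorHandler);
```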
src/middleware/rateLimit.js (new file, 50 lines)
@@ -0,0 +1,50 @@
/**
 * Rate Limiting Middleware
 * Tracks request counts per user using Redis
 */

const { getRedisClient } = require('../cache/redis');

const RATE_LIMITS = {
  free: 10,
  starter: 60,
  growth: 120,
  business: 300,
  enterprise: 1000
};

async function rateLimit(req, res, next) {
  const redis = getRedisClient();
  if (!redis) return next();

  try {
    const userId = req.user?.id || req.ip;
    const plan = req.user?.plan || 'free';
    const limit = RATE_LIMITS[plan] || RATE_LIMITS.free;
    const key = `ratelimit:${userId}`;

    const current = await redis.incr(key);
    if (current === 1) await redis.expire(key, 60);

    const ttl = await redis.ttl(key);

    res.set({
      'X-RateLimit-Limit': limit,
      'X-RateLimit-Remaining': Math.max(0, limit - current),
      'X-RateLimit-Reset': Math.floor(Date.now() / 1000) + ttl
    });

    if (current > limit) {
      return res.status(429).json({
        success: false,
        error: { code: 'RATE_LIMIT_EXCEEDED', message: `Limit: ${limit}/minute`, retry_after: ttl }
      });
    }

    next();
  } catch (error) {
    next();
  }
}

module.exports = { rateLimit };
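
The limiter is a fixed 60-second window keyed per user (or per IP before authentication), with per-plan limits from the table above. A client-side sketch for reading the budget headers; it assumes the route mounts `rateLimit` the way `src/routes/bank.js` does, and the URL and key are placeholders.

```js
// Illustrative only: inspect the rate-limit headers returned by a protected route.
async function checkBudget(apiKey) {
  const res = await fetch('http://localhost:3000/v1/ifsc/SBIN0000001', {
    headers: { 'X-API-Key': apiKey }
  });
  console.log({
    limit: res.headers.get('X-RateLimit-Limit'),          // e.g. 10 on the free plan
    remaining: res.headers.get('X-RateLimit-Remaining'),
    reset: res.headers.get('X-RateLimit-Reset')           // unix timestamp of the window reset
  });
}
```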
156  src/routes/auth.js  Normal file
@@ -0,0 +1,156 @@
/**
 * Authentication Routes
 * Handles user signup and login
 */

const express = require('express');
const router = express.Router();
const bcrypt = require('bcryptjs');
const jwt = require('jsonwebtoken');
const crypto = require('crypto');
const { query } = require('../database/connection');
const { ApiError } = require('../middleware/errorHandler');

/**
 * Generate API Key using environment prefix
 */
function generateApiKey() {
  const prefix = process.env.API_KEY_PREFIX || 'vi_';
  return prefix + crypto.randomBytes(24).toString('hex');
}

/**
 * POST /v1/auth/signup
 * Register a new user
 */
router.post('/signup', async (req, res, next) => {
  try {
    const { email, password, company_name, phone } = req.body;

    if (!email || !password) {
      throw new ApiError(400, 'MISSING_FIELDS', 'Email and password required');
    }

    if (password.length < 8) {
      throw new ApiError(400, 'WEAK_PASSWORD', 'Password must be 8+ characters');
    }

    // Check if email already exists
    const existing = await query('SELECT id FROM users WHERE email = $1', [email.toLowerCase()]);
    if (existing.rows.length > 0) {
      throw new ApiError(409, 'EMAIL_EXISTS', 'Email already registered');
    }

    // Hash password
    const passwordHash = await bcrypt.hash(password, 10);

    // Create user
    const result = await query(
      `INSERT INTO users (email, password_hash, company_name, phone, plan, monthly_quota, quota_reset_date)
       VALUES ($1, $2, $3, $4, 'free', 100, DATE(NOW() + INTERVAL '1 month'))
       RETURNING id, email, company_name, plan`,
      [email.toLowerCase(), passwordHash, company_name, phone]
    );

    const user = result.rows[0];

    // Generate API key with custom prefix from env
    const apiKey = generateApiKey();
    const keyHash = crypto.createHash('sha256').update(apiKey).digest('hex');
    const keyPrefix = process.env.API_KEY_PREFIX || 'vi_';

    // Store API key
    await query(
      `INSERT INTO api_keys (user_id, key_prefix, key_hash, key_hint, name)
       VALUES ($1, $2, $3, $4, 'Default')`,
      [user.id, keyPrefix, keyHash, apiKey.slice(-4)]
    );

    // Generate JWT token
    const token = jwt.sign(
      { userId: user.id, email: user.email },
      process.env.JWT_SECRET,
      { expiresIn: '7d' }
    );

    res.status(201).json({
      success: true,
      data: {
        user: {
          id: user.id,
          email: user.email,
          company_name: user.company_name,
          plan: user.plan
        },
        api_key: apiKey,
        token
      }
    });

  } catch (error) {
    next(error);
  }
});

/**
 * POST /v1/auth/login
 * Authenticate existing user
 */
router.post('/login', async (req, res, next) => {
  try {
    const { email, password } = req.body;

    if (!email || !password) {
      throw new ApiError(400, 'MISSING_FIELDS', 'Email and password required');
    }

    // Find user
    const result = await query(
      'SELECT * FROM users WHERE email = $1 AND is_active = true',
      [email.toLowerCase()]
    );

    if (result.rows.length === 0) {
      throw new ApiError(401, 'INVALID_CREDENTIALS', 'Invalid email or password');
    }

    const user = result.rows[0];

    // Verify password
    const validPassword = await bcrypt.compare(password, user.password_hash);

    if (!validPassword) {
      throw new ApiError(401, 'INVALID_CREDENTIALS', 'Invalid email or password');
    }

    // Update last login
    await query('UPDATE users SET last_login_at = NOW() WHERE id = $1', [user.id]);

    // Generate JWT token
    const token = jwt.sign(
      { userId: user.id, email: user.email },
      process.env.JWT_SECRET,
      { expiresIn: '7d' }
    );

    res.json({
      success: true,
      data: {
        user: {
          id: user.id,
          email: user.email,
          company_name: user.company_name,
          plan: user.plan,
          quota: user.monthly_quota,
          used: user.calls_this_month
        },
        token
      }
    });

  } catch (error) {
    next(error);
  }
});

module.exports = router;

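A minimal client-side sketch of the signup and login flow above. It assumes the router is mounted at `/v1/auth` (as the route comments state) and that the server runs locally; the base URL and credentials below are placeholders, not values from the repository.

```js
// signup-login-example.js — illustrative only; base URL and credentials are placeholders
const axios = require('axios');

const BASE = 'http://localhost:3000/v1/auth';

async function main() {
  // Register: the plaintext API key is returned exactly once, in this response
  const signup = await axios.post(`${BASE}/signup`, {
    email: 'dev@example.com',
    password: 'a-strong-password',
    company_name: 'Example Pvt Ltd'
  });
  console.log('api_key (store it now):', signup.data.data.api_key);

  // Login: returns a JWT valid for 7 days
  const login = await axios.post(`${BASE}/login`, {
    email: 'dev@example.com',
    password: 'a-strong-password'
  });
  console.log('jwt:', login.data.data.token);
}

main().catch(err => console.error(err.response?.data || err.message));
```

The signup response is the only place the raw key appears; the server keeps only a SHA-256 hash plus the last four characters as a hint.
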
85  src/routes/bank.js  Normal file
@@ -0,0 +1,85 @@
/**
 * Bank Account Verification Route
 * Verifies bank account and returns holder details
 */

const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { verifyBankAccount } = require('../services/bankService');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

router.post('/verify', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { account_number, ifsc, name } = req.body;

    if (!account_number || !ifsc) {
      return res.status(400).json({
        success: false,
        error: { code: 'MISSING_FIELDS', message: 'Account number and IFSC are required' }
      });
    }

    // Validate IFSC format
    const ifscRegex = /^[A-Z]{4}0[A-Z0-9]{6}$/;
    if (!ifscRegex.test(ifsc.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_IFSC', message: 'Invalid IFSC format' }
      });
    }

    // Validate account number (basic check: 9-18 digits)
    if (!/^\d{9,18}$/.test(account_number)) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_ACCOUNT', message: 'Account number must be 9-18 digits' }
      });
    }

    const result = await verifyBankAccount(account_number, ifsc.toUpperCase(), name);

    if (!result.success) {
      return res.status(result.statusCode || 500).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_bank_${Date.now()}`,
        credits_used: 2, // Bank verification costs 2 credits
        credits_remaining: req.user.remaining - 2
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/bank/verify',
      method: 'POST',
      params: { account_number: req.body?.account_number, ifsc: req.body?.ifsc },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;

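A possible request against this endpoint. The `authenticateApiKey` middleware is not part of this excerpt, so the `x-api-key` header name, the `VERIFY_API_KEY` variable, and the IFSC value are assumptions; the account number is one of the mock records defined in `src/services/bankService.js`.

```js
// bank-verify-example.js — a sketch, assuming the API key travels in an `x-api-key` header
const axios = require('axios');

async function main() {
  const res = await axios.post(
    'http://localhost:3000/v1/bank/verify',
    {
      account_number: '1234567890123',   // mock account from bankService.js
      ifsc: 'HDFC0000001',               // placeholder IFSC matching the AAAA0XXXXXX format
      name: 'Rahul Sharma'               // optional; enables name matching
    },
    { headers: { 'x-api-key': process.env.VERIFY_API_KEY } }
  );
  console.log(res.data.data);            // account_exists, name_at_bank, name_match_score, ...
  console.log(res.data.meta);            // note: this endpoint bills 2 credits
}

main().catch(err => console.error(err.response?.data || err.message));
```
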
70  src/routes/gst.js  Normal file
@@ -0,0 +1,70 @@
/**
 * GST Verification Route
 * Verifies GSTIN and returns company details
 */

const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { verifyGSTIN } = require('../services/gstService');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

router.get('/verify/:gstin', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { gstin } = req.params;
    const gstinRegex = /^[0-9]{2}[A-Z]{5}[0-9]{4}[A-Z]{1}[1-9A-Z]{1}[A-Z][0-9A-Z]{1}$/;

    if (!gstinRegex.test(gstin.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_GSTIN', message: 'Invalid GSTIN format' }
      });
    }

    const result = await verifyGSTIN(gstin.toUpperCase());

    if (!result.success) {
      return res.status(result.statusCode || 404).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_gst_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1,
        source: 'gstn'
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/gst/verify',
      method: 'GET',
      params: { gstin: req.params.gstin },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;

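A sample lookup against this route. The header name and environment variable are assumptions (the auth middleware is outside this excerpt); the GSTIN is one of the mock records shipped in `src/services/gstService.js`, so it resolves even without Setu credentials.

```js
// gst-verify-example.js — sketch only; header name assumed
const axios = require('axios');

axios.get('http://localhost:3000/v1/gst/verify/27AABCU9603R1ZM', {
  headers: { 'x-api-key': process.env.VERIFY_API_KEY }
})
  .then(res => {
    const { legal_name, status, address } = res.data.data;
    console.log(legal_name, status, address.pincode);
  })
  .catch(err => console.error(err.response?.data || err.message));
```
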
210  src/routes/ifsc.js  Normal file
@@ -0,0 +1,210 @@
/**
 * IFSC Lookup Route
 * Returns bank details for a given IFSC code
 */

const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { query } = require('../database/connection');
const { cacheGet, cacheSet } = require('../cache/redis');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

/**
 * GET /v1/ifsc/bank/:bankName
 * Search branches by bank name
 */
router.get('/bank/:bankName', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { bankName } = req.params;

    if (!bankName || bankName.trim().length < 2) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_QUERY', message: 'Bank name must be at least 2 characters' }
      });
    }

    const result = await query(
      `SELECT ifsc, bank_name, branch, address, city, district, contact, state
       FROM ifsc_codes
       WHERE bank_name ILIKE $1
       LIMIT 50`,
      [`%${bankName}%`]
    );

    success = true;

    res.json({
      success: true,
      data: {
        count: result.rows.length,
        results: result.rows
      },
      meta: {
        request_id: `req_ifsc_bank_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/ifsc/bank',
      method: 'GET',
      params: { bank: req.params.bankName },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

/**
 * GET /v1/ifsc/state/:stateName
 * Search branches by state
 */
router.get('/state/:stateName', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { stateName } = req.params;

    if (!stateName || stateName.trim().length < 2) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_QUERY', message: 'State name must be at least 2 characters' }
      });
    }

    const result = await query(
      `SELECT ifsc, bank_name, branch, city, district, state
       FROM ifsc_codes
       WHERE state ILIKE $1
       LIMIT 50`,
      [`%${stateName}%`]
    );

    success = true;

    res.json({
      success: true,
      data: {
        count: result.rows.length,
        results: result.rows
      },
      meta: {
        request_id: `req_ifsc_state_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/ifsc/state',
      method: 'GET',
      params: { state: req.params.stateName },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

/**
 * GET /v1/ifsc/:ifsc_code
 * Lookup a single IFSC code (cached for 24 hours)
 */
router.get('/:ifsc_code', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { ifsc_code } = req.params;
    const ifscRegex = /^[A-Z]{4}0[A-Z0-9]{6}$/;

    if (!ifscRegex.test(ifsc_code.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_IFSC', message: 'Invalid IFSC format' }
      });
    }

    const ifsc = ifsc_code.toUpperCase();
    const cacheKey = `ifsc:${ifsc}`;
    let data = await cacheGet(cacheKey);

    if (!data) {
      const result = await query('SELECT * FROM ifsc_codes WHERE ifsc = $1', [ifsc]);

      if (result.rows.length === 0) {
        return res.status(404).json({
          success: false,
          error: { code: 'IFSC_NOT_FOUND', message: 'IFSC not found' }
        });
      }

      data = result.rows[0];
      await cacheSet(cacheKey, data, 86400); // Cache for 24 hours
    }

    success = true;

    res.json({
      success: true,
      data: {
        ifsc: data.ifsc,
        bank: data.bank_name,
        branch: data.branch,
        address: data.address,
        city: data.city,
        district: data.district,
        state: data.state,
        contact: data.contact,
        upi: data.upi_enabled,
        rtgs: data.rtgs_enabled,
        neft: data.neft_enabled,
        imps: data.imps_enabled,
        micr: data.micr_code,
        swift: data.swift_code
      },
      meta: {
        request_id: `req_ifsc_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/ifsc',
      method: 'GET',
      params: { ifsc: req.params.ifsc_code },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;

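A possible client for the two lookup styles above. The IFSC code and search term are placeholders, and the API-key header is assumed as before.

```js
// ifsc-lookup-example.js — sketch; 'SBIN0000001' is a placeholder code
const axios = require('axios');
const headers = { 'x-api-key': process.env.VERIFY_API_KEY };

async function main() {
  // Single-code lookup (served from Redis after the first hit, 24 h TTL)
  const one = await axios.get('http://localhost:3000/v1/ifsc/SBIN0000001', { headers });
  console.log(one.data.data.bank, one.data.data.branch);

  // Branch search by bank name (up to 50 rows)
  const url = 'http://localhost:3000/v1/ifsc/bank/' + encodeURIComponent('State Bank');
  const many = await axios.get(url, { headers });
  console.log(many.data.data.count, 'branches');
}

main().catch(err => console.error(err.response?.data || err.message));
```
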
133  src/routes/pan.js  Normal file
@@ -0,0 +1,133 @@
/**
 * PAN Verification Route
 * Verifies PAN and returns holder details
 */

const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { verifyPAN } = require('../services/panService');
const { logApiCall } = require('../services/analytics');

router.use(authenticateApiKey);
router.use(rateLimit);

/**
 * POST /v1/pan/verify
 * Verify a PAN sent in the request body (optionally with name and dob)
 */
router.post('/verify', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { pan, name, dob } = req.body;

    if (!pan) {
      return res.status(400).json({
        success: false,
        error: { code: 'MISSING_PAN', message: 'PAN is required' }
      });
    }

    const panRegex = /^[A-Z]{5}[0-9]{4}[A-Z]{1}$/;

    if (!panRegex.test(pan.toUpperCase())) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_PAN', message: 'Invalid PAN format' }
      });
    }

    const result = await verifyPAN(pan.toUpperCase(), name, dob);

    if (!result.success) {
      return res.status(result.statusCode || 404).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_pan_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pan/verify',
      method: 'POST',
      params: { pan: req.body?.pan },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

/**
 * GET /v1/pan/verify/:panNumber
 * Fetch PAN details via URL parameter
 */
router.get('/verify/:panNumber', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { panNumber } = req.params;

    if (!panNumber) {
      return res.status(400).json({
        success: false,
        error: { code: 'MISSING_PAN', message: 'PAN is required' }
      });
    }

    const result = await verifyPAN(panNumber.toUpperCase());

    if (!result.success) {
      return res.status(result.statusCode || 404).json({
        success: false,
        error: { code: result.errorCode, message: result.message }
      });
    }

    success = true;

    res.json({
      success: true,
      data: result.data,
      meta: {
        request_id: `req_pan_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pan/verify',
      method: 'GET',
      params: { pan: req.params?.panNumber },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;

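A sample call to the POST variant. The PAN shown is only a syntactically valid placeholder, and the header name is assumed; whether it resolves depends on Setu credentials or rows in the local `pan_data` table.

```js
// pan-verify-example.js — sketch; placeholder PAN, header name assumed
const axios = require('axios');

axios.post(
  'http://localhost:3000/v1/pan/verify',
  { pan: 'ABCDE1234F', name: 'Rahul Sharma' },    // name is optional; enables name_match fields
  { headers: { 'x-api-key': process.env.VERIFY_API_KEY } }
)
  .then(res => console.log(res.data.data))         // name, status, type, source, name_match, ...
  .catch(err => console.error(err.response?.data || err.message));
```
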
193  src/routes/pincode.js  Normal file
@@ -0,0 +1,193 @@
/**
 * Pincode Lookup Route
 * Returns location details for a given pincode
 */

const express = require('express');
const router = express.Router();
const { authenticateApiKey } = require('../middleware/auth');
const { rateLimit } = require('../middleware/rateLimit');
const { query } = require('../database/connection');
const { cacheGet, cacheSet } = require('../cache/redis');
const { logApiCall } = require('../services/analytics');

const STATE_CODES = {
  'Andhra Pradesh': 'AP', 'Arunachal Pradesh': 'AR', 'Assam': 'AS', 'Bihar': 'BR',
  'Chhattisgarh': 'CG', 'Delhi': 'DL', 'Goa': 'GA', 'Gujarat': 'GJ', 'Haryana': 'HR',
  'Himachal Pradesh': 'HP', 'Jharkhand': 'JH', 'Karnataka': 'KA', 'Kerala': 'KL',
  'Madhya Pradesh': 'MP', 'Maharashtra': 'MH', 'Manipur': 'MN', 'Meghalaya': 'ML',
  'Mizoram': 'MZ', 'Nagaland': 'NL', 'Odisha': 'OD', 'Punjab': 'PB', 'Rajasthan': 'RJ',
  'Sikkim': 'SK', 'Tamil Nadu': 'TN', 'Telangana': 'TS', 'Tripura': 'TR',
  'Uttar Pradesh': 'UP', 'Uttarakhand': 'UK', 'West Bengal': 'WB'
};

router.use(authenticateApiKey);
router.use(rateLimit);

/**
 * GET /v1/pincode/search?q=query
 * Reverse search - find pincodes by office name, district, or state
 */
router.get('/search', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { q } = req.query;

    if (!q || q.trim().length < 2) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_QUERY', message: 'Search query must be at least 2 characters' }
      });
    }

    const searchTerm = q.trim();
    const cacheKey = `pincode_search:${searchTerm.toLowerCase()}`;

    // Check cache first (60 second cache for search queries)
    let data = await cacheGet(cacheKey);

    if (!data) {
      // Case-insensitive search using ILIKE with wildcards
      const result = await query(
        `SELECT DISTINCT pincode, office_name, office_type, district, state, latitude, longitude
         FROM pincodes
         WHERE office_name ILIKE $1
            OR district ILIKE $1
            OR state ILIKE $1
         ORDER BY office_name
         LIMIT 10`,
        [`%${searchTerm}%`]
      );

      data = result.rows;

      // Cache for 60 seconds
      if (data.length > 0) {
        await cacheSet(cacheKey, data, 60);
      }
    }

    success = true;

    res.json({
      success: true,
      data: {
        query: searchTerm,
        count: data.length,
        results: data.map(row => ({
          pincode: row.pincode,
          office_name: row.office_name,
          office_type: row.office_type,
          district: row.district,
          state: row.state,
          latitude: parseFloat(row.latitude) || null,
          longitude: parseFloat(row.longitude) || null
        }))
      },
      meta: {
        request_id: `req_pin_search_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pincode/search',
      method: 'GET',
      params: { q: req.query?.q },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

/**
 * GET /v1/pincode/:pincode
 * Lookup pincode details
 */
router.get('/:pincode', async (req, res, next) => {
  const startTime = Date.now();
  let success = false;

  try {
    const { pincode } = req.params;

    if (!/^\d{6}$/.test(pincode)) {
      return res.status(400).json({
        success: false,
        error: { code: 'INVALID_PINCODE', message: 'Pincode must be 6 digits' }
      });
    }

    const cacheKey = `pincode:${pincode}`;
    let data = await cacheGet(cacheKey);

    if (!data) {
      const result = await query('SELECT * FROM pincodes WHERE pincode = $1', [pincode]);

      if (result.rows.length === 0) {
        return res.status(404).json({
          success: false,
          error: { code: 'PINCODE_NOT_FOUND', message: 'Pincode not found' }
        });
      }

      data = result.rows;
      await cacheSet(cacheKey, data, 604800); // Cache for 7 days
    }

    success = true;
    const primary = data[0];

    res.json({
      success: true,
      data: {
        pincode,
        locations: data.map(row => ({
          office_name: row.office_name,
          office_type: row.office_type,
          district: row.district,
          state: row.state,
          latitude: parseFloat(row.latitude) || null,
          longitude: parseFloat(row.longitude) || null
        })),
        primary: {
          district: primary.district,
          state: primary.state,
          state_code: STATE_CODES[primary.state] || ''
        }
      },
      meta: {
        request_id: `req_pin_${Date.now()}`,
        credits_used: 1,
        credits_remaining: req.user.remaining - 1
      }
    });

  } catch (error) {
    next(error);
  } finally {
    await logApiCall({
      userId: req.user.id,
      apiKeyId: req.user.apiKeyId,
      endpoint: '/v1/pincode',
      method: 'GET',
      params: { pincode: req.params.pincode },
      status: success ? 200 : 500,
      duration: Date.now() - startTime,
      success,
      isTestKey: req.user.isTestKey
    });
  }
});

module.exports = router;

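A client sketch covering both routes. The pincode and search term are illustrative, and the API-key header remains an assumption.

```js
// pincode-example.js — sketch; header name assumed, sample values only
const axios = require('axios');
const headers = { 'x-api-key': process.env.VERIFY_API_KEY };

async function main() {
  // Direct lookup: one pincode can map to several post offices
  const direct = await axios.get('http://localhost:3000/v1/pincode/400001', { headers });
  console.log(direct.data.data.primary, direct.data.data.locations.length);

  // Reverse search by office, district, or state name
  const search = await axios.get('http://localhost:3000/v1/pincode/search', {
    headers,
    params: { q: 'Mumbai' }
  });
  console.log(search.data.data.count, 'matches');
}

main().catch(err => console.error(err.response?.data || err.message));
```
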
124  src/routes/user.js  Normal file
@@ -0,0 +1,124 @@
/**
 * User Routes
 * Handles user profile and usage information
 */

const express = require('express');
const router = express.Router();
const { query } = require('../database/connection');
const { authenticateJWT } = require('../middleware/auth');

/**
 * GET /v1/user/usage
 * Get API usage statistics for the authenticated user
 */
router.get('/usage', authenticateJWT, async (req, res, next) => {
  try {
    const userId = req.user.id;

    // Get total and successful/failed calls
    const statsResult = await query(
      `SELECT
         COUNT(*) as total_calls,
         COUNT(*) FILTER (WHERE success = true) as successful_calls,
         COUNT(*) FILTER (WHERE success = false) as failed_calls,
         COALESCE(SUM(credits_used), 0) as credits_used
       FROM api_calls
       WHERE user_id = $1
         AND called_at >= DATE_TRUNC('month', CURRENT_DATE)`,
      [userId]
    );

    // Get breakdown by endpoint
    const endpointResult = await query(
      `SELECT endpoint, COUNT(*) as count
       FROM api_calls
       WHERE user_id = $1
         AND called_at >= DATE_TRUNC('month', CURRENT_DATE)
       GROUP BY endpoint
       ORDER BY count DESC`,
      [userId]
    );

    const stats = statsResult.rows[0];
    const byEndpoint = {};
    endpointResult.rows.forEach(row => {
      byEndpoint[row.endpoint] = parseInt(row.count);
    });

    res.json({
      success: true,
      data: {
        period: 'month',
        total_calls: parseInt(stats.total_calls) || 0,
        successful_calls: parseInt(stats.successful_calls) || 0,
        failed_calls: parseInt(stats.failed_calls) || 0,
        credits_used: parseInt(stats.credits_used) || 0,
        quota: req.user.monthly_quota,
        remaining: req.user.monthly_quota - req.user.calls_this_month,
        by_endpoint: byEndpoint
      }
    });

  } catch (error) {
    next(error);
  }
});

/**
 * GET /v1/user/profile
 * Get user profile information
 */
router.get('/profile', authenticateJWT, async (req, res, next) => {
  try {
    res.json({
      success: true,
      data: {
        id: req.user.id,
        email: req.user.email,
        company_name: req.user.company_name,
        plan: req.user.plan,
        quota: req.user.monthly_quota,
        used: req.user.calls_this_month
      }
    });
  } catch (error) {
    next(error);
  }
});

/**
 * GET /v1/user/api-keys
 * Get all API keys for the user
 */
router.get('/api-keys', authenticateJWT, async (req, res, next) => {
  try {
    const result = await query(
      `SELECT id, key_prefix, key_hint, name, is_test_key, is_active, last_used_at, total_calls, created_at
       FROM api_keys
       WHERE user_id = $1
       ORDER BY created_at DESC`,
      [req.user.id]
    );

    res.json({
      success: true,
      data: {
        api_keys: result.rows.map(key => ({
          id: key.id,
          name: key.name,
          key_preview: `${key.key_prefix}****${key.key_hint}`,
          is_test_key: key.is_test_key,
          is_active: key.is_active,
          last_used_at: key.last_used_at,
          total_calls: key.total_calls,
          created_at: key.created_at
        }))
      }
    });
  } catch (error) {
    next(error);
  }
});

module.exports = router;

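These routes are guarded by `authenticateJWT` rather than the API key. The middleware is not shown in this excerpt, so the `Authorization: Bearer` header format below is an assumption; the token would be the JWT returned by `/v1/auth/login`.

```js
// usage-example.js — sketch, assuming a standard `Authorization: Bearer <token>` header
const axios = require('axios');

axios.get('http://localhost:3000/v1/user/usage', {
  headers: { Authorization: `Bearer ${process.env.VERIFY_JWT}` }
})
  .then(res => {
    const { total_calls, credits_used, remaining, by_endpoint } = res.data.data;
    console.log({ total_calls, credits_used, remaining, by_endpoint });
  })
  .catch(err => console.error(err.response?.data || err.message));
```
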
27  src/services/analytics.js  Normal file
@@ -0,0 +1,27 @@
/**
 * Analytics Service
 * Logs API calls and updates usage counters
 */

const { query } = require('../database/connection');

async function logApiCall({ userId, apiKeyId, endpoint, method, params, status, duration, success, isTestKey, errorMessage = null }) {
  try {
    await query(
      `INSERT INTO api_calls
         (user_id, api_key_id, endpoint, method, request_params, response_status, response_time_ms, success, error_message, credits_used, is_billable)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)`,
      [userId, apiKeyId, endpoint, method, JSON.stringify(params || {}), status, duration, success, errorMessage, success ? 1 : 0, !isTestKey && success]
    );

    if (success && !isTestKey) {
      await query('UPDATE users SET calls_this_month = calls_this_month + 1 WHERE id = $1', [userId]);
      await query('UPDATE api_keys SET last_used_at = NOW(), total_calls = total_calls + 1 WHERE id = $1', [apiKeyId]);
    }
  } catch (error) {
    console.error('Log error:', error.message);
  }
}

module.exports = { logApiCall };

153  src/services/bankService.js  Normal file
@@ -0,0 +1,153 @@
/**
 * Bank Account Verification Service
 * Mock implementation for testing (replace with real API in production)
 */

const axios = require('axios');
const { cacheGet, cacheSet } = require('../cache/redis');
const { query } = require('../database/connection');

// Mock data for testing
const MOCK_BANK_DATA = {
  '1234567890123': {
    account_exists: true,
    name_at_bank: 'RAHUL SHARMA',
    bank_name: 'HDFC Bank',
    branch: 'Fort Branch'
  },
  '9876543210987': {
    account_exists: true,
    name_at_bank: 'PRIYA PATEL',
    bank_name: 'State Bank of India',
    branch: 'Mumbai Main Branch'
  }
};

async function verifyBankAccount(accountNumber, ifsc, name = null) {
  try {
    // Check if we should use real API or mock
    if (process.env.BANK_PROVIDER_URL && process.env.BANK_PROVIDER_KEY) {
      // Real API call
      const response = await axios.post(
        process.env.BANK_PROVIDER_URL,
        {
          account_number: accountNumber,
          ifsc: ifsc,
          name: name
        },
        {
          headers: {
            'Authorization': `Bearer ${process.env.BANK_PROVIDER_KEY}`,
            'Content-Type': 'application/json'
          },
          timeout: 30000
        }
      );

      if (!response.data) {
        return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
      }

      const d = response.data.data;
      const data = {
        account_number: accountNumber,
        ifsc: ifsc,
        account_exists: d.account_exists || d.valid,
        name_at_bank: d.name_at_bank || d.beneficiary_name,
        bank_name: d.bank_name,
        branch: d.branch
      };

      return addNameMatch({ success: true, data }, name);
    } else {
      // Mock implementation for testing
      const mockData = MOCK_BANK_DATA[accountNumber];

      // Get bank details from IFSC if available
      let bankDetails = { bank_name: 'Unknown Bank', branch: 'Unknown Branch' };
      try {
        const ifscResult = await query('SELECT bank_name, branch FROM ifsc_codes WHERE ifsc = $1', [ifsc.toUpperCase()]);
        if (ifscResult.rows.length > 0) {
          bankDetails = {
            bank_name: ifscResult.rows[0].bank_name,
            branch: ifscResult.rows[0].branch
          };
        }
      } catch (e) {
        // Ignore IFSC lookup errors
      }

      if (!mockData) {
        // Return account not found for unknown accounts
        return {
          success: true,
          data: {
            account_number: accountNumber,
            ifsc: ifsc,
            account_exists: false,
            name_at_bank: null,
            name_match: false,
            name_match_score: 0,
            bank_name: bankDetails.bank_name,
            branch: bankDetails.branch
          }
        };
      }

      const data = {
        account_number: accountNumber,
        ifsc: ifsc,
        account_exists: mockData.account_exists,
        name_at_bank: mockData.name_at_bank,
        bank_name: bankDetails.bank_name || mockData.bank_name,
        branch: bankDetails.branch || mockData.branch
      };

      return addNameMatch({ success: true, data }, name);
    }

  } catch (error) {
    if (error.code === 'ECONNABORTED') {
      return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
    }
    console.error('Bank verification error:', error.message);
    return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
  }
}

function addNameMatch(result, inputName) {
  if (!result.success || !inputName || !result.data.name_at_bank) {
    if (result.success) {
      result.data.name_match = false;
      result.data.name_match_score = 0;
    }
    return result;
  }

  const registeredName = result.data.name_at_bank.toUpperCase();
  const providedName = inputName.toUpperCase();

  // Simple name matching
  const nameMatch = registeredName === providedName ||
    registeredName.includes(providedName) ||
    providedName.includes(registeredName);

  // Calculate match score
  let matchScore = 0;
  if (nameMatch) {
    matchScore = registeredName === providedName ? 100 : 80;
  } else {
    // Partial matching
    const regWords = registeredName.split(' ');
    const provWords = providedName.split(' ');
    const matches = regWords.filter(w => provWords.includes(w)).length;
    matchScore = Math.round((matches / Math.max(regWords.length, provWords.length)) * 100);
  }

  result.data.name_match = matchScore >= 80;
  result.data.name_match_score = matchScore;

  return result;
}

module.exports = { verifyBankAccount };

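A direct call into this service, useful for seeing the name-matching behaviour without going through the HTTP layer. It assumes the script runs from the project root with the Postgres and Redis connections configured; the IFSC is a placeholder, the account number is one of the mock records above.

```js
// Run from the project root; Postgres/Redis must be reachable for the IFSC lookup
const { verifyBankAccount } = require('./src/services/bankService');

(async () => {
  // With BANK_PROVIDER_URL/KEY unset, this resolves against MOCK_BANK_DATA
  const result = await verifyBankAccount('1234567890123', 'HDFC0000001', 'Rahul Kumar Sharma');
  console.log(result.data.name_at_bank);        // 'RAHUL SHARMA' (from the mock record)
  console.log(result.data.name_match_score);    // 67 — two of three words overlap, below the 80 cutoff
})();
```
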
218  src/services/gstService.js  Normal file
@@ -0,0 +1,218 @@
/**
 * GST Verification Service
 * Primary: Setu API | Fallback: Mock Data
 */

const axios = require('axios');
const { cacheGet, cacheSet } = require('../cache/redis');
const { verifyGstSetu } = require('./setuGstService');

const STATE_NAMES = {
  '01': 'Jammu & Kashmir', '02': 'Himachal Pradesh', '03': 'Punjab',
  '04': 'Chandigarh', '05': 'Uttarakhand', '06': 'Haryana', '07': 'Delhi',
  '08': 'Rajasthan', '09': 'Uttar Pradesh', '10': 'Bihar', '11': 'Sikkim',
  '12': 'Arunachal Pradesh', '13': 'Nagaland', '14': 'Manipur', '15': 'Mizoram',
  '16': 'Tripura', '17': 'Meghalaya', '18': 'Assam', '19': 'West Bengal',
  '20': 'Jharkhand', '21': 'Odisha', '22': 'Chhattisgarh', '23': 'Madhya Pradesh',
  '24': 'Gujarat', '26': 'Dadra & Nagar Haveli', '27': 'Maharashtra',
  '29': 'Karnataka', '30': 'Goa', '31': 'Lakshadweep', '32': 'Kerala',
  '33': 'Tamil Nadu', '34': 'Puducherry', '35': 'Andaman & Nicobar',
  '36': 'Telangana', '37': 'Andhra Pradesh', '38': 'Ladakh'
};

// Mock data for testing (fallback)
const MOCK_GST_DATA = {
  '27AABCU9603R1ZM': {
    legal_name: 'ULTRATECH CEMENT LIMITED',
    trade_name: 'ULTRATECH CEMENT LIMITED',
    status: 'Active',
    registration_date: '2017-07-01',
    last_updated: '2025-01-15',
    business_type: 'Public Limited Company',
    constitution: 'Public Limited Company',
    address: {
      building: 'B WING, AHURA CENTRE',
      floor: '2ND FLOOR',
      street: 'MAHAKALI CAVES ROAD',
      locality: 'ANDHERI EAST',
      city: 'MUMBAI',
      district: 'Mumbai',
      state: 'Maharashtra',
      pincode: '400093'
    },
    nature_of_business: ['Manufacturer', 'Supplier of Services'],
    filing_status: {
      gstr1: 'Filed',
      gstr3b: 'Filed',
      last_filed_date: '2025-01-10'
    }
  },
  '29AABCT1332L1ZD': {
    legal_name: 'TATA MOTORS LIMITED',
    trade_name: 'TATA MOTORS LIMITED',
    status: 'Active',
    registration_date: '2017-07-01',
    last_updated: '2025-01-10',
    business_type: 'Public Limited Company',
    constitution: 'Public Limited Company',
    address: {
      building: 'TATA MOTORS OFFICE',
      floor: '',
      street: 'HOSUR ROAD',
      locality: 'BOMMASANDRA',
      city: 'BANGALORE',
      district: 'Bangalore Urban',
      state: 'Karnataka',
      pincode: '560099'
    },
    nature_of_business: ['Manufacturer', 'Trader'],
    filing_status: {
      gstr1: 'Filed',
      gstr3b: 'Filed',
      last_filed_date: '2025-01-08'
    }
  },
  '27AAACR5055K1Z5': {
    legal_name: 'RELIANCE INDUSTRIES LIMITED',
    trade_name: 'RELIANCE INDUSTRIES LIMITED',
    status: 'Active',
    registration_date: '2017-07-01',
    last_updated: '2025-01-12',
    business_type: 'Public Limited Company',
    constitution: 'Public Limited Company',
    address: {
      building: 'MAKER CHAMBERS IV',
      floor: '3RD FLOOR',
      street: 'NARIMAN POINT',
      locality: 'NARIMAN POINT',
      city: 'MUMBAI',
      district: 'Mumbai',
      state: 'Maharashtra',
      pincode: '400021'
    },
    nature_of_business: ['Manufacturer', 'Trader', 'Supplier of Services'],
    filing_status: {
      gstr1: 'Filed',
      gstr3b: 'Filed',
      last_filed_date: '2025-01-11'
    }
  }
};

async function verifyGSTIN(gstin) {
  try {
    const cacheKey = `gst:${gstin}`;
    const cached = await cacheGet(cacheKey);
    if (cached) {
      console.log(`📦 GSTIN ${gstin} found in cache`);
      return { success: true, source: cached.source || 'CACHE', data: cached };
    }

    // 1. PRIMARY: Try Setu API first
    try {
      const setuResult = await verifyGstSetu(gstin);

      if (setuResult.success) {
        const stateCode = gstin.substring(0, 2);
        const d = setuResult.data;

        // Map Setu's nested response structure
        const data = {
          gstin: gstin,
          legal_name: d.company?.name || d.legal_name,
          trade_name: d.company?.tradeName || d.trade_name,
          status: d.company?.status || d.status || 'Active',
          registration_date: d.gst?.registrationDate || d.registration_date,
          business_type: d.company?.type || d.business_type,
          constitution: d.company?.constitutionOfBusiness || d.constitution,
          tax_payer_type: d.company?.taxPayerType,
          state: STATE_NAMES[stateCode] || d.address?.principle?.stateCode || d.state,
          state_code: stateCode,
          pan: gstin.substring(2, 12),
          address: {
            building: d.address?.principle?.buildingName || '',
            building_number: d.address?.principle?.buildingNumber || '',
            floor: d.address?.principle?.floorNo || '',
            street: d.address?.principle?.street || '',
            locality: d.address?.principle?.location || '',
            city: d.address?.principle?.city || '',
            district: d.address?.principle?.district || '',
            state: d.address?.principle?.stateCode || '',
            pincode: d.address?.principle?.pinCode || ''
          },
          jurisdiction: d.jurisdiction || {},
          source: 'SETU_API'
        };

        // Cache the successful result for 24 hours
        await cacheSet(cacheKey, data, 86400);
        return { success: true, source: 'SETU_API', data };
      }
    } catch (setuError) {
      // Log the Setu error and fall back to mock data
      console.warn(`⚠️ Setu GST API failed for GSTIN ${gstin}: ${setuError.message}`);
      console.log(`🔄 Switching to mock/local GST data lookup...`);
    }

    // 2. FALLBACK: Mock data for testing
    const mockData = MOCK_GST_DATA[gstin];

    if (mockData) {
      const stateCode = gstin.substring(0, 2);
      const data = {
        gstin,
        ...mockData,
        state: STATE_NAMES[stateCode] || mockData.address.state,
        state_code: stateCode,
        pan: gstin.substring(2, 12),
        source: 'LOCAL_MOCK'
      };

      await cacheSet(cacheKey, data, 86400);
      console.log(`✅ Mock data verification successful for GSTIN: ${gstin}`);
      return { success: true, source: 'LOCAL_MOCK', data };
    } else {
      return { success: false, statusCode: 404, errorCode: 'GSTIN_NOT_FOUND', message: 'GSTIN not found in any verification source' };
    }

  } catch (error) {
    if (error.code === 'ECONNABORTED') {
      return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
    }
    console.error('GST verification error:', error.message);
    return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
  }
}

function formatGSTData(gstin, d) {
  return {
    gstin,
    legal_name: d.legal_name || d.lgnm,
    trade_name: d.trade_name || d.tradeNam,
    status: d.status || d.sts,
    registration_date: d.registration_date || d.rgdt,
    last_updated: d.last_update || d.lstupdt,
    business_type: d.business_type || d.ctb,
    constitution: d.constitution || d.ctj,
    state: d.state || STATE_NAMES[gstin.substring(0, 2)],
    state_code: gstin.substring(0, 2),
    pan: gstin.substring(2, 12),
    address: {
      building: d.address?.bno || d.pradr?.addr?.bno || '',
      floor: d.address?.flno || d.pradr?.addr?.flno || '',
      street: d.address?.st || d.pradr?.addr?.st || '',
      locality: d.address?.loc || d.pradr?.addr?.loc || '',
      city: d.address?.city || d.pradr?.addr?.city || '',
      district: d.address?.dst || d.pradr?.addr?.dst || '',
      state: d.address?.stcd || d.pradr?.addr?.stcd || '',
      pincode: d.address?.pncd || d.pradr?.addr?.pncd || ''
    },
    nature_of_business: d.nature_of_business || d.nba || [],
    filing_status: {
      gstr1: d.filing_status?.gstr1 || 'Unknown',
      gstr3b: d.filing_status?.gstr3b || 'Unknown',
      last_filed_date: d.filing_status?.last_filed || null
    }
  };
}

module.exports = { verifyGSTIN };

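A direct-call sketch of the Setu-then-mock fallback order. It assumes Redis is reachable (the cache is consulted first); with the SETU_* variables unset, the Setu branch throws and the lookup lands on the Tata Motors mock record.

```js
// Run from the project root; Redis must be configured for cacheGet/cacheSet
const { verifyGSTIN } = require('./src/services/gstService');

(async () => {
  const result = await verifyGSTIN('29AABCT1332L1ZD');   // Tata Motors mock record
  if (result.success) {
    console.log(result.source);                // 'SETU_API', 'LOCAL_MOCK', or 'CACHE'
    console.log(result.data.legal_name);       // 'TATA MOTORS LIMITED'
  } else {
    console.log(result.errorCode);             // e.g. 'GSTIN_NOT_FOUND'
  }
})();
```
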
145  src/services/panService.js  Normal file
@@ -0,0 +1,145 @@
/**
 * PAN Verification Service
 * Primary: Setu API | Fallback: Local Database
 */

const axios = require('axios');
const { cacheGet, cacheSet } = require('../cache/redis');
const { pool } = require('../database/connection');
const { verifyPanSetu } = require('./setuService');

// PAN Types based on 4th character
const PAN_TYPES = {
  'P': 'Individual',
  'C': 'Company',
  'H': 'HUF',
  'A': 'AOP',
  'B': 'BOI',
  'G': 'Government',
  'J': 'Artificial Juridical Person',
  'L': 'Local Authority',
  'F': 'Firm/Partnership',
  'T': 'Trust'
};

async function verifyPAN(pan, name = null, dob = null) {
  try {
    const cacheKey = `pan:${pan}`;
    const cached = await cacheGet(cacheKey);
    if (cached) {
      console.log(`📦 PAN ${pan} found in cache`);
      return addNameMatch({ success: true, source: cached.source || 'CACHE', data: cached }, name);
    }

    // 1. Regex Validation (Standard 10-char PAN format)
    const panRegex = /^[A-Z]{5}[0-9]{4}[A-Z]{1}$/;
    if (!panRegex.test(pan)) {
      return { success: false, statusCode: 400, errorCode: 'INVALID_FORMAT', message: 'Invalid PAN format. Expected: AAAAA0000A' };
    }

    // 2. PRIMARY: Try Setu API first
    try {
      console.log(`🌐 Attempting Setu API verification for PAN: ${pan}`);
      const setuResult = await verifyPanSetu(pan);

      if (setuResult.success) {
        const panType = PAN_TYPES[pan.charAt(3)] || 'Unknown';

        const data = {
          pan: pan,
          name: setuResult.data.full_name || setuResult.data.name,
          status: 'Valid',
          type: panType,
          source: 'SETU_API',
          // Include any additional fields from Setu response
          ...setuResult.data
        };

        // Cache the successful result for 24 hours
        await cacheSet(cacheKey, data, 86400);
        console.log(`✅ Setu API verification successful for PAN: ${pan}`);
        return addNameMatch({ success: true, source: 'SETU_API', data }, name);
      }
    } catch (setuError) {
      // Log the Setu error and fall back to local DB
      console.warn(`⚠️ Setu API failed for PAN ${pan}: ${setuError.message}`);
      console.log(`🔄 Switching to local database lookup...`);
    }

    // 3. FALLBACK: Local Database Lookup
    const result = await pool.query('SELECT * FROM pan_data WHERE pan_number = $1', [pan]);

    if (result.rows.length > 0) {
      const row = result.rows[0];
      const panType = PAN_TYPES[pan.charAt(3)] || 'Unknown';

      const data = {
        pan: row.pan_number,
        name: row.full_name,
        status: row.status || 'Valid',
        type: panType,
        father_name: row.father_name,
        dob: row.date_of_birth,
        gender: row.gender,
        category: row.category,
        source: 'LOCAL_DB'
      };

      await cacheSet(cacheKey, data, 86400);
      console.log(`✅ Local DB verification successful for PAN: ${pan}`);
      return addNameMatch({ success: true, source: 'LOCAL_DB', data }, name);
    } else {
      return { success: false, statusCode: 404, errorCode: 'PAN_NOT_FOUND', message: 'PAN not found in any verification source' };
    }
  } catch (error) {
    if (error.code === 'ECONNABORTED') {
      return { success: false, statusCode: 504, errorCode: 'PROVIDER_TIMEOUT', message: 'Service timeout' };
    }
    console.error('PAN verification error:', error.message);
    return { success: false, statusCode: 500, errorCode: 'VERIFICATION_FAILED', message: 'Verification failed' };
  }
}

function formatPANData(pan, d) {
  return {
    pan,
    name: d.name || d.full_name,
    status: d.status || 'Valid',
    type: PAN_TYPES[pan.charAt(3)] || 'Unknown',
    last_name: d.last_name || '',
    first_name: d.first_name || '',
    middle_name: d.middle_name || '',
    title: d.title || ''
  };
}

function addNameMatch(result, inputName) {
  if (!result.success || !inputName) return result;

  const registeredName = result.data.name.toUpperCase();
  const providedName = inputName.toUpperCase();

  // Simple name matching
  const nameMatch = registeredName === providedName ||
    registeredName.includes(providedName) ||
    providedName.includes(registeredName);

  // Calculate match score
  let matchScore = 0;
  if (nameMatch) {
    matchScore = registeredName === providedName ? 100 : 80;
  } else {
    // Partial matching
    const regWords = registeredName.split(' ');
    const provWords = providedName.split(' ');
    const matches = regWords.filter(w => provWords.includes(w)).length;
    matchScore = Math.round((matches / Math.max(regWords.length, provWords.length)) * 100);
  }

  result.data.name_match = matchScore >= 80;
  result.data.name_match_score = matchScore;

  return result;
}

module.exports = { verifyPAN };

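A sketch of the three-tier lookup (cache, then Setu, then the local `pan_data` table) called directly. The PAN is a placeholder, and the outcome depends entirely on the environment: Setu credentials, Redis, and whatever rows exist in `pan_data`.

```js
// Run from the project root; placeholder PAN, environment-dependent result
const { verifyPAN } = require('./src/services/panService');

(async () => {
  const result = await verifyPAN('ABCDE1234F', 'Rahul Sharma');
  if (result.success) {
    // `source` records which tier answered; name_match fields come from addNameMatch()
    console.log(result.source, result.data.name, result.data.name_match_score);
  } else {
    // e.g. INVALID_FORMAT (regex), PAN_NOT_FOUND, or PROVIDER_TIMEOUT
    console.log(result.statusCode, result.errorCode, result.message);
  }
})();
```
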
79  src/services/setuGstService.js  Normal file
@@ -0,0 +1,79 @@
/**
 * Setu API Integration Service for GST Verification
 * Primary verification provider for GSTIN
 * Documentation: https://docs.setu.co/
 */

const axios = require('axios');

/**
 * Verify GSTIN using Setu Sandbox API
 * @param {string} gstin - The GSTIN to verify
 * @returns {Promise<Object>} - Verification result with business details
 * @throws {Error} - Throws if Setu API fails
 */
async function verifyGstSetu(gstin) {
  const baseUrl = process.env.SETU_BASE_URL;
  const clientId = process.env.SETU_CLIENT_ID;
  const clientSecret = process.env.SETU_CLIENT_SECRET;
  const productInstanceId = process.env.SETU_GST_INSTANCE_ID;

  // Validate environment variables
  if (!baseUrl || !clientId || !clientSecret || !productInstanceId) {
    throw new Error('Setu GST API configuration is incomplete. Check environment variables.');
  }

  try {
    console.log(`🌐 Attempting Setu GST verification for GSTIN: ${gstin}`);

    const response = await axios.post(
      `${baseUrl}/api/verify/gst`,
      {
        gstin: gstin,
        consent: 'Y',
        reason: 'Vendor verification'
      },
      {
        headers: {
          'Content-Type': 'application/json',
          'x-client-id': clientId,
          'x-client-secret': clientSecret,
          'x-product-instance-id': productInstanceId
        },
        timeout: 30000 // 30 second timeout
      }
    );

    // Check if Setu returned a successful verification (case-insensitive)
    const verification = response.data?.verification?.toLowerCase();
    if (response.data && (verification === 'success' || response.data.status === 'success')) {
      console.log(`✅ Setu GST verification successful for GSTIN: ${gstin}`);
      return {
        success: true,
        source: 'SETU_API',
        data: response.data.data
      };
    }

    // If verification field is not SUCCESS, treat as failure
    throw new Error(response.data?.message || 'GST verification failed via Setu');

  } catch (error) {
    // Re-throw with more context
    if (error.response) {
      // Setu returned an error response
      const statusCode = error.response.status;
      const errorMessage = error.response.data?.message || error.response.data?.error || 'Unknown Setu API error';
      console.warn(`⚠️ Setu GST API failed for GSTIN ${gstin}: Setu API Error (${statusCode}): ${errorMessage}`);
      throw new Error(`Setu API Error (${statusCode}): ${errorMessage}`);
    } else if (error.code === 'ECONNABORTED') {
      console.warn(`⚠️ Setu GST API timeout for GSTIN ${gstin}`);
      throw new Error('Setu API timeout');
    } else {
      console.warn(`⚠️ Setu GST API failed for GSTIN ${gstin}: ${error.message}`);
      throw error;
    }
  }
}

module.exports = { verifyGstSetu };

72  src/services/setuService.js  Normal file
@@ -0,0 +1,72 @@
/**
 * Setu API Integration Service
 * Primary verification provider for PAN
 * Documentation: https://docs.setu.co/
 */

const axios = require('axios');

/**
 * Verify PAN using Setu Sandbox API
 * @param {string} panNumber - The PAN number to verify
 * @returns {Promise<Object>} - Verification result with full_name and other details
 * @throws {Error} - Throws if Setu API fails
 */
async function verifyPanSetu(panNumber) {
  const baseUrl = process.env.SETU_BASE_URL;
  const clientId = process.env.SETU_CLIENT_ID;
  const clientSecret = process.env.SETU_CLIENT_SECRET;
  const productInstanceId = process.env.SETU_PRODUCT_INSTANCE_ID;

  // Validate environment variables
  if (!baseUrl || !clientId || !clientSecret || !productInstanceId) {
    throw new Error('Setu API configuration is incomplete. Check environment variables.');
  }

  try {
    const response = await axios.post(
      `${baseUrl}/api/verify/pan`,
      {
        pan: panNumber,
        consent: 'Y',
        reason: 'Verification for user onboarding'
      },
      {
        headers: {
          'Content-Type': 'application/json',
          'x-client-id': clientId,
          'x-client-secret': clientSecret,
          'x-product-instance-id': productInstanceId
        },
        timeout: 30000 // 30 second timeout
      }
    );

    // Check if Setu returned a successful verification
    if (response.data && response.data.verification === 'SUCCESS') {
      return {
        success: true,
        source: 'SETU_API',
        data: response.data.data
      };
    }

    // If verification field is not SUCCESS, treat as failure
    throw new Error(response.data?.message || 'PAN verification failed via Setu');

  } catch (error) {
    // Re-throw with more context
    if (error.response) {
      // Setu returned an error response
      const statusCode = error.response.status;
      const errorMessage = error.response.data?.message || error.response.data?.error || 'Unknown Setu API error';
      throw new Error(`Setu API Error (${statusCode}): ${errorMessage}`);
    } else if (error.code === 'ECONNABORTED') {
      throw new Error('Setu API timeout');
    } else {
      throw error;
    }
  }
}

module.exports = { verifyPanSetu };

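A low-level call to this provider wrapper, shown with its error contract. All four SETU_* variables must be set or the function throws immediately; the PAN is a placeholder.

```js
// Run from the project root with SETU_BASE_URL, SETU_CLIENT_ID, SETU_CLIENT_SECRET,
// and SETU_PRODUCT_INSTANCE_ID exported; the PAN below is a placeholder
const { verifyPanSetu } = require('./src/services/setuService');

(async () => {
  try {
    const result = await verifyPanSetu('ABCDE1234F');
    console.log(result.data.full_name);
  } catch (err) {
    // Errors are re-thrown with context: a config error, `Setu API Error (<status>): ...`,
    // or 'Setu API timeout'
    console.error(err.message);
  }
})();
```
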