diff --git a/package-lock.json b/package-lock.json
index 5ea24ad..c642bd7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -14,6 +14,7 @@
"@sendgrid/mail": "^8.1.6",
"@sentry/node": "^10.19.0",
"@types/multer": "^2.0.0",
+ "@types/yauzl": "^2.10.3",
"adm-zip": "^0.5.16",
"apple-signin-auth": "^2.0.0",
"bcryptjs": "^3.0.2",
@@ -36,7 +37,8 @@
"swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.1",
"uuid": "^9.0.0",
- "winston": "^3.10.0"
+ "winston": "^3.10.0",
+ "yauzl": "^3.2.0"
},
"devDependencies": {
"@types/adm-zip": "^0.5.7",
@@ -985,7 +987,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
"integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
"license": "Apache-2.0",
- "peer": true,
"engines": {
"node": ">=8.0.0"
}
@@ -1007,7 +1008,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.2.0.tgz",
"integrity": "sha512-qRkLWiUEZNAmYapZ7KGS5C4OmBLcP/H2foXeOEaowYCR0wi89fHejrfYfbuLVCMLp/dWZXKvQusdbUEZjERfwQ==",
"license": "Apache-2.0",
- "peer": true,
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
@@ -1020,7 +1020,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.2.0.tgz",
"integrity": "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw==",
"license": "Apache-2.0",
- "peer": true,
"dependencies": {
"@opentelemetry/semantic-conventions": "^1.29.0"
},
@@ -1036,7 +1035,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.208.0.tgz",
"integrity": "sha512-Eju0L4qWcQS+oXxi6pgh7zvE2byogAkcsVv0OjHF/97iOz1N/aKE6etSGowYkie+YA1uo6DNwdSxaaNnLvcRlA==",
"license": "Apache-2.0",
- "peer": true,
"dependencies": {
"@opentelemetry/api-logs": "0.208.0",
"import-in-the-middle": "^2.0.0",
@@ -1424,7 +1422,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.2.0.tgz",
"integrity": "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A==",
"license": "Apache-2.0",
- "peer": true,
"dependencies": {
"@opentelemetry/core": "2.2.0",
"@opentelemetry/semantic-conventions": "^1.29.0"
@@ -1441,7 +1438,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.2.0.tgz",
"integrity": "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw==",
"license": "Apache-2.0",
- "peer": true,
"dependencies": {
"@opentelemetry/core": "2.2.0",
"@opentelemetry/resources": "2.2.0",
@@ -1459,7 +1455,6 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.38.0.tgz",
"integrity": "sha512-kocjix+/sSggfJhwXqClZ3i9Y/MI0fp7b+g7kCRm6psy2dsf8uApTRclwG18h8Avm7C9+fnt+O36PspJ/OzoWg==",
"license": "Apache-2.0",
- "peer": true,
"engines": {
"node": ">=14"
}
@@ -1518,7 +1513,6 @@
"resolved": "https://registry.npmjs.org/@redis/client/-/client-5.8.3.tgz",
"integrity": "sha512-MZVUE+l7LmMIYlIjubPosruJ9ltSLGFmJqsXApTqPLyHLjsJUSAbAJb/A3N34fEqean4ddiDkdWzNu4ZKPvRUg==",
"license": "MIT",
- "peer": true,
"dependencies": {
"cluster-key-slot": "1.1.2"
},
@@ -2544,7 +2538,6 @@
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.9.tgz",
"integrity": "sha512-cuVNgarYWZqxRJDQHEB58GEONhOK79QVR/qYx4S7kcUObQvUwvFnYxJuuHUKm2aieN9X3yZB4LZsuYNU1Qphsw==",
"license": "MIT",
- "peer": true,
"dependencies": {
"undici-types": "~6.21.0"
}
@@ -2683,6 +2676,15 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@types/yauzl": {
+ "version": "2.10.3",
+ "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz",
+ "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/accepts": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
@@ -2701,7 +2703,6 @@
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"license": "MIT",
- "peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -2968,6 +2969,15 @@
"node": ">=8"
}
},
+ "node_modules/buffer-crc32": {
+ "version": "0.2.13",
+ "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
+ "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
+ "license": "MIT",
+ "engines": {
+ "node": "*"
+ }
+ },
"node_modules/buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
@@ -3473,7 +3483,6 @@
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
"license": "MIT",
- "peer": true,
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
@@ -5148,12 +5157,17 @@
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"license": "MIT"
},
+ "node_modules/pend": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
+ "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
+ "license": "MIT"
+ },
"node_modules/pg": {
"version": "8.16.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz",
"integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==",
"license": "MIT",
- "peer": true,
"dependencies": {
"pg-connection-string": "^2.9.1",
"pg-pool": "^3.10.1",
@@ -6171,7 +6185,6 @@
"integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==",
"dev": true,
"license": "Apache-2.0",
- "peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -6299,7 +6312,6 @@
"resolved": "https://registry.npmjs.org/winston/-/winston-3.17.0.tgz",
"integrity": "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==",
"license": "MIT",
- "peer": true,
"dependencies": {
"@colors/colors": "^1.6.0",
"@dabh/diagnostics": "^2.0.2",
@@ -6464,6 +6476,19 @@
"node": ">= 6"
}
},
+ "node_modules/yauzl": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-3.2.0.tgz",
+ "integrity": "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w==",
+ "license": "MIT",
+ "dependencies": {
+ "buffer-crc32": "~0.2.3",
+ "pend": "~1.2.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
"node_modules/yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
diff --git a/package.json b/package.json
index 9d096f3..a841069 100644
--- a/package.json
+++ b/package.json
@@ -16,6 +16,7 @@
"@sendgrid/mail": "^8.1.6",
"@sentry/node": "^10.19.0",
"@types/multer": "^2.0.0",
+ "@types/yauzl": "^2.10.3",
"adm-zip": "^0.5.16",
"apple-signin-auth": "^2.0.0",
"bcryptjs": "^3.0.2",
@@ -38,7 +39,8 @@
"swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.1",
"uuid": "^9.0.0",
- "winston": "^3.10.0"
+ "winston": "^3.10.0",
+ "yauzl": "^3.2.0"
},
"devDependencies": {
"@types/adm-zip": "^0.5.7",
diff --git a/src/index.ts b/src/index.ts
index 3555ecf..5182aaa 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -28,13 +28,13 @@ const getLogLevel = (): string => {
}
return config.isProduction() ? 'info' : 'debug';
};
-import {
- securityHeaders,
- corsConfig,
- requestSizeLimit,
+import {
+ securityHeaders,
+ corsConfig,
+ requestSizeLimit,
securityLogger,
apiRateLimit,
- progressiveSlowDown
+ progressiveSlowDown
} from './middleware/security';
import { requestLogger } from './middleware/requestLogger';
@@ -107,7 +107,7 @@ app.get('/', (req: Request, res: Response) => {
// For comprehensive metrics, use /monitoring/health
app.get('/health', async (req: Request, res: Response) => {
const startTime = Date.now();
-
+
try {
await postgresService.query('SELECT 1');
const dbLatency = Date.now() - startTime;
@@ -145,15 +145,18 @@ app.get('/health', async (req: Request, res: Response) => {
}
});
+import { encode } from 'html-entities';
+
// OAuth callback handler
app.get('/auth/callback', (req: Request, res: Response) => {
const code = req.query.code;
- if (code) {
+ if (code && typeof code === 'string') {
+ const sanitizedCode = encode(code);
res.send(`
✅ Authorization Successful!
- Authorization code: ${code}
+ Authorization code: ${sanitizedCode}
Copy this code and paste it into your terminal where the script is waiting.
@@ -189,10 +192,10 @@ async function startServer() {
try {
// Initialize database tables
await postgresService.initializeTables();
-
+
// Initialize Redis cache (optional, won't fail if unavailable)
await redisConfig.initialize();
-
+
// Start the server
const server = app.listen(PORT, () => {
logger.info('Server started', {
@@ -208,18 +211,18 @@ async function startServer() {
// Graceful shutdown handling
const gracefulShutdown = async (signal: string) => {
logger.info(`${signal} received, starting graceful shutdown...`);
-
+
server.close(async () => {
logger.info('HTTP server closed');
-
+
// Close database connections
await postgresService.close();
logger.info('Database connections closed');
-
+
// Close Redis connection
await redisConfig.close();
logger.info('Redis connection closed');
-
+
logger.info('Graceful shutdown complete');
process.exit(0);
});
@@ -233,7 +236,7 @@ async function startServer() {
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
-
+
} catch (error) {
logger.error('Failed to start server', {
error: error instanceof Error ? error.message : String(error),
diff --git a/src/middleware/concurrencyLimiter.ts b/src/middleware/concurrencyLimiter.ts
new file mode 100644
index 0000000..9a66a40
--- /dev/null
+++ b/src/middleware/concurrencyLimiter.ts
@@ -0,0 +1,42 @@
+import { Request, Response, NextFunction } from 'express';
+
+const MAX_CONCURRENT_REQUESTS = 100;
+let activeRequests = 0;
+const requestQueue: Array<() => void> = [];
+
+/**
+ * Simple concurrency limiter for Express routes.
+ * Queues requests when the maximum number of active handlers is reached.
+ */
+export function concurrencyLimiter(req: Request, res: Response, next: NextFunction): void {
+ const start = () => {
+ activeRequests++;
+ let released = false;
+
+ const release = () => {
+ if (released) return;
+ released = true;
+ activeRequests = Math.max(0, activeRequests - 1);
+ const nextInQueue = requestQueue.shift();
+ if (nextInQueue) {
+ nextInQueue();
+ }
+ };
+
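+    // Note: Express can emit both 'finish' and 'close' for the same response, so the
+    // released flag keeps the counter decrement idempotent.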
+ res.on('finish', release);
+ res.on('close', release);
+
+ // If the request was already aborted while waiting, release immediately
+ if (req.destroyed || req.aborted) {
+ return release();
+ }
+
+ next();
+ };
+
+ if (activeRequests < MAX_CONCURRENT_REQUESTS) {
+ start();
+ } else {
+ requestQueue.push(start);
+ }
+}
\ No newline at end of file
diff --git a/src/models/Token.ts b/src/models/Token.ts
index 3042be8..b025d56 100644
--- a/src/models/Token.ts
+++ b/src/models/Token.ts
@@ -8,6 +8,7 @@ export interface TokenPayload {
type: 'access' | 'refresh';
iat?: number; // issued at
exp?: number; // expiration
+ jti?: string; // JWT ID for revocation
}
export interface AuthTokens {
diff --git a/src/routes/auth.ts b/src/routes/auth.ts
index 089795e..6d1b726 100644
--- a/src/routes/auth.ts
+++ b/src/routes/auth.ts
@@ -346,12 +346,15 @@ router.post('/logout', asyncHandler(async (req: Request, res: Response) => {
// In production, you might want to maintain a blacklist of invalidated tokens
// For now, client should simply discard the tokens
- // TODO: Implement token blacklist/revocation in Phase 2
- // For now, just return success
+  // Revoke the refresh token by blacklisting its JTI (see AuthService.revokeRefreshToken)
+ if (refreshToken) {
+ const authService = container.get('authService');
+ await authService.revokeRefreshToken(refreshToken);
+ }
res.status(200).json({
success: true,
- message: 'Logged out successfully. Please discard your tokens.'
+ message: 'Logged out successfully.'
});
}));
diff --git a/src/routes/csv.ts b/src/routes/csv.ts
index 2455c33..6b3dc24 100644
--- a/src/routes/csv.ts
+++ b/src/routes/csv.ts
@@ -1,21 +1,35 @@
import { Router, Request, Response } from 'express';
import multer from 'multer';
+import fs from 'fs';
+import os from 'os';
+import path from 'path';
import { container } from '../services/core/ServiceContainer';
import { csvImportRateLimit } from '../middleware/security';
import { authenticateToken, validateOwnership } from '../middleware/auth';
import { ZipExtractor } from '../services/import/ZipExtractor';
import { ValidationError } from '../middleware/errorHandler';
import { cacheService } from '../services/core/CacheService';
+import { concurrencyLimiter } from '../middleware/concurrencyLimiter';
const router = Router();
const csvImportService = container.csvImportService;
const zipExtractor = new ZipExtractor();
-// Configure multer for file uploads (CSV and ZIP)
+// Configure multer for file uploads (CSV and ZIP) with Disk Storage
+// Saves to temp directory to prevent RAM exhaustion
const upload = multer({
- storage: multer.memoryStorage(),
+ storage: multer.diskStorage({
+ destination: (req, file, cb) => {
+ cb(null, os.tmpdir());
+ },
+ filename: (req, file, cb) => {
+ // Generate a unique filename
+ const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
+ cb(null, file.fieldname + '-' + uniqueSuffix + path.extname(file.originalname));
+ }
+ }),
limits: {
- fileSize: 50 * 1024 * 1024 // 50MB max
+ fileSize: 50 * 1024 * 1024 // 50MB max file size on disk
},
fileFilter: (req, file, cb) => {
const allowedMimeTypes = [
@@ -24,10 +38,9 @@ const upload = multer({
'application/x-zip-compressed',
'application/octet-stream' // Sometimes ZIP files are sent as octet-stream
];
-
+
const allowedExtensions = ['.csv', '.zip'];
- const fileExtension = file.originalname.toLowerCase().slice(-4);
-
+
if (allowedMimeTypes.includes(file.mimetype) || allowedExtensions.some(ext => file.originalname.toLowerCase().endsWith(ext))) {
cb(null, true);
} else {
@@ -110,118 +123,139 @@ const upload = multer({
* application/json:
* schema:
* $ref: '#/components/schemas/Error'
+ * 413:
+ * description: Payload Too Large
*/
// POST /csv/import - Import parsed CSV or ZIP data to database
-router.post('/import', authenticateToken, validateOwnership, csvImportRateLimit, upload.single('csvFile'), async (req: Request, res: Response) => {
- try {
+// Note on middleware order: upload.single MUST be before validateOwnership because
+// validateOwnership reads req.body.userId, which is only populated by multer after parsing.
+router.post('/import',
+ authenticateToken,
+ csvImportRateLimit,
+ concurrencyLimiter,
+ upload.single('csvFile'),
+ validateOwnership,
+ async (req: Request, res: Response) => {
+
+ // Ensure file was uploaded
if (!req.file) {
- return res.status(400).json({
- error: 'No file uploaded. Please upload a CSV or ZIP file.'
+ return res.status(400).json({
+ error: 'No file uploaded. Please upload a CSV or ZIP file.'
});
}
- const userId = req.body.userId;
- if (!userId) {
- return res.status(400).json({ error: 'userId is required' });
- }
+ const filePath = req.file.path;
+
+ try {
+ const userId = req.body.userId;
+ if (!userId) {
+        // validateOwnership should already have rejected a missing userId; this is a defensive check
+ return res.status(400).json({ error: 'userId is required' });
+ }
+
+ let csvBuffer: Buffer;
+ let fileName = req.file.originalname;
+
+ // Check if file is a ZIP (using new Promise-based check from disk)
+ const isZip = await zipExtractor.isZipFile(filePath);
- let csvBuffer: Buffer;
- let fileName = req.file.originalname;
-
- // Check if file is a ZIP
- const isZip = zipExtractor.isZipFile(req.file.buffer) ||
- fileName.toLowerCase().endsWith('.zip');
-
- if (isZip) {
- console.log(`📦 Processing ZIP file: ${fileName}`);
-
- // Validate ZIP file
- zipExtractor.validateZipFile(req.file.buffer, 50);
-
- // Extract CSV from ZIP (auto-detects platform)
- try {
- const extractedFile = zipExtractor.extractCSV(req.file.buffer);
- csvBuffer = extractedFile.content;
- fileName = extractedFile.filename;
-
- const platform = extractedFile.platform === 'uber' ? 'Uber Eats' :
- extractedFile.platform === 'doordash' ? 'DoorDash' : 'Unknown';
- console.log(`✅ Extracted ${platform} CSV from ZIP: ${extractedFile.path}`);
- } catch (error) {
- if (error instanceof ValidationError) {
- return res.status(400).json({
- error: error.message,
- hint: 'Make sure you uploaded the complete Uber Eats or DoorDash data export ZIP file'
- });
+ if (isZip) {
+ console.log(`📦 Processing ZIP file: ${fileName}`);
+
+ // Extract CSV from ZIP (Streaming from disk, with zip bomb protection)
+ // No need to call validateZipFile separately as extractCSV handles it
+ try {
+ const extractedFile = await zipExtractor.extractCSV(filePath);
+ csvBuffer = extractedFile.content;
+ fileName = extractedFile.filename;
+
+ const platform = extractedFile.platform === 'uber' ? 'Uber Eats' :
+ extractedFile.platform === 'doordash' ? 'DoorDash' : 'Unknown';
+ console.log(`✅ Extracted ${platform} CSV from ZIP: ${extractedFile.path}`);
+ } catch (error: any) {
+ if (error instanceof ValidationError) {
+ return res.status(400).json({
+ error: error.message,
+ hint: 'Make sure you uploaded the complete Uber Eats or DoorDash data export ZIP file'
+ });
+ }
+ throw error;
}
- throw error;
+ } else {
+ console.log(`📄 Processing CSV file: ${fileName}`);
+ // For CSV files, we read from disk
+ csvBuffer = await fs.promises.readFile(filePath);
}
- } else {
- console.log(`📄 Processing CSV file: ${fileName}`);
- csvBuffer = req.file.buffer;
- }
- // Validate CSV format (auto-detects Uber Eats or DoorDash)
- const validation = csvImportService.validateCsvFormat(csvBuffer);
- if (!validation.valid) {
- const format = csvImportService.detectCsvFormat(csvBuffer);
- const platformHint = format === 'unknown'
- ? 'Uber Eats or DoorDash'
- : format === 'uber'
- ? 'Uber Eats'
- : 'DoorDash';
-
- return res.status(400).json({
- error: 'Invalid CSV format',
- details: validation.errors,
- hint: `Please download a fresh export from ${platformHint}`
- });
- }
+ // Validate CSV format (auto-detects Uber Eats or DoorDash)
+ const validation = csvImportService.validateCsvFormat(csvBuffer);
+ if (!validation.valid) {
+ const format = csvImportService.detectCsvFormat(csvBuffer);
+ const platformHint = format === 'unknown'
+ ? 'Uber Eats or DoorDash'
+ : format === 'uber'
+ ? 'Uber Eats'
+ : 'DoorDash';
- // Process CSV synchronously
- const importResult = await csvImportService.parseCsvFile(csvBuffer, userId);
-
- // Check if we have any valid receipts to import
- if (importResult.receipts.length === 0) {
- return res.status(400).json({
- error: 'No valid orders found in file',
- details: importResult.errors.length > 0 ? importResult.errors : ['File contains no valid order data'],
- hint: 'Please ensure your data export includes completed orders. If you just placed an order, wait a few minutes for it to appear in your data export.'
+ return res.status(400).json({
+ error: 'Invalid CSV format',
+ details: validation.errors,
+ hint: `Please download a fresh export from ${platformHint}`
+ });
+ }
+
+ // Process CSV synchronously (in-memory is fine for the CSV content itself if < 50MB)
+ // Future improvement: Stream CSV parsing directly to DB to avoid large buffer in RAM
+ const importResult = await csvImportService.parseCsvFile(csvBuffer, userId);
+
+ // Check if we have any valid receipts to import
+ if (importResult.receipts.length === 0) {
+ return res.status(400).json({
+ error: 'No valid orders found in file',
+ details: importResult.errors.length > 0 ? importResult.errors : ['File contains no valid order data'],
+ hint: 'Please ensure your data export includes completed orders. If you just placed an order, wait a few minutes for it to appear in your data export.'
+ });
+ }
+
+ if (importResult.success && importResult.receipts.length > 0) {
+ await csvImportService.importReceipts(importResult.receipts, userId);
+
+ // Invalidate cached analytics since user data changed
+ await cacheService.invalidateAllUserCaches(userId);
+
+ console.log(`✅ Imported ${importResult.totalReceipts} receipts for user ${userId}`);
+ }
+
+ res.json({
+ message: isZip
+ ? 'ZIP file processed and receipts imported successfully'
+ : 'CSV imported successfully',
+ importedCount: importResult.totalReceipts,
+ totalAmount: importResult.totalAmount,
+ fileType: isZip ? 'zip' : 'csv'
});
- }
-
- if (importResult.success && importResult.receipts.length > 0) {
- await csvImportService.importReceipts(importResult.receipts, userId);
-
- // Invalidate cached analytics since user data changed
- await cacheService.invalidateAllUserCaches(userId);
-
- console.log(`✅ Imported ${importResult.totalReceipts} receipts for user ${userId}`);
- }
-
- res.json({
- message: isZip
- ? 'ZIP file processed and receipts imported successfully'
- : 'CSV imported successfully',
- importedCount: importResult.totalReceipts,
- totalAmount: importResult.totalAmount,
- fileType: isZip ? 'zip' : 'csv'
- });
-
- } catch (error) {
- console.error('File import error:', error);
-
- if (error instanceof ValidationError) {
- return res.status(400).json({
- error: error.message
+
+ } catch (error) {
+ console.error('File import error:', error);
+
+ if (error instanceof ValidationError) {
+ return res.status(400).json({
+ error: error.message
+ });
+ }
+
+ res.status(500).json({
+ error: 'Failed to import file',
+ details: error instanceof Error ? error.message : 'Unknown error'
});
+ } finally {
+ // Critical: Clean up temp file
+ if (req.file && req.file.path) {
+ fs.unlink(req.file.path, (err) => {
+ if (err) console.error('Failed to delete temp file:', err);
+ });
+ }
}
-
- res.status(500).json({
- error: 'Failed to import file',
- details: error instanceof Error ? error.message : 'Unknown error'
- });
- }
-});
+ });
export default router;
\ No newline at end of file
diff --git a/src/services/AuthService.ts b/src/services/AuthService.ts
index 43597e1..6af2a35 100644
--- a/src/services/AuthService.ts
+++ b/src/services/AuthService.ts
@@ -15,6 +15,7 @@ import { v4 as uuidv4 } from 'uuid';
import { OAuth2Client } from 'google-auth-library';
import { OAuthRepository } from './data/OAuthRepository';
import appleSignin from 'apple-signin-auth';
+import { redisConfig } from '../config/redis';
// ... imports
@@ -134,7 +135,7 @@ export class AuthService {
// Verify the identity token with Apple
// Support multiple client IDs: production (com.snacktrack.mobile) and Expo Go (host.exp.Exponent)
let appleClientIds: string[] = [];
-
+
if (process.env.APPLE_CLIENT_IDS) {
// Use explicit list if provided
appleClientIds = process.env.APPLE_CLIENT_IDS.split(',').map(id => id.trim()).filter(Boolean);
@@ -164,7 +165,7 @@ export class AuthService {
}
const appleUserId = appleData.sub; // Apple's unique user identifier
-
+
// Apple only provides email on first sign-in or if available in token
let email = appleData.email || userData?.email;
@@ -175,7 +176,7 @@ export class AuthService {
if (oauthAccount) {
// Existing user - retrieve their account
user = await this.userRepository.findById(oauthAccount.userId);
-
+
// Use cached email from oauth_account if not provided in current token
if (!email && oauthAccount.email) {
email = oauthAccount.email;
@@ -255,22 +256,24 @@ export class AuthService {
type: 'access'
};
- const refreshPayload: TokenPayload = {
- userId: user.id,
- email: user.email,
- type: 'refresh'
- };
-
const accessToken = jwt.sign(
accessPayload,
authConfig.getJWTSecret(),
{ expiresIn: '15m' } // 15 minutes for access token
);
+ const refreshJti = uuidv4();
+ const refreshPayload: TokenPayload = {
+ userId: user.id,
+ email: user.email,
+ type: 'refresh',
+ jti: refreshJti
+ };
+
const refreshToken = jwt.sign(
refreshPayload,
authConfig.getRefreshSecret(),
- { expiresIn: '7d' } // 7 days for refresh token
+ { expiresIn: '7d' }
);
return { accessToken, refreshToken };
@@ -322,6 +325,47 @@ export class AuthService {
}
}
+ /**
+   * Verify a refresh token and check the revocation blacklist
+ */
+  async verifyRefreshTokenWithBlacklist(token: string): Promise<TokenPayload> {
+ const decoded = this.verifyRefreshToken(token);
+
+ // Check blacklist if Redis is enabled
+ if (decoded.jti && redisConfig.isAvailable()) {
+ const isBlacklisted = await redisConfig.exists(`blacklist:refresh:${decoded.jti}`);
+ if (isBlacklisted) {
+ throw new AuthenticationError('Refresh token has been revoked');
+ }
+ }
+
+ return decoded;
+ }
+
+ /**
+ * Revoke a refresh token by adding its JTI to the blacklist
+ */
+  async revokeRefreshToken(token: string): Promise<void> {
+ try {
+      // Verify the token before blacklisting it: if the signature cannot be validated,
+      // the JTI cannot be trusted either, so only tokens we actually issued get revoked.
+ const decoded = this.verifyRefreshToken(token);
+
+ if (decoded.jti && decoded.exp && redisConfig.isAvailable()) {
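+        // The blacklist entry only needs to live as long as the token itself; after exp,
+        // signature verification rejects the token anyway.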
+ const ttl = decoded.exp - Math.floor(Date.now() / 1000);
+ if (ttl > 0) {
+ await redisConfig.set(`blacklist:refresh:${decoded.jti}`, 'revoked', ttl);
+ console.log(`🚫 Revoked refresh token for user ${decoded.userId} (JTI: ${decoded.jti})`);
+ }
+ }
+ } catch (error) {
+ // Ignore errors during revocation (e.g. token already expired)
+ console.warn('Error revoking token (might be already expired):', error);
+ }
+ }
+
/**
* Register a new user with email and password
*/
@@ -431,8 +475,8 @@ export class AuthService {
* Refresh access token using a valid refresh token
*/
   async refreshAccessToken(refreshToken: string): Promise<AuthTokens> {
- // Verify refresh token
- const decoded = this.verifyRefreshToken(refreshToken);
+ // Verify refresh token and check blacklist
+ const decoded = await this.verifyRefreshTokenWithBlacklist(refreshToken);
// Find user
const user = await this.userRepository.findById(decoded.userId);
diff --git a/src/services/import/ZipExtractor.ts b/src/services/import/ZipExtractor.ts
index 84ed5b2..80b6091 100644
--- a/src/services/import/ZipExtractor.ts
+++ b/src/services/import/ZipExtractor.ts
@@ -4,9 +4,18 @@
* Extracts CSV files from ZIP archives for multiple platforms:
* - Uber Eats: [path]/Uber Data/Eats/user_orders-0.csv
* - DoorDash: [path]/data_archive/consumer_order_details.csv
+ *
+ * Security measures implemented:
+ * - Zip Slip protection (path traversal prevention)
+ * - Zip Bomb protection (size, file count, compression ratio limits)
+ * - Symlink detection and rejection
+ * - Sanitized error messages
+ * - Basic CSV content validation
*/
-import AdmZip from 'adm-zip';
+import yauzl from 'yauzl';
+import fs from 'fs';
+import path from 'path';
import { ValidationError } from '../../middleware/errorHandler';
export interface ExtractedFile {
@@ -19,285 +28,300 @@ export interface ExtractedFile {
export type Platform = 'uber' | 'doordash' | 'unknown';
export class ZipExtractor {
+ // Configurable limits
+ private static MAX_UNCOMPRESSED_SIZE = 100 * 1024 * 1024; // 100MB limit for uncompressed data
+ private static MAX_FILE_COUNT = 1000; // Limit number of files in zip to scan
+ private static MAX_COMPRESSION_RATIO = 100; // Maximum allowed compression ratio (uncompressed/compressed)
+
/**
- * Detect platform from ZIP file structure
+ * Validate file path to prevent Zip Slip attacks
+ * Checks for path traversal, absolute paths, and encoded variants
*/
- detectPlatform(zipBuffer: Buffer): Platform {
+ private isPathSafe(fileName: string): boolean {
+ // Normalize the path to resolve any . or .. segments
+ const normalizedPath = path.normalize(fileName);
+
+ // Check for path traversal attempts
+ if (normalizedPath.startsWith('..') || normalizedPath.includes('/..') || normalizedPath.includes('\\..')) {
+ return false;
+ }
+
+ // Check for absolute paths (Unix and Windows)
+ if (path.isAbsolute(normalizedPath) || /^[a-zA-Z]:/.test(normalizedPath)) {
+ return false;
+ }
+
+ // Check for URL-encoded path traversal attempts
+ let decodedPath: string;
try {
- const zip = new AdmZip(zipBuffer);
- const zipEntries = zip.getEntries();
-
- // Check for DoorDash pattern: consumer_order_details.csv or consumer_profile_details.csv
- // (profile_details is sent when account has no orders yet)
- const doorDashEntry = zipEntries.find(entry => {
- const path = entry.entryName.toLowerCase();
- return (
- (path.includes('consumer_order_details') || path.includes('consumer_profile_details')) &&
- path.endsWith('.csv') &&
- !entry.isDirectory
- );
- });
-
- if (doorDashEntry) {
- return 'doordash';
- }
-
- // Check for Uber pattern: Uber Data/Eats/user_orders-0.csv
- const uberEntry = zipEntries.find(entry => {
- const path = entry.entryName.toLowerCase();
- return (
- (path.includes('uber data/eats/') && path.endsWith('user_orders-0.csv')) ||
- (path.includes('eats') && path.includes('user_orders') && path.endsWith('.csv'))
- ) && !entry.isDirectory;
- });
-
- if (uberEntry) {
- return 'uber';
- }
-
- // Check for Uber ZIP structure even if CSV doesn't exist (account with no orders)
- // Look for "Uber Data" folder structure
- const hasUberStructure = zipEntries.some(entry => {
- const path = entry.entryName.toLowerCase();
- return path.includes('uber data') || path.includes('uber_data');
- });
-
- if (hasUberStructure) {
- return 'uber';
- }
-
- return 'unknown';
- } catch (error) {
- return 'unknown';
+ decodedPath = decodeURIComponent(fileName);
+ } catch {
+ return false;
}
+ if (decodedPath.includes('..') || decodedPath !== fileName && path.normalize(decodedPath).startsWith('..')) {
+ return false;
+ }
+
+ // Check for backslash variants (Windows-style paths in Unix context)
+ if (fileName.includes('..\\') || fileName.includes('\\..')) {
+ return false;
+ }
+
+ return true;
}
/**
- * Extract CSV from ZIP based on detected platform
+ * Check if entry is a symbolic link based on external file attributes
+ * Unix symlinks have mode 0120000 (octal) in the high 16 bits
*/
- extractCSV(zipBuffer: Buffer): ExtractedFile {
- const platform = this.detectPlatform(zipBuffer);
-
- if (platform === 'doordash') {
- return this.extractDoorDashCSV(zipBuffer);
- } else if (platform === 'uber') {
- return this.extractUberEatsCSV(zipBuffer);
- } else {
- throw new ValidationError(
- 'Could not detect platform. Expected Uber Eats or DoorDash data export ZIP file.',
- 'file'
- );
- }
+ private isSymlink(entry: yauzl.Entry): boolean {
+ // External file attributes: high 16 bits contain Unix mode
+ // Symlink mode is 0120000 (octal) = 40960 (decimal)
+ const unixMode = (entry.externalFileAttributes >> 16) & 0xFFFF;
+ const S_IFLNK = 0o120000; // Symbolic link file type
+ return (unixMode & 0o170000) === S_IFLNK;
}
/**
- * Extract CSV from DoorDash data ZIP
- *
- * Searches for either:
- * - consumer_order_details.csv (contains order data)
- * - consumer_profile_details.csv (profile data, may not have orders yet)
- *
- * Both are valid DoorDash exports. If profile_details doesn't have order data,
- * the CSV parser will detect this and return an appropriate error.
+ * Check compression ratio to detect potential zip bombs
*/
- extractDoorDashCSV(zipBuffer: Buffer): ExtractedFile {
- try {
- const zip = new AdmZip(zipBuffer);
- const zipEntries = zip.getEntries();
-
- // Search for either consumer_order_details.csv or consumer_profile_details.csv
- // (can be in data_archive/ or at root)
- const csvEntry = zipEntries.find(entry => {
- const path = entry.entryName.toLowerCase();
-
- // Match pattern: consumer_order_details.csv or consumer_profile_details.csv
- return (
- (path.includes('consumer_order_details') || path.includes('consumer_profile_details')) &&
- path.endsWith('.csv') &&
- !entry.isDirectory
- );
- });
-
- if (!csvEntry) {
- throw new ValidationError(
- 'Could not find DoorDash CSV in ZIP file. Expected file: consumer_order_details.csv or consumer_profile_details.csv',
- 'file'
- );
- }
-
- const content = csvEntry.getData();
-
- // Validate content is not empty
- if (!content || content.length === 0) {
- throw new ValidationError('CSV file is empty', 'file');
- }
-
- return {
- content,
- filename: csvEntry.entryName.split('/').pop() || 'consumer_order_details.csv',
- path: csvEntry.entryName,
- platform: 'doordash'
- };
-
- } catch (error: any) {
- if (error instanceof ValidationError) {
- throw error;
- }
-
- // Handle ZIP corruption or invalid format
- if (error.message?.includes('invalid') || error.message?.includes('corrupt')) {
- throw new ValidationError('ZIP file is corrupted or invalid', 'file');
- }
-
- throw new ValidationError(
- `Failed to extract ZIP file: ${error.message}`,
- 'file'
- );
+ private isCompressionRatioSafe(entry: yauzl.Entry): boolean {
+ // If compressed size is 0 or very small, be cautious
+ if (entry.compressedSize <= 0) {
+ // Allow if uncompressed size is also small (empty or near-empty files)
+ return entry.uncompressedSize <= 1024;
}
+
+ const ratio = entry.uncompressedSize / entry.compressedSize;
+ return ratio <= ZipExtractor.MAX_COMPRESSION_RATIO;
}
/**
- * Extract user_orders-0.csv from Uber data ZIP
- *
- * Searches for the file in the Uber data structure:
- * Uber Data Request {hash}/Uber Data/Eats/user_orders-0.csv
+ * Basic validation that content appears to be CSV
+ * Checks for printable ASCII/UTF-8 and common CSV patterns
*/
- extractUberEatsCSV(zipBuffer: Buffer): ExtractedFile {
- try {
- const zip = new AdmZip(zipBuffer);
- const zipEntries = zip.getEntries();
-
- // Search for user_orders-0.csv in Uber Data/Eats/ directory
- const csvEntry = zipEntries.find(entry => {
- const path = entry.entryName.toLowerCase();
-
- // Match pattern: [any path]/uber data/eats/user_orders-0.csv
- return (
- path.includes('uber data/eats/') &&
- path.endsWith('user_orders-0.csv') &&
- !entry.isDirectory
- );
- });
-
- if (!csvEntry) {
- // Try alternative patterns
- const alternativeCsvEntry = zipEntries.find(entry => {
- const path = entry.entryName.toLowerCase();
- return (
- path.includes('eats') &&
- path.includes('user_orders') &&
- path.endsWith('.csv') &&
- !entry.isDirectory
- );
- });
-
- if (alternativeCsvEntry) {
- const content = alternativeCsvEntry.getData();
- return {
- content,
- filename: alternativeCsvEntry.entryName.split('/').pop() || 'user_orders.csv',
- path: alternativeCsvEntry.entryName,
- platform: 'uber'
- };
- }
-
- // No CSV found - this is a valid Uber ZIP but account has no orders
- // Create an empty CSV with headers so the parser can handle "no orders" case
- const emptyCsv = 'Restaurant_Name,Request_Time_Local,Order_Status,Item_Name,Item_quantity,Item_Price,Order_Price\n';
- return {
- content: Buffer.from(emptyCsv),
- filename: 'user_orders-0.csv',
- path: 'Uber Data/Eats/user_orders-0.csv',
- platform: 'uber'
- };
- }
-
- const content = csvEntry.getData();
-
- // Validate content is not empty
- if (!content || content.length === 0) {
- throw new ValidationError('CSV file is empty', 'file');
- }
-
- return {
- content,
- filename: csvEntry.entryName.split('/').pop() || 'user_orders-0.csv',
- path: csvEntry.entryName,
- platform: 'uber'
- };
-
- } catch (error: any) {
- if (error instanceof ValidationError) {
- throw error;
- }
-
- // Handle ZIP corruption or invalid format
- if (error.message?.includes('invalid') || error.message?.includes('corrupt')) {
- throw new ValidationError('ZIP file is corrupted or invalid', 'file');
- }
-
- throw new ValidationError(
- `Failed to extract ZIP file: ${error.message}`,
- 'file'
- );
+ private isValidCSVContent(content: Buffer): boolean {
+ if (content.length === 0) {
+ return false;
+ }
+
+ // Check first 1KB for basic CSV characteristics
+ const sample = content.slice(0, 1024).toString('utf-8');
+
+ // Check for binary content (non-printable characters except common whitespace)
+ const nonPrintableRegex = /[\x00-\x08\x0B\x0C\x0E-\x1F]/;
+ if (nonPrintableRegex.test(sample)) {
+ return false;
}
+
+ // Check for at least one comma or newline (basic CSV structure)
+ if (!sample.includes(',') && !sample.includes('\n')) {
+ return false;
+ }
+
+ return true;
}
/**
- * Check if a buffer is a ZIP file based on magic bytes
+ * Sanitize filename for error messages to prevent information disclosure
*/
- isZipFile(buffer: Buffer): boolean {
- // ZIP files start with 'PK' (0x504B)
- if (buffer.length < 4) {
- return false;
+ private sanitizeFilenameForError(fileName: string): string {
+ // Only show the base filename, not the full path
+ const baseName = path.basename(fileName);
+ // Truncate if too long
+ if (baseName.length > 50) {
+ return baseName.substring(0, 47) + '...';
}
+ return baseName;
+ }
+
+ /**
+ * Extract CSV from ZIP based on detected platform (Streaming)
+   * Prevents zip bombs by checking the uncompressed size and the compression ratio.
+ */
+  async extractCSV(filePath: string): Promise<ExtractedFile> {
+ return new Promise((resolve, reject) => {
+ // open with lazyEntries: true to read sequentially
+ yauzl.open(filePath, { lazyEntries: true }, (err, zipfile) => {
+ if (err) {
+ return reject(new ValidationError('Failed to open ZIP file', 'file'));
+ }
+ if (!zipfile) {
+ return reject(new ValidationError('Failed to open ZIP file', 'file'));
+ }
+
+ let foundEntry: yauzl.Entry | null = null;
+ let platform: Platform = 'unknown';
+ let entriesCount = 0;
+
+ zipfile.readEntry();
+
+ zipfile.on('entry', (entry: yauzl.Entry) => {
+ entriesCount++;
+ if (entriesCount > ZipExtractor.MAX_FILE_COUNT) {
+ zipfile.close();
+ return reject(new ValidationError('ZIP file contains too many files', 'file'));
+ }
+
+ // Security: Check for malicious paths (Zip Slip) - comprehensive check
+ if (!this.isPathSafe(entry.fileName)) {
+ zipfile.close();
+ return reject(new ValidationError('Invalid file path detected in ZIP', 'file'));
+ }
+
+ // Security: Check for symbolic links
+ if (this.isSymlink(entry)) {
+ zipfile.close();
+ return reject(new ValidationError('Symbolic links are not allowed in ZIP', 'file'));
+ }
+
+ const entryPath = entry.fileName.toLowerCase();
+
+ // Logic to find the correct file
+ let isMatch = false;
+
+ // DoorDash match
+ if (
+ (entryPath.includes('consumer_order_details') || entryPath.includes('consumer_profile_details')) &&
+ entryPath.endsWith('.csv') &&
+ !entryPath.endsWith('/')
+ ) {
+ platform = 'doordash';
+ isMatch = true;
+ }
+ // Uber match
+ else if (
+ ((entryPath.includes('uber data/eats/') && entryPath.endsWith('user_orders-0.csv')) ||
+ (entryPath.includes('eats') && entryPath.includes('user_orders') && entryPath.endsWith('.csv'))) &&
+ !entryPath.endsWith('/')
+ ) {
+ platform = 'uber';
+ isMatch = true;
+ }
+
+ if (isMatch) {
+ foundEntry = entry;
+
+ // Zip Bomb Check: Uncompressed size
+ if (entry.uncompressedSize > ZipExtractor.MAX_UNCOMPRESSED_SIZE) {
+ zipfile.close();
+                return reject(new ValidationError('File exceeds maximum allowed size (100MB)', 'file'));
+ }
+
+ // Zip Bomb Check: Compression ratio
+ if (!this.isCompressionRatioSafe(entry)) {
+ zipfile.close();
+ return reject(new ValidationError('Suspicious compression ratio detected', 'file'));
+ }
+
+ // Extract this entry
+ zipfile.openReadStream(entry, (err, readStream) => {
+ if (err || !readStream) {
+ return reject(new ValidationError('Failed to read file from ZIP', 'file'));
+ }
+
+ const chunks: Buffer[] = [];
+ let size = 0;
+
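+              // Enforce the size limit on the bytes actually decompressed as well; the
+              // uncompressedSize recorded in the entry header can be forged.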
+ readStream.on('data', (chunk: Buffer) => {
+ size += chunk.length;
+ if (size > ZipExtractor.MAX_UNCOMPRESSED_SIZE) {
+ readStream.destroy();
+ zipfile.close();
+ return reject(new ValidationError('Extracted file exceeds size limit', 'file'));
+ }
+ chunks.push(chunk);
+ });
+
+ readStream.on('end', () => {
+ zipfile.close();
+ const content = Buffer.concat(chunks);
+
+                // Uber exports for accounts with no orders can contain an empty CSV
+ if (content.length === 0 && platform === 'uber') {
+ // Return empty structure for Uber to handle "no orders" case gracefully
+ const emptyCsv = 'Restaurant_Name,Request_Time_Local,Order_Status,Item_Name,Item_quantity,Item_Price,Order_Price\n';
+ return resolve({
+ content: Buffer.from(emptyCsv),
+ filename: 'user_orders-0.csv',
+ path: 'Uber Data/Eats/user_orders-0.csv',
+ platform: 'uber'
+ });
+ }
+
+ if (content.length === 0) {
+ return reject(new ValidationError('CSV file is empty', 'file'));
+ }
+
+ // Security: Validate CSV content
+ if (!this.isValidCSVContent(content)) {
+ return reject(new ValidationError('File does not appear to be valid CSV content', 'file'));
+ }
+
+ resolve({
+ content,
+ filename: this.sanitizeFilenameForError(entry.fileName),
+ path: entry.fileName,
+ platform: platform !== 'unknown' ? platform : undefined
+ });
+ });
+
+ readStream.on('error', () => {
+ reject(new ValidationError('Error reading file from ZIP', 'file'));
+ });
+ });
+ } else {
+ // Continue reading next entry
+ zipfile.readEntry();
+ }
+ });
- return buffer[0] === 0x50 && buffer[1] === 0x4B;
+ zipfile.on('end', () => {
+ if (!foundEntry) {
+            // Unlike the previous AdmZip implementation, there is no fallback that accepts an
+            // "Uber Data" folder without a CSV inside; reaching the end of the archive without
+            // a matching entry is treated as an invalid export.
+ return reject(new ValidationError('Could not find Uber Eats or DoorDash CSV in ZIP file', 'file'));
+ }
+ });
+
+ zipfile.on('error', (err) => {
+ reject(new ValidationError('ZIP file corrupted or unreadable', 'file'));
+ });
+ });
+ });
}
/**
- * Get list of all entries in ZIP for debugging
+ * Check if a file is a ZIP based on magic bytes (Reads first 4 bytes from disk)
*/
- listZipContents(zipBuffer: Buffer): string[] {
+  async isZipFile(filePath: string): Promise<boolean> {
try {
- const zip = new AdmZip(zipBuffer);
- const entries = zip.getEntries();
- return entries
- .filter(entry => !entry.isDirectory)
- .map(entry => entry.entryName);
- } catch (error) {
- return [];
+ const buffer = Buffer.alloc(4);
+ const fd = await fs.promises.open(filePath, 'r');
+ await fd.read(buffer, 0, 4, 0);
+ await fd.close();
+ return buffer[0] === 0x50 && buffer[1] === 0x4B; // PK
+ } catch (e) {
+ return false;
}
}
/**
- * Validate ZIP structure and size
+ * Validate ZIP structure
+ * (Now mostly checking if it opens, since Multer handles upload size)
*/
- validateZipFile(buffer: Buffer, maxSizeMB: number = 50): void {
- // Check if it's a ZIP file
- if (!this.isZipFile(buffer)) {
- throw new ValidationError(
- 'File is not a valid ZIP archive. Please upload a ZIP file from Uber Eats or DoorDash.',
- 'file'
- );
- }
-
- // Check size (in MB)
- const sizeMB = buffer.length / (1024 * 1024);
- if (sizeMB > maxSizeMB) {
- throw new ValidationError(
- `File size (${sizeMB.toFixed(1)}MB) exceeds maximum allowed size (${maxSizeMB}MB)`,
- 'file'
- );
- }
-
- // Try to read the ZIP
- try {
- const zip = new AdmZip(buffer);
- zip.getEntries(); // This will throw if ZIP is corrupted
- } catch (error) {
- throw new ValidationError('ZIP file is corrupted or invalid', 'file');
- }
+  async validateZipFile(filePath: string): Promise<void> {
+ return new Promise((resolve, reject) => {
+ yauzl.open(filePath, { lazyEntries: true }, (err, zipfile) => {
+ if (err || !zipfile) {
+ return reject(new ValidationError('Invalid ZIP file or corrupted', 'file'));
+ }
+ zipfile.close();
+ resolve();
+ });
+ });
}
}
-