Queue concurrent uploads instead of rejecting with 503
Server: replace the hard 503 rejection with a bounded queue (50 slots, 2-minute timeout) so requests wait for a processing slot instead of failing immediately. Client: upload files sequentially instead of via Promise.all, and retry with exponential backoff on 503/504 responses.
This commit is contained in:
parent
4cbc3163de
commit
b52d6b1a6f
2 changed files with 66 additions and 21 deletions
|
|
@ -730,29 +730,41 @@ export const useGame = (defaultGameConfig?: GameConfig) => {
|
||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const uploadDocument = async (file: File, useOcr: boolean = false): Promise<ProcessedDocument> => {
|
const uploadDocument = async (file: File, useOcr: boolean = false, maxRetries = 5): Promise<ProcessedDocument> => {
|
||||||
if (!auth.user?.access_token) {
|
if (!auth.user?.access_token) {
|
||||||
throw new Error('Authentication required to upload documents');
|
throw new Error('Authentication required to upload documents');
|
||||||
}
|
}
|
||||||
|
|
||||||
const formData = new FormData();
|
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||||
formData.append('document', file);
|
const formData = new FormData();
|
||||||
formData.append('useOcr', String(useOcr));
|
formData.append('document', file);
|
||||||
|
formData.append('useOcr', String(useOcr));
|
||||||
const response = await fetch(`${BACKEND_URL}/api/upload`, {
|
|
||||||
method: 'POST',
|
const response = await fetch(`${BACKEND_URL}/api/upload`, {
|
||||||
headers: {
|
method: 'POST',
|
||||||
'Authorization': `Bearer ${auth.user.access_token}`,
|
headers: {
|
||||||
},
|
'Authorization': `Bearer ${auth.user.access_token}`,
|
||||||
body: formData
|
},
|
||||||
});
|
body: formData
|
||||||
|
});
|
||||||
if (!response.ok) {
|
|
||||||
const error = await response.json();
|
if (response.status === 503 || response.status === 504) {
|
||||||
throw new Error(error.error || 'Failed to upload document');
|
if (attempt < maxRetries) {
|
||||||
|
const delay = Math.min(1000 * 2 ** attempt, 15000);
|
||||||
|
await new Promise(r => setTimeout(r, delay));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const error = await response.json();
|
||||||
|
throw new Error(error.error || 'Failed to upload document');
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.json();
|
||||||
}
|
}
|
||||||
|
|
||||||
return response.json();
|
throw new Error('Failed to upload document after multiple retries');
|
||||||
};
|
};
|
||||||
|
|
||||||
const startQuizGen = async (options: {
|
const startQuizGen = async (options: {
|
||||||
|
|
@ -773,9 +785,10 @@ export const useGame = (defaultGameConfig?: GameConfig) => {
|
||||||
|
|
||||||
let documents: ProcessedDocument[] | undefined;
|
let documents: ProcessedDocument[] | undefined;
|
||||||
if (options.files && options.files.length > 0) {
|
if (options.files && options.files.length > 0) {
|
||||||
documents = await Promise.all(
|
documents = [];
|
||||||
options.files.map(file => uploadDocument(file, options.useOcr))
|
for (const file of options.files) {
|
||||||
);
|
documents.push(await uploadDocument(file, options.useOcr));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const generateOptions: GenerateQuizOptions = {
|
const generateOptions: GenerateQuizOptions = {
|
||||||
|
|
|
||||||
|
|
@ -44,6 +44,31 @@ function tieredUploadLimiter(req: AuthenticatedRequest, res: Response, next: Nex
|
||||||
|
|
||||||
let activeUploads = 0;
|
let activeUploads = 0;
|
||||||
const MAX_CONCURRENT_UPLOADS = 5;
|
const MAX_CONCURRENT_UPLOADS = 5;
|
||||||
|
const MAX_QUEUE_SIZE = 50;
|
||||||
|
const QUEUE_TIMEOUT_MS = 120_000; // 2 minutes
|
||||||
|
|
||||||
|
const waitForSlot = (): Promise<void> => {
|
||||||
|
if (activeUploads < MAX_CONCURRENT_UPLOADS) return Promise.resolve();
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
let wrappedResolve: () => void;
|
||||||
|
const timeout = setTimeout(() => {
|
||||||
|
const idx = uploadQueue.indexOf(wrappedResolve);
|
||||||
|
if (idx !== -1) uploadQueue.splice(idx, 1);
|
||||||
|
reject(new Error('Upload queue timeout'));
|
||||||
|
}, QUEUE_TIMEOUT_MS);
|
||||||
|
wrappedResolve = () => { clearTimeout(timeout); resolve(); };
|
||||||
|
uploadQueue.push(wrappedResolve);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const uploadQueue: Array<() => void> = [];
|
||||||
|
|
||||||
|
const drainQueue = () => {
|
||||||
|
while (uploadQueue.length > 0 && activeUploads < MAX_CONCURRENT_UPLOADS) {
|
||||||
|
const next = uploadQueue.shift();
|
||||||
|
if (next) next();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
// All upload routes require an authenticated user, then pass through the
// tier-aware rate limiter, before reaching any route handler below.
router.use(requireAuth);
router.use(tieredUploadLimiter);
|
|
@ -66,10 +91,16 @@ const upload = multer({
|
||||||
});
|
});
|
||||||
|
|
||||||
router.post('/', upload.single('document'), async (req: AuthenticatedRequest, res) => {
|
router.post('/', upload.single('document'), async (req: AuthenticatedRequest, res) => {
|
||||||
if (activeUploads >= MAX_CONCURRENT_UPLOADS) {
|
if (activeUploads >= MAX_CONCURRENT_UPLOADS && uploadQueue.length >= MAX_QUEUE_SIZE) {
|
||||||
return res.status(503).json({ error: 'Server busy processing uploads. Please try again shortly.' });
|
return res.status(503).json({ error: 'Server busy processing uploads. Please try again shortly.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await waitForSlot();
|
||||||
|
} catch {
|
||||||
|
return res.status(504).json({ error: 'Upload timed out waiting in queue. Please try again.' });
|
||||||
|
}
|
||||||
|
|
||||||
activeUploads++;
|
activeUploads++;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
@ -114,6 +145,7 @@ router.post('/', upload.single('document'), async (req: AuthenticatedRequest, re
|
||||||
});
|
});
|
||||||
} finally {
|
} finally {
|
||||||
activeUploads--;
|
activeUploads--;
|
||||||
|
drainQueue();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue