
Commit fe77892

updated the package
1 parent 31a08be commit fe77892

File tree

4 files changed: +50 −79 lines


example/package-lock.json

Lines changed: 5 additions & 4 deletions
Some generated files are not rendered by default.

example/package.json

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@
   "description": "",
   "dependencies": {
     "express": "^5.1.0",
-    "lib-judge": "^1.2.7",
+    "lib-judge": "^1.2.11",
     "uid-safe": "^2.1.5"
   }
 }

worker-example/index.js

Lines changed: 23 additions & 38 deletions
@@ -143,7 +143,7 @@ function runTestcase(
   });
 }
 
-async function processJob(ques_name, code, language, testcases) {
+async function processJob(ques_name, code, language, testcases, batchName) {
   const extension =
     language === "cpp" ? "cpp" : language === "java" ? "java" : "py";
   const fileName = `${ques_name}.${extension}`;
@@ -170,51 +170,35 @@ async function processJob(ques_name, code, language, testcases) {
         )
       )
     );
-    console.log(results, "this is results");
 
-    // ✅ Added log before pushing result to Redis
-    console.log(`[Redis Push] 🟢 Storing job:${ques_name}:result`);
-
-    await redis_server.rPush(
-      `results_queue:${ques_name}`,
-      JSON.stringify(results)
-    );
+    // Split large result into chunks of 1000 entries
+    const chunkSize = 1000;
+    for (let i = 0; i < results.length; i += chunkSize) {
+      const chunk = results.slice(i, i + chunkSize);
+      await redis_server.rPush(`results_queue:${ques_name}`, JSON.stringify(chunk));
+    }
     await redis_server.expire(`results_queue:${ques_name}`, 300);
-    await redis_server.hIncrBy(
-      `job:${ques_name}:status`,
-      "completedBatches",
-      1
-    );
-    console.log(`[Worker] 🧮 Incremented completedBatches for ${ques_name}`);
 
-    console.log(`[Redis Push] ✅ Result stored successfully`);
 
-    console.log(
-      `[Redis Push] 🟢 Updating job:${ques_name}:status -> completed`
-    );
-    await redis_server.hSet(`job:${ques_name}:status`, { state: "completed" });
-    await redis_server.expire(`job:${ques_name}:status`, 300);
-    console.log(`[Redis Push] ✅ Status updated successfully`);
+    await redis_server.hIncrBy(`job:${ques_name}:status`, "completedBatches", 1);
+    console.log(`[Worker ✅] Batch completed for ${ques_name}`);
+
   } catch (err) {
-    console.error("Error during job processing:", err);
+    console.error(`[Worker ❌] Batch failed for ${ques_name}:`, err);
+
+    // still increment to prevent job hang
+    await redis_server.hIncrBy(`job:${ques_name}:status`, "completedBatches", 1);
 
-    console.log(`[Redis Push] 🔴 Storing failed state for job:${ques_name}`);
-    await redis_server.setEx(
-      `job:${ques_name}:result`,
-      30,
-      JSON.stringify([{ error: err.toString() }])
+    await redis_server.rPush(
+      `results_queue:${ques_name}`,
+      JSON.stringify([{ error: err.toString(), crashed: true }])
     );
-    await redis_server.hSet(`job:${ques_name}:status`, { state: "failed" });
-    console.log(`[Redis Push] ❌ Failure recorded in Redis`);
   } finally {
-    try {
-      fs.unlinkSync(filePath);
-    } catch {}
+    try { fs.unlinkSync(filePath); } catch { }
     try {
       if (language === "cpp") fs.unlinkSync(execPath);
-      if (language === "java")
-        fs.unlinkSync(filePath.replace(".java", ".class"));
-    } catch {}
+      if (language === "java") fs.unlinkSync(filePath.replace(".java", ".class"));
+    } catch { }
   }
 }
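
For reference, a minimal consumer-side sketch of how the chunked entries in `results_queue:${ques_name}` could be drained and flattened, including the `{ error, crashed: true }` marker that a failed batch now pushes. This is not part of the commit; it assumes the same node-redis v4 client (`redis_server`) used by the worker, and the helper name `collectResults` is illustrative.

```js
// Hypothetical consumer helper: drain and flatten the chunked results for one job.
// Assumes a connected node-redis v4 client named `redis_server`, as in the worker code.
async function collectResults(ques_name) {
  const key = `results_queue:${ques_name}`;

  // Each list entry is one JSON-encoded chunk of up to 1000 results,
  // or a single-element [{ error, crashed: true }] marker from a failed batch.
  const rawChunks = await redis_server.lRange(key, 0, -1);

  const results = [];
  for (const raw of rawChunks) {
    const chunk = JSON.parse(raw);
    const crashedEntry = chunk.find((entry) => entry && entry.crashed);
    if (crashedEntry) {
      // A batch crashed; surface the error instead of returning partial results.
      throw new Error(crashedEntry.error);
    }
    results.push(...chunk);
  }

  await redis_server.del(key); // optional cleanup; the worker also sets a 300 s TTL
  return results;
}
```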

@@ -243,7 +227,8 @@ async function pollForJobs() {
       }
 
       // Fetch testcases for this batch
-      const batchData = await redis_server.lPop(`testcase_queue:${batchName}`);
+      const batchDataResult = await redis_server.brPop(`testcase_queue:${batchName}`, 0);
+      const batchData = batchDataResult ? batchDataResult.element : null;
       if (!batchData) {
         console.warn(`[Worker] ⚠️ No testcases found for batch ${batchName}`);
         continue;
@@ -262,7 +247,7 @@ async function pollForJobs() {
       );
 
       // Run batch
-      await processJob(ques_name, code, language, testcases);
+      await processJob(ques_name, code, language, testcases, batchName);
 
       console.log(`[Worker] ✅ Completed batch for ${ques_name}`);
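
For context on the switch from `lPop` to `brPop` above: in node-redis v4, `brPop` blocks until an element is available (a timeout of 0 waits indefinitely) and resolves to an object rather than a bare string, which is why the worker now unwraps `.element`. A small sketch of that shape, using an illustrative queue name and a non-zero timeout:

```js
// node-redis v4: brPop resolves to { key, element } once a value arrives,
// or to null if a non-zero timeout (in seconds) expires first.
const popped = await redis_server.brPop("testcase_queue:example-batch", 5);
if (popped === null) {
  console.warn("timed out waiting for testcases");
} else {
  const testcases = JSON.parse(popped.element); // same unwrapping as batchDataResult.element
}
```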

worker-image/workers/index.js

Lines changed: 21 additions & 36 deletions
@@ -170,51 +170,35 @@ async function processJob(ques_name, code, language, testcases) {
         )
       )
     );
-    console.log(results, "this is results");
 
-    // ✅ Added log before pushing result to Redis
-    console.log(`[Redis Push] 🟢 Storing job:${ques_name}:result`);
-
-    await redis_server.rPush(
-      `results_queue:${ques_name}`,
-      JSON.stringify(results)
-    );
+    // Split large result into chunks of 1000 entries
+    const chunkSize = 1000;
+    for (let i = 0; i < results.length; i += chunkSize) {
+      const chunk = results.slice(i, i + chunkSize);
+      await redis_server.rPush(`results_queue:${ques_name}:${batchName}`, JSON.stringify(chunk));
+    }
     await redis_server.expire(`results_queue:${ques_name}`, 300);
-    await redis_server.hIncrBy(
-      `job:${ques_name}:status`,
-      "completedBatches",
-      1
-    );
-    console.log(`[Worker] 🧮 Incremented completedBatches for ${ques_name}`);
 
-    console.log(`[Redis Push] ✅ Result stored successfully`);
 
-    console.log(
-      `[Redis Push] 🟢 Updating job:${ques_name}:status -> completed`
-    );
-    await redis_server.hSet(`job:${ques_name}:status`, { state: "completed" });
-    await redis_server.expire(`job:${ques_name}:status`, 300);
-    console.log(`[Redis Push] ✅ Status updated successfully`);
+    await redis_server.hIncrBy(`job:${ques_name}:status`, "completedBatches", 1);
+    console.log(`[Worker ✅] Batch completed for ${ques_name}`);
+
   } catch (err) {
-    console.error("Error during job processing:", err);
+    console.error(`[Worker ❌] Batch failed for ${ques_name}:`, err);
+
+    // still increment to prevent job hang
+    await redis_server.hIncrBy(`job:${ques_name}:status`, "completedBatches", 1);
 
-    console.log(`[Redis Push] 🔴 Storing failed state for job:${ques_name}`);
-    await redis_server.setEx(
-      `job:${ques_name}:result`,
-      30,
-      JSON.stringify([{ error: err.toString() }])
+    await redis_server.rPush(
+      `results_queue:${ques_name}`,
+      JSON.stringify([{ error: err.toString(), crashed: true }])
    );
-    await redis_server.hSet(`job:${ques_name}:status`, { state: "failed" });
-    console.log(`[Redis Push] ❌ Failure recorded in Redis`);
   } finally {
-    try {
-      fs.unlinkSync(filePath);
-    } catch {}
+    try { fs.unlinkSync(filePath); } catch { }
     try {
       if (language === "cpp") fs.unlinkSync(execPath);
-      if (language === "java")
-        fs.unlinkSync(filePath.replace(".java", ".class"));
-    } catch {}
+      if (language === "java") fs.unlinkSync(filePath.replace(".java", ".class"));
+    } catch { }
   }
 }

@@ -243,7 +227,8 @@ async function pollForJobs() {
       }
 
      // Fetch testcases for this batch
-      const batchData = await redis_server.lPop(`testcase_queue:${batchName}`);
+      const batchDataResult = await redis_server.brPop(`testcase_queue:${batchName}`, 0);
+      const batchData = batchDataResult ? batchDataResult.element : null;
       if (!batchData) {
         console.warn(`[Worker] ⚠️ No testcases found for batch ${batchName}`);
         continue;
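
With `completedBatches` now incremented on both success and failure, the overall job can always reach a terminal count even when a batch crashes. A hedged sketch of how a coordinator might poll `job:${ques_name}:status` for completion; it assumes the hash also records the expected number of batches under a field such as `totalBatches`, which is not shown in this diff:

```js
// Hypothetical coordinator-side check; `totalBatches` is an assumed field,
// not introduced by this commit.
async function isJobFinished(ques_name) {
  const status = await redis_server.hGetAll(`job:${ques_name}:status`);
  if (!status.totalBatches) return false;
  // hIncrBy stores counters as strings, so compare numerically.
  return Number(status.completedBatches || 0) >= Number(status.totalBatches);
}
```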
