From 752f74d05ce05c83a04b25146f36c3f5c2df44c8 Mon Sep 17 00:00:00 2001 From: Massiles Ghernaout <749-gm213204@users.noreply.www-apps.univ-lehavre.fr> Date: Mon, 9 Feb 2026 17:49:22 +0100 Subject: [PATCH 1/2] first attempt at adapting the UI for the tester/frontend job seperation --- backend/src/clients.js | 6 +++ backend/src/config.js | 2 + backend/src/routes/cluster.js | 49 ++++++++++++++++++ backend/src/routes/hash.js | 12 ++++- backend/src/routes/tester.js | 7 +++ frontend/src/App.jsx | 97 +++++++++++++++++++---------------- proxy/index.js | 2 +- 7 files changed, 130 insertions(+), 45 deletions(-) diff --git a/backend/src/clients.js b/backend/src/clients.js index 6ea9e8e..986ff5e 100644 --- a/backend/src/clients.js +++ b/backend/src/clients.js @@ -35,6 +35,12 @@ export const redis = isTest async scard() { return 0; }, + async ltrim() { + return "OK"; + }, + async lrange() { + return []; + }, } : new Redis(REDIS_URL); diff --git a/backend/src/config.js b/backend/src/config.js index f4c6dac..70ec2d6 100644 --- a/backend/src/config.js +++ b/backend/src/config.js @@ -11,6 +11,8 @@ export const REDIS_KEYS = { JOBS_RESULTS: "jobs:results", JOBS_STATUS: "jobs:status", JOBS_IN_PROGRESS: "jobs:in_progress", + JOBS_META: "jobs:meta", + JOBS_RECENT: "jobs:recent", }; // Docker diff --git a/backend/src/routes/cluster.js b/backend/src/routes/cluster.js index a188d7e..7395592 100644 --- a/backend/src/routes/cluster.js +++ b/backend/src/routes/cluster.js @@ -53,6 +53,55 @@ router.get("/state", async (req, res) => { } }); +const JOBS_RECENT_LIMIT = 200; + +// GET /cluster/jobs - liste des jobs récents (avec source, statut, résultat) +router.get("/jobs", async (req, res) => { + try { + const ids = await redis.lrange( + REDIS_KEYS.JOBS_RECENT, + 0, + JOBS_RECENT_LIMIT - 1 + ); + const jobs = []; + for (const id of ids) { + const [rawMeta, rawStatus, rawResult] = await Promise.all([ + redis.hget(REDIS_KEYS.JOBS_META, id), + redis.hget(REDIS_KEYS.JOBS_STATUS, id), + 
redis.hget(REDIS_KEYS.JOBS_RESULTS, id), + ]); + const meta = rawMeta ? JSON.parse(rawMeta) : null; + if (!meta) continue; + const hash = meta.hash; + const source = meta.source ?? "frontend"; + let status = "queued"; + let found; + let plaintext; + let elapsedMs; + if (rawResult) { + const result = JSON.parse(rawResult); + status = result.error ? "failed" : "done"; + found = result.found; + plaintext = result.plaintext ?? null; + elapsedMs = result.elapsedMs; + } else if (rawStatus) { + const statusObj = JSON.parse(rawStatus); + status = statusObj.status ?? "queued"; + } + const entry = { id, hash, source, status }; + if (status === "done" || status === "failed") { + entry.found = found; + entry.plaintext = plaintext; + if (elapsedMs != null) entry.elapsedMs = elapsedMs; + } + jobs.push(entry); + } + res.json(jobs); + } catch (err) { + console.error("Error getting cluster jobs", err); + res.status(500).json({ error: "failed to get cluster jobs" }); + } +}); export default router; diff --git a/backend/src/routes/hash.js b/backend/src/routes/hash.js index d203a59..1c334c1 100644 --- a/backend/src/routes/hash.js +++ b/backend/src/routes/hash.js @@ -7,10 +7,13 @@ const router = express.Router(); // POST /hash/manual - queing the hash to be bruteforced by the worker router.post("/manual", async (req, res) => { - const { hash } = req.body; + const { hash, source } = req.body; if (!hash) { return res.status(400).json({ error: "hash is required" }); } + if (!source){ + console.log("Received job with no source meta data. Setting source to unknown..."); + } const jobId = uuidv4(); const job = { id: jobId, hash, createdAt: Date.now() }; @@ -23,6 +26,13 @@ router.post("/manual", async (req, res) => { // stuff, it is okey for critical stuff, but this is not ! 
await redis.lpush(REDIS_KEYS.JOBS_PENDING, JSON.stringify(job));
   await redis.hset(REDIS_KEYS.JOBS_STATUS, jobId, JSON.stringify({ status: "queued" }));
+  await redis.hset(
+    REDIS_KEYS.JOBS_META,
+    jobId,
+    JSON.stringify({ hash, source: source ?? "unknown", createdAt: job.createdAt })
+  );
+  await redis.lpush(REDIS_KEYS.JOBS_RECENT, jobId);
+  await redis.ltrim(REDIS_KEYS.JOBS_RECENT, 0, 499);
 
   return res.status(202).json({ id: jobId });
 });
 
diff --git a/backend/src/routes/tester.js b/backend/src/routes/tester.js
index 70139c2..c1eb30f 100644
--- a/backend/src/routes/tester.js
+++ b/backend/src/routes/tester.js
@@ -44,6 +44,13 @@ router.post("/search", async (req, res) => {
     jobId,
     JSON.stringify({ status: "queued" })
   );
+  await redis.hset(
+    REDIS_KEYS.JOBS_META,
+    jobId,
+    JSON.stringify({ hash, source: "tester", createdAt: job.createdAt })
+  );
+  await redis.lpush(REDIS_KEYS.JOBS_RECENT, jobId);
+  await redis.ltrim(REDIS_KEYS.JOBS_RECENT, 0, 499);
 
   return res.status(202).json({ id: jobId });
 });
 
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index abfe5b6..f87ece0 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -55,12 +55,12 @@ function App() {
     if (!hashInput) return;
     setSendingHash(true);
     try {
-      const data = await apiRequest("/hash/manual", {
+      await apiRequest("/hash/manual", {
         method: "POST",
-        body: JSON.stringify({ hash: hashInput }),
+        body: JSON.stringify({ hash: hashInput, source: "frontend" }),
       });
-      setJobs((prev) => [...prev, { id: data.id, hash: hashInput }]);
       setHashInput("");
+      // Jobs list is refreshed by GET /cluster/jobs poll
     } catch (e) {
       console.error(e);
       alert("Erreur lors de l'envoi du hash");
@@ -70,42 +70,31 @@ function App() {
   };
 
   useEffect(() => {
-    const poll = async () => {
-      // We use a functional update to peek at the latest jobs without
-      // needing jobs in the dependency array
-      setJobs(currentJobs => {
-        const pendingJobs = currentJobs.filter(j => !j.result);
-
-        // If nothing to poll, return existing state 
to avoid re-render - if (pendingJobs.length === 0) return currentJobs; - - // Trigger the API calls - Promise.all( - currentJobs.map(async (job) => { - - // if the job obj already has a result key - // then skip - if (job.result) return job; - - // otherwise, request the backend - try { - const res = await apiRequest(`/hash/${job.id}`); - return res.status === "queued" ? job : { ...job, result: res }; - } catch { - return job; - } - }) - ).then(updatedResults => { - // Only update if something actually changed to prevent infinite loops - setJobs(updatedResults); - }); - - return currentJobs; // Return current state while waiting for Promise - }); - }; - - const pollId = setInterval(poll, 2000); - return () => clearInterval(pollId); + async function pollJobs() { + try { + const data = await apiRequest("/cluster/jobs"); + setJobs( + data.map((j) => ({ + id: j.id, + hash: j.hash, + source: j.source ?? "frontend", + result: + j.status === "done" || j.status === "failed" + ? { + found: j.found, + plaintext: j.plaintext ?? null, + elapsedMs: j.elapsedMs, + } + : undefined, + })) + ); + } catch (e) { + console.error("cluster jobs error", e); + } + } + pollJobs(); + const pollJobsId = setInterval(pollJobs, CLUSTER_POLL_INTERVAL_MS); + return () => clearInterval(pollJobsId); }, []); useEffect(() => { @@ -148,11 +137,11 @@ function App() { const hash = CryptoJS.MD5(randomText).toString(CryptoJS.enc.Hex); try { - const res = await apiRequest("/hash/manual", { + await apiRequest("/hash/manual", { method: "POST", body: JSON.stringify({ hash }), }); - setJobs((prev) => [...prev, { id: res.id, hash }]); + // Jobs list is refreshed by GET /cluster/jobs poll } catch (e) { console.error("auto send error", e); } @@ -180,7 +169,7 @@ function App() { const outputsContent = jobs .map((j) => { if (!j.result) return "PENDING"; - return j.result.found ? j.result.plaintext : "NOT_FOUND"; + return j.result.found ? (j.result.plaintext ?? 
"") : "NOT_FOUND"; }) .join("\n"); const ts = new Date().toISOString().slice(0, 19).replace(/[-:T]/g, ""); @@ -482,6 +471,7 @@ function App() {