aboutsummaryrefslogtreecommitdiff
path: root/src/zenserver/frontend/html/indexer/indexer.js
diff options
context:
space:
mode:
authorMartin Ridgers <[email protected]>2024-11-11 10:31:34 +0100
committerGitHub Enterprise <[email protected]>2024-11-11 10:31:34 +0100
commit05d1044045539557dfe4e9c8996737d83f9dee89 (patch)
tree00907e9a5306318e8a9d169348348b7a5cc1f32d /src/zenserver/frontend/html/indexer/indexer.js
parentUpdate VERSION.txt (diff)
downloadzen-05d1044045539557dfe4e9c8996737d83f9dee89.tar.xz
zen-05d1044045539557dfe4e9c8996737d83f9dee89.zip
Self-hosted dashboard: Searchable oplog and links between oplog entry dependencies (#213)v5.5.12-pre0
* Consistent use of semicolons * Added fallback if oplog entry assumptions do not hold * 'marker' and 'expired' cells were incorrectly friendly * Two spaces when there should only be one * Robustness against .text(undefined) calls * A single step into JavaScript modules * Turned Fetcher into a module * Friendly into a module * Specialise Cbo field name comparison as TextDecoder() is very slow * Prefer is_named() over get_name() * Incorrect logic checking if a server reply was okay * Try and make sure it's always numbers that flow through Friendly * Added a progress bar component * Swap key and package hash columns * CbObject cloning * Dark and light themes depending on browser settings * Adjust styling of input boxes * Add theme swatches to test page * Turns out one can nest CSS selectors * Separate swatch for links/actions * Generate theme by lerping intermediate colours * Clearer progress bar * Chromium was complaining about label-less input elements * Promise-based cache using an IndexedDb * WebWorker for generating map of package ids to names * Indexer class for building, loading, and saving map of ids to names * Added links to oplog entries of an entry's dependencies * This doesn't need to be decorated as async any longer * Implemented oplog searching * View and drop make no sense on package data payloads * Rudimentary search result truncation * Updated changelog * Updated HTML zip archive
Diffstat (limited to 'src/zenserver/frontend/html/indexer/indexer.js')
-rw-r--r--src/zenserver/frontend/html/indexer/indexer.js193
1 files changed, 193 insertions, 0 deletions
diff --git a/src/zenserver/frontend/html/indexer/indexer.js b/src/zenserver/frontend/html/indexer/indexer.js
new file mode 100644
index 000000000..8e5003edf
--- /dev/null
+++ b/src/zenserver/frontend/html/indexer/indexer.js
@@ -0,0 +1,193 @@
+// Copyright Epic Games, Inc. All Rights Reserved.
+
+"use strict";
+
+import { Cache } from "./cache.js"
+import { Message } from "./worker.js"
+import { Fetcher } from "../util/fetcher.js"
+
+////////////////////////////////////////////////////////////////////////////////
// Maps oplog entry ids to human-readable names. The map is stored as a list
// of pages, each page an array of [id, name] pairs sorted by ascending id.
class Indexer
{
    constructor(pages)
    {
        this._pages = pages;
    }

    // Returns the name associated with 'entry_id', or "" if the id is not
    // present in any page. Each page is binary-searched independently.
    lookup_id(entry_id)
    {
        const bin_search = function(page) {
            let l = 0;
            let r = page.length;
            while (l < r)
            {
                const mid = l + ((r - l) >> 1);
                // Three-way compare via subtraction; ids are BigInt values,
                // hence the comparisons against 0n.
                const d = entry_id - page[mid][0];
                if (d < 0n) r = mid;
                else if (d > 0n) l = mid + 1;
                else return mid;
            }

            return -1;
        };

        for (const page of this._pages)
        {
            const index = bin_search(page);
            if (index >= 0)
                return page[index][1];
        }

        return "";
    }

    // Yields every name (across all pages, in page order) that contains
    // 'needle' as a substring. Matching is case-sensitive.
    *search(needle)
    {
        for (const page of this._pages)
            for (const [, name] of page)
                if (name.includes(needle))
                    yield name;
    }
}
+
+
+
+////////////////////////////////////////////////////////////////////////////////
// Persists 'pages' into the per-project cache along with a "$" metadata
// record that load() later uses to validate cache hits. Returns true only if
// every page write and the metadata write succeeded.
// 'progress_cb(stage, done, total)' is invoked as each page write completes.
async function save(progress_cb, oplog_info, pages)
{
    const project_id = oplog_info["project"];
    const cache = new Cache(project_id, "pages");

    // Issue all page writes up front so they proceed concurrently.
    const page_count = pages.length;
    const puts = new Array(page_count);
    for (let i = 0; i < page_count; ++i)
        puts[i] = cache.put("pages", i, pages[i]);

    let okay = true;
    for (let i = 0; i < page_count; ++i)
    {
        // '&&=' keeps 'okay' a boolean (the previous '&=' coerced it to a
        // number).
        okay &&= await puts[i];
        progress_cb("saving", i + 1, page_count);
    }
    if (!okay)
        return false;

    // The metadata record is what load() checks first; await it so the
    // caller is not told the save succeeded before it has been written.
    okay = await cache.put("pages", "$", {
        "page_count" : pages.length,
        "total_size" : oplog_info["totalsize"],
        "op_count" : oplog_info["opcount"],
        "timestamp" : (Date.now() / 1000) | 0,
    });

    return Boolean(okay);
}
+
+////////////////////////////////////////////////////////////////////////////////
// Builds the id-to-name pages by fanning the oplog out across a pool of web
// workers. Each worker maps a strided range of oplog entries and posts pages
// of [id, name] pairs back; the collected pages are returned in completion
// order. 'progress_cb(stage, done, total)' reports parsing progress.
async function build(progress_cb, oplog_info)
{
    const project_id = oplog_info["project"];
    const oplog = oplog_info["id"];
    const init_msg = Message.create(Message.Init, project_id, oplog);

    // Use half the logical cores, clamped to [1, 6]. The previous expression
    // could be fractional (e.g. hardwareConcurrency == 1 gives 0.5), which
    // made 'stride' fractional and mis-aligned the page boundaries.
    const worker_n = Math.max(1, Math.min(Math.floor(navigator.hardwareConcurrency / 2), 6));
    const page_size = 48 << 10;
    const stride = page_size * worker_n;
    const end = oplog_info["opcount"];
    let entry_count = 0;

    const pages = [];

    const executor = function(index, resolve, reject) {
        const worker = new Worker("indexer/worker.js", { type: "module" });
        worker.onerror = (evt) => reject(Error("Worker error"));
        worker.onmessage = (evt) => {
            const [msg_id, ...params] = evt.data;
            switch (msg_id)
            {
                case Message.MapDone:
                    resolve();
                    worker.terminate();
                    break;

                case Message.MapPage: {
                    // Braces scope the 'const' to this case
                    // (no-case-declarations).
                    const [page] = params;
                    pages.push(page);
                    entry_count += page.length;
                    progress_cb("parsing", entry_count, end);
                    break;
                }
            }
        };
        worker.postMessage(init_msg);

        // Worker 'index' starts at its own page and advances by the
        // pool-wide stride so the workers interleave over the whole oplog.
        const start = page_size * index;
        const map_msg = Message.create(Message.Map, start, end, page_size, stride);
        worker.postMessage(map_msg);
    };

    const workers = [];
    for (let i = 0; i < worker_n; ++i)
        workers.push(new Promise((resolve, reject) => executor(i, resolve, reject)));

    // Await in order; all workers are already running concurrently.
    for (const worker of workers)
        await worker;

    return pages;
}
+
+////////////////////////////////////////////////////////////////////////////////
// Attempts to load previously-saved pages from the per-project cache.
// Returns the pages array, or null on a cache miss: no metadata, the oplog
// has changed size/count, the cached index is older than a day, or any page
// is missing. 'progress_cb(stage, done, total)' reports loading progress.
async function load(progress_cb, oplog_info)
{
    const project_id = oplog_info["project"];
    const cache = new Cache(project_id, "pages");
    const meta = await cache.get("pages", "$");

    let hit = false;
    if (meta)
    {
        const yesterday = (Date.now() / 1000) - (24 * 60 * 60);
        // Loose '==' is deliberate: the cached values were copied straight
        // from a previous oplog_info, so the types are expected to match.
        hit = (meta["total_size"] == oplog_info["totalsize"])
            && (meta["op_count"] == oplog_info["opcount"])
            && (meta["timestamp"] >= yesterday);
    }
    if (!hit)
        return null;

    // Issue all page reads up front so they proceed concurrently.
    const page_count = meta["page_count"];
    const gets = new Array(page_count);
    const pages = new Array(page_count);
    for (let i = 0; i < page_count; ++i)
        gets[i] = cache.get("pages", i);

    progress_cb("loading", 0, page_count);
    for (let i = 0; i < page_count; ++i)
    {
        pages[i] = await gets[i];

        // A missing page means the cache is incomplete; treat it as a miss
        // so the caller rebuilds the index instead of indexing holes.
        if (pages[i] == null)
            return null;

        progress_cb("loading", i + 1, page_count);
    }

    return pages;
}
+
+////////////////////////////////////////////////////////////////////////////////
// Builds an Indexer for the given oplog: fetches the oplog's summary info,
// serves the id-to-name pages from the cache when possible, and otherwise
// builds them with web workers and saves the fresh result for next time.
// Throws if the browser has no web worker support.
export async function create_indexer(project_id, oplog, progress_cb)
{
    if (!window.Worker)
        throw Error("browser does not support web workers");

    const fetcher = new Fetcher();
    const oplog_info = await fetcher
        .resource("prj", project_id, "oplog", oplog)
        .json();

    // Cache first; fall back to a full build (and cache the fresh result).
    let pages = await load(progress_cb, oplog_info);
    if (pages == null)
    {
        pages = await build(progress_cb, oplog_info);
        await save(progress_cb, oplog_info, pages);
    }

    return new Indexer(pages);
}