// Copyright Epic Games, Inc. All Rights Reserved.

"use strict";

import { Cache } from "./cache.js";
import { Message } from "./worker.js";
import { Fetcher } from "../util/fetcher.js";

////////////////////////////////////////////////////////////////////////////////

// In-memory index over "pages" of oplog entries. Each page is an array of
// entries shaped [entry_id, name, size, raw_size], where entry_id is a BigInt
// and each page is sorted by entry_id (required by lookup_id's binary search).
class Indexer
{
    constructor(pages)
    {
        this._pages = pages;
    }

    // Returns the name associated with `entry_id` (a BigInt), or "" when the
    // id is not present in any page. Each page is binary-searched in turn.
    lookup_id(entry_id)
    {
        const bin_search = (page) => {
            let lo = 0;
            let hi = page.length;
            while (lo < hi)
            {
                const mid = lo + ((hi - lo) >> 1);
                // BigInt subtraction keeps full precision; compare against 0n.
                const d = entry_id - page[mid][0];
                if (d < 0n)
                    hi = mid;
                else if (d > 0n)
                    lo = mid + 1;
                else
                    return mid;
            }
            return -1;
        };

        for (const page of this._pages)
        {
            const index = bin_search(page);
            if (index >= 0)
                return page[index][1];
        }

        return "";
    }

    // Yields every entry name containing `needle`, case-insensitively.
    *search(needle)
    {
        const needle_lwr = needle.toLowerCase();
        for (const page of this._pages)
            for (const [, name] of page)
                if (name.toLowerCase().includes(needle_lwr))
                    yield name;
    }

    // Yields every entry name across all pages, in page order.
    *enum_names()
    {
        for (const page of this._pages)
            for (const [, name] of page)
                yield name;
    }

    // Yields [name, size, raw_size] for every entry.
    // NOTE(review): `size | 0n` assumes size/raw_size are already BigInts —
    // mixing Number and BigInt in `|` throws a TypeError. Verify the page
    // format written by the worker.
    *enum_all()
    {
        for (const page of this._pages)
            for (const [, name, size, raw_size] of page)
                yield [name, size | 0n, raw_size | 0n];
    }
}

////////////////////////////////////////////////////////////////////////////////

// Persists `pages` plus a "$" metadata record into the per-project cache.
// Reports progress via progress_cb("saving", done, total). Returns false if
// any individual page write failed.
async function save(progress_cb, oplog_info, pages)
{
    const project_id = oplog_info["project"];
    const cache = new Cache(project_id, "pages");

    // Issue all page writes up front so they run concurrently, then await
    // them in order so progress is reported monotonically.
    const page_count = pages.length;
    const puts = new Array(page_count);
    for (let i = 0; i < page_count; ++i)
        puts[i] = cache.put("pages", i, pages[i]);

    let okay = true;
    for (let i = 0; i < page_count; ++i)
    {
        okay &= await puts[i];
        progress_cb("saving", i + 1, page_count);
    }

    if (!okay)
        return false;

    // BUGFIX: this metadata write was previously not awaited, so callers
    // could observe `true` before the "$" record actually landed — a later
    // load() could then race an incomplete cache.
    await cache.put("pages", "$", {
        "page_count" : pages.length,
        "total_size" : oplog_info["totalsize"],
        "op_count" : oplog_info["opcount"],
        "timestamp" : (Date.now() / 1000) | 0,
    });

    return true;
}

////////////////////////////////////////////////////////////////////////////////

// Builds the page index from scratch by fanning work out to web workers.
// Workers interleave: worker `i` maps ranges starting at page_size * i and
// advances by `stride` (page_size * worker_n). Progress is reported via
// progress_cb("parsing", entries_done, total). Returns the collected pages
// (order depends on worker completion, which lookup/search tolerate since
// every page is searched independently).
async function build(progress_cb, oplog_info, max_workers = 6, page_size = 48 << 10)
{
    const project_id = oplog_info["project"];
    const oplog = oplog_info["id"];
    const init_msg = Message.create(Message.Init, project_id, oplog);

    // BUGFIX: hardwareConcurrency / 2 can be fractional (e.g. 1.5 on a
    // 3-thread machine), which previously produced a fractional stride and
    // one extra loop iteration. Clamp to a whole number, at least one worker.
    const worker_n = Math.max(1, Math.min(Math.floor(navigator.hardwareConcurrency / 2), max_workers));
    const stride = page_size * worker_n;
    const end = oplog_info["opcount"];

    let entry_count = 0;
    const pages = [];

    const executor = (index, resolve, reject) => {
        const worker = new Worker("indexer/worker.js", { type: "module" });
        worker.onerror = () => reject(Error("Worker error"));
        worker.onmessage = (evt) => {
            const [msg_id, ...params] = evt.data;
            switch (msg_id)
            {
            case Message.MapDone:
                resolve();
                worker.terminate();
                break;

            case Message.MapPage: {
                const [page] = params;
                pages.push(page);
                entry_count += page.length;
                progress_cb("parsing", entry_count, end);
                break;
            }
            }
        };

        worker.postMessage(init_msg);

        const start = page_size * index;
        const map_msg = Message.create(Message.Map, start, end, page_size, stride);
        worker.postMessage(map_msg);
    };

    const workers = [];
    for (let i = 0; i < worker_n; ++i)
        workers.push(new Promise((...args) => executor(i, ...args)));

    await Promise.all(workers);
    return pages;
}

////////////////////////////////////////////////////////////////////////////////

// Attempts to load previously-saved pages from the cache. Returns null on a
// cache miss: missing metadata, mismatched oplog size/op-count, or metadata
// older than 24 hours. Reports progress via progress_cb("loading", …).
async function load(progress_cb, oplog_info)
{
    const project_id = oplog_info["project"];
    const cache = new Cache(project_id, "pages");

    const meta = await cache.get("pages", "$");

    // The cache is only valid for the exact same oplog snapshot, and is
    // deliberately expired after a day so a stale index gets rebuilt.
    let hit = false;
    if (meta)
    {
        const yesterday = (Date.now() / 1000) - (24 * 60 * 60);
        hit = (meta["total_size"] == oplog_info["totalsize"])
            && (meta["op_count"] == oplog_info["opcount"])
            && (meta["timestamp"] >= yesterday);
    }

    if (!hit)
        return null;

    // Issue all page reads concurrently, then await in order for monotonic
    // progress reporting.
    const page_count = meta["page_count"];
    const gets = new Array(page_count);
    const pages = new Array(page_count);
    for (let i = 0; i < page_count; ++i)
        gets[i] = cache.get("pages", i);

    progress_cb("loading", 0, page_count);
    for (let i = 0; i < page_count; ++i)
    {
        pages[i] = await gets[i];
        progress_cb("loading", i + 1, page_count);
    }

    return pages;
}

////////////////////////////////////////////////////////////////////////////////

// Creates an Indexer for the given project/oplog: fetches the oplog info,
// tries the cache first, and otherwise builds the index with web workers and
// saves it back. progress_cb(phase, done, total) receives "loading",
// "parsing", or "saving" updates. Throws if the browser lacks Worker support.
export async function create_indexer(project_id, oplog, progress_cb)
{
    if (!window.Worker)
        throw Error("browser does not support web workers");

    const oplog_info = await new Fetcher()
        .resource("prj", project_id, "oplog", oplog)
        .json();

    let pages = await load(progress_cb, oplog_info);
    if (!pages)
    {
        pages = await build(progress_cb, oplog_info);
        await save(progress_cb, oplog_info, pages);
    }

    return new Indexer(pages);
}