diff --git a/.github/workflows/update_components.yaml b/.github/workflows/update_components.yaml index a4ff063bc1e..aad24abd346 100644 --- a/.github/workflows/update_components.yaml +++ b/.github/workflows/update_components.yaml @@ -61,6 +61,7 @@ jobs: rm -f web/build/data/cache.z* zip -s 50m web/build/data/cache.zip cache.sqlite3 + - name: Tar artifact # Artifacts are case-insensitive; this is a workaround run: tar -czf web_build.tar.gz web/build/ - name: Upload artifact diff --git a/.gitignore b/.gitignore index e2ab415f904..de9138b67b2 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,8 @@ .idea *.zip *.z* +*.tar +.vscode/ # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/jlcparts/datatables.py b/jlcparts/datatables.py index f3fe8cc57e2..f0673ae9469 100644 --- a/jlcparts/datatables.py +++ b/jlcparts/datatables.py @@ -13,18 +13,23 @@ from jlcparts.common import sha256file from jlcparts import attributes, descriptionAttributes -def saveJson(object, filename, hash=False, pretty=False, compress=False): - openFn = gzip.open if compress else open - with openFn(filename, "wt", encoding="utf-8") as f: - if pretty: - json.dump(object, f, indent=4, sort_keys=True) - else: - json.dump(object, f, separators=(',', ':'), sort_keys=True) - if hash: - with open(filename + ".sha256", "w") as f: - hash = sha256file(filename) - f.write(hash) - return hash +import tarfile + +from time import time + +def saveDatabaseFile(database, outpath, outfilename): + for key, value in database.items(): + filename = os.path.join(outpath, key + ".jsonlines.gz") + with gzip.open(filename, "wt", encoding="utf-8") as f: + for entry in value: + json.dump(entry, f, separators=(',', ':'), sort_keys=False) + f.write("\n") + + with tarfile.open(os.path.join(outpath, outfilename), 'w') as tar: + for key, value in database.items(): + filename = os.path.join(outpath, key + ".jsonlines.gz") + tar.add(filename, arcname=os.path.relpath(filename, start=outpath)) + os.unlink(filename) def 
weakUpdateParameters(attrs, newParameters): for attr, value in newParameters.items(): @@ -260,6 +265,8 @@ def extractComponent(component, schema): elif schItem == "url": url = component.get("extra", {}).get("url", None) propertyList.append(trimLcscUrl(url, component["lcsc"])) + elif schItem == "stock": + propertyList.append(component["stock"]) elif schItem in component: item = component[schItem] if isinstance(item, str): @@ -273,15 +280,12 @@ def extractComponent(component, schema): def buildDatatable(components): schema = ["lcsc", "mfr", "joints", "description", - "datasheet", "price", "img", "url", "attributes"] + "datasheet", "price", "img", "url", "attributes", "stock"] return { "schema": schema, "components": [extractComponent(x, schema) for x in components] } -def buildStocktable(components): - return {component["lcsc"]: component["stock"] for component in components } - def clearDir(directory): """ Delete everything inside a directory @@ -293,6 +297,28 @@ def clearDir(directory): elif os.path.isdir(file_path): shutil.rmtree(file_path) +def schemaToLookup(schema): + lut = {} + for idx, key in enumerate(schema): + lut[key] = idx + return lut + +def updateLut(lut, item): + key = json.dumps(item, separators=(',', ':'), sort_keys=True) + if not key in lut: + index = len(lut) + lut[key] = index + return index + return lut[key] + +# Inverts the lut so that the Map becomes an array, with the key being the value. +# Values must be 0-based, numeric, and contiguous, or everything will be wrong. 
+def lutToArray(lutMap): + arr = [None] * len(lutMap) + for key, value in lutMap.items(): + arr[value] = key + return arr + @dataclasses.dataclass class MapCategoryParams: @@ -316,26 +342,10 @@ def _map_category(val: MapCategoryParams): components = lib.getCategoryComponents(val.catName, val.subcatName, stockNewerThan=val.ignoreoldstock) if not components: return None - - filebase = val.catName + val.subcatName - filebase = filebase.replace("&", "and").replace("/", "aka") - filebase = re.sub('[^A-Za-z0-9]', '_', filebase) - - dataTable = buildDatatable(components) + dataTable = buildDatatable(components) dataTable.update({"category": val.catName, "subcategory": val.subcatName}) - dataHash = saveJson(dataTable, os.path.join(val.outdir, f"{filebase}.json.gz"), - hash=True, compress=True) - stockTable = buildStocktable(components) - stockHash = saveJson(stockTable, os.path.join(val.outdir, f"{filebase}.stock.json"), hash=True) - - return { - "catName": val.catName, - "subcatName": val.subcatName, - "sourcename": filebase, - "datahash": dataHash, - "stockhash": stockHash - } + return dataTable @click.command() @click.argument("library", type=click.Path(dir_okay=False)) @@ -348,6 +358,8 @@ def buildtables(library, outdir, ignoreoldstock, jobs): """ Build datatables out of the LIBRARY and save them in OUTDIR """ + t0 = time() + lib = PartLibraryDb(library) Path(outdir).mkdir(parents=True, exist_ok=True) clearDir(outdir) @@ -367,18 +379,50 @@ def buildtables(library, outdir, ignoreoldstock, jobs): for i, result in enumerate(pool.imap_unordered(_map_category, params)): if result is None: continue - catName, subcatName = result["catName"], result["subcatName"] + catName = result["category"] #.lower() + subcatName = result["subcategory"] #.lower() + sourceName = f"{catName}__x__{subcatName}" print(f"{((i) / total * 100):.2f} % {catName}: {subcatName}") - if catName not in categoryIndex: - categoryIndex[catName] = {} - assert subcatName not in categoryIndex[catName] - 
categoryIndex[catName][subcatName] = { - "sourcename": result["sourcename"], - "datahash": result["datahash"], - "stockhash": result["stockhash"] - } - index = { - "categories": categoryIndex, - "created": datetime.datetime.now().astimezone().replace(microsecond=0).isoformat() + if sourceName not in categoryIndex: + categoryIndex[sourceName] = result + else: + categoryIndex[sourceName]["components"] += result["components"] # combine for categories that are only different because of case + + t1 = time() + # db holds the data we're putting into our database file + db = { + "subcategories": [schemaToLookup(['subcategory', 'category', 'subcategoryIdx'])], + "components": [schemaToLookup(['lcsc', 'mfr', 'description', 'attrsIdx', 'stock', 'subcategoryIdx', 'joints', 'datasheet', 'price', 'img', 'url'])], + "attributes-lut": {} } - saveJson(index, os.path.join(outdir, "index.json"), hash=True) + + # fill database + s = None # schema lookup + subcatIndex = 0 + for sourceName, subcatEntry in categoryIndex.items(): + if s is None: + s = schemaToLookup(subcatEntry["schema"]) # all schema will be the same + + subcatIndex += 1 + db["subcategories"] += [[subcatEntry["subcategory"], subcatEntry["category"], subcatIndex]] + + for comp in subcatEntry["components"]: + db["components"] += [[ + comp[s["lcsc"]], + comp[s["mfr"]], + comp[s["description"]], + [updateLut(db["attributes-lut"], [attrName, value]) for attrName,value in comp[s["attributes"]].items()], + comp[s["stock"]], + subcatIndex, + comp[s["joints"]], + comp[s["datasheet"]], + comp[s["price"]], + comp[s["img"]], + comp[s["url"]] + ]] + + # invert the lut + db["attributes-lut"] = [json.loads(str) for str in lutToArray(db["attributes-lut"])] + saveDatabaseFile(db, outdir, "all.jsonlines.tar") + + print(f"Table extraction took {(t1 - t0)}, reformat into one file took {time() - t1}") diff --git a/web/.gitignore b/web/.gitignore index c1fe7dfdf2e..661b7cea7b9 100644 --- a/web/.gitignore +++ b/web/.gitignore @@ -5,6 +5,10 @@ 
/.pnp .pnp.js +*.json +*.gz +*.jsonlines + # testing /coverage diff --git a/web/package-lock.json b/web/package-lock.json index 27d9b581c1f..287e05a4901 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -19,6 +19,7 @@ "@testing-library/user-event": "^7.2.1", "dexie": "^3.0.2", "immer": "^7.0.8", + "js-untar": "^2.0.0", "pako": "^2.0.4", "react": "^16.13.1", "react-copy-to-clipboard": "^5.0.2", @@ -13865,6 +13866,11 @@ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, + "node_modules/js-untar": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/js-untar/-/js-untar-2.0.0.tgz", + "integrity": "sha512-7CsDLrYQMbLxDt2zl9uKaPZSdmJMvGGQ7wo9hoB3J+z/VcO2w63bXFgHVnjF1+S9wD3zAu8FBVj7EYWjTQ3Z7g==" + }, "node_modules/js-yaml": { "version": "3.14.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", @@ -29779,6 +29785,11 @@ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, + "js-untar": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/js-untar/-/js-untar-2.0.0.tgz", + "integrity": "sha512-7CsDLrYQMbLxDt2zl9uKaPZSdmJMvGGQ7wo9hoB3J+z/VcO2w63bXFgHVnjF1+S9wD3zAu8FBVj7EYWjTQ3Z7g==" + }, "js-yaml": { "version": "3.14.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", diff --git a/web/package.json b/web/package.json index c9a6c225360..4b95398f6dc 100644 --- a/web/package.json +++ b/web/package.json @@ -14,6 +14,7 @@ "@testing-library/user-event": "^7.2.1", "dexie": "^3.0.2", "immer": "^7.0.8", + "js-untar": "^2.0.0", "pako": "^2.0.4", "react": "^16.13.1", "react-copy-to-clipboard": "^5.0.2", diff --git a/web/src/app.js b/web/src/app.js index aa97aab5c86..a22c5b51e26 100644 --- a/web/src/app.js +++ b/web/src/app.js @@ -6,13 
+6,14 @@ import { NavLink } from "react-router-dom"; + import { library } from '@fortawesome/fontawesome-svg-core' import { fas } from '@fortawesome/free-solid-svg-icons' import { far } from '@fortawesome/free-regular-svg-icons' import { fab } from '@fortawesome/free-brands-svg-icons' import './main.css'; -import { updateComponentLibrary, checkForComponentLibraryUpdate, db } from './db' +import { updateComponentLibrary, checkForComponentLibraryUpdate, db, unpackLinesAsArray } from './db' import { ComponentOverview } from './componentTable' import { History } from './history' @@ -79,8 +80,8 @@ class FirstTimeNote extends React.Component { } componentDidMount() { - db.components.count().then(x => { - this.setState({componentCount: x}); + unpackLinesAsArray('components').then(components => { + this.setState({componentCount: Math.max(0, components.length - 1)}); // don't count the schema entry }) } @@ -110,9 +111,11 @@ class NewComponentFormatWarning extends React.Component { } componentDidMount() { - db.components.toCollection().first().then(x => { - if (x !== undefined && typeof x.attributes[Object.keys(x.attributes)[0]] !== 'object') - this.setState({newComponentFormat: false}); + // I don't know if newComponentFormat will work like this + unpackLinesAsArray('subcategories').then(cats => { + if (cats.size > 1) { + this.setState({newComponentFormat: false}); + } }); } @@ -142,7 +145,7 @@ class UpdateBar extends React.Component { this.setState({updateAvailable}); }); db.settings.get("lastUpdate").then(lastUpdate => { - this.setState({lastUpdate}); + this.setState({lastUpdate: lastUpdate?.value}); }) }; diff --git a/web/src/componentTable.js b/web/src/componentTable.js index 5de2d158882..bf419d169b2 100644 --- a/web/src/componentTable.js +++ b/web/src/componentTable.js @@ -1,4 +1,4 @@ -import { db } from "./db"; +import { unpackAndProcessLines, unpackLinesAsArray} from "./db"; import React from "react"; import { produce, enableMapSet } from "immer"; import { 
FontAwesomeIcon } from '@fortawesome/react-fontawesome' @@ -161,14 +161,27 @@ export class ComponentOverview extends React.Component { } componentDidMount() { - db.categories.toArray().then( categories => { + (async () => { + // generate categories array + let subCats = (await unpackLinesAsArray('subcategories')).map(str => JSON.parse(str)); + + let schema = subCats[0]; // first entry is always the schema lookup + let cats = subCats.filter((sc, i) => i > 0).map((sc, id) => ({ + id: sc[schema.subcategoryIdx], + category: sc[schema.category], + subcategory: sc[schema.subcategory], + sourcename: "", // not needed + stockhash: 0, // not needed + datahash: 0 // not needed + })); + this.setState({ - categories: this.prepareCategories(categories), - rawCategories: categories + categories: this.prepareCategories(cats), + rawCategories: cats }); - }) + })(); } - + prepareCategories(sourceCategories) { let categories = {}; for (const category of sourceCategories) { @@ -640,32 +653,81 @@ class CategoryFilter extends React.Component { // full-text search async components() { this.state.abort(); - let query; + + let categoryFilter = (cat) => true; + if (this.state.allCategories) { if (this.state.searchString.length < 3) { // prevent high ram usage return []; } - query = db.components; } - else - query = db.components.where("category").anyOf(this.collectActiveCategories()); - + else { + const catIds = this.collectActiveCategories(); + const catIdLookup = new Set(catIds); + categoryFilter = (catid) => catIdLookup.has(catid); + } + + let results = []; + let words = []; if (this.state.searchString.length !== 0) { - const words = this.state.searchString.split(/\s+/) + words = this.state.searchString.split(/\s+/) .filter(x => x.length > 0) .map(x => x.toLocaleLowerCase()); - if (words.length > 0) { - query = query.filter(component => { - const text = componentText(component); - return words.every(word => text.includes(word)); - }); - } } let aborted = false; this.setState({abort: () 
=> aborted = true}); - const components = await query.until(() => aborted).toArray(); - return aborted ? null : components; + + let schema; + await unpackAndProcessLines('components', (comp, idx) => { + comp = JSON.parse(comp); + + if (idx === 0) { // first line is always schema lookup + schema = comp; + } else { + if (categoryFilter(comp[schema.subcategoryIdx])) { + let component = { + lcsc: comp[schema.lcsc], + mfr: comp[schema.mfr], + description: comp[schema.description], + attrsIdx: comp[schema.attrsIdx], + stock: comp[schema.stock], + category: comp[schema.subcategoryIdx], + componentIdx: idx, + joints: comp[schema.joints], + datasheet: comp[schema.datasheet], + price: comp[schema.price], + img: comp[schema.img], + url: comp[schema.url] + }; + + if (words.length > 0) { + const text = componentText(component); + if(words.every(word => text.includes(word))) { + results.push(component); + } + } else { + results.push(component); + } + } + } + }, () => aborted); + + if (aborted) { + return null; + } + + if (results.length > 0) { + const attributesLut = await unpackLinesAsArray('attributes-lut'); + results.forEach(res => { + res.attributes = {}; + res.attrsIdx.map(idx => JSON.parse(attributesLut[idx])).forEach(entry => { + res.attributes[entry[0]] = entry[1]; + }); + }); + } + + return results; } handleCategoryChange = (category, value) => { diff --git a/web/src/db.js b/web/src/db.js index 9881bf56e04..fe4fa164514 100644 --- a/web/src/db.js +++ b/web/src/db.js @@ -1,103 +1,188 @@ import Dexie from 'dexie'; import * as pako from 'pako'; +import untar from "js-untar"; if (!window.indexedDB) { alert("This page requires IndexedDB to work.\n" + - "Your browser does not support it. Please upgrade your browser."); + "Your browser does not support it. 
Please upgrade your browser."); } async function persist() { -return await navigator.storage?.persist?.(); + return await navigator.storage?.persist?.(); } export const db = new Dexie('jlcparts'); -db.version(1).stores({ +db.version(2).stores({ settings: 'key', - components: 'lcsc, category, mfr, *indexWords', - categories: 'id++,[category+subcategory], subcategory, category' + jsonlines: 'name' }); -function extractCategoryKey(category) { - return category.id; -} const SOURCE_PATH = "data"; +const dbWebPath = `${SOURCE_PATH}/all.jsonlines.tar`; + +let jsonlines = {}; // copy of the database in memory so we only access the database once (doesn't really matter - it would be pretty fast anyway) +async function getJsonlines() { + if (Object.keys(jsonlines).length === 0) { + (await db.jsonlines.toArray()).forEach(obj => { + jsonlines[obj.name] = obj.compressedData + }); + } + return jsonlines; +} + +export async function unpackLinesAsArray(name) { + let arr = []; + await unpackAndProcessLines(name, (val, idx) => arr.push(val)); + return arr; +} + +async function yieldExec() { + return new Promise((resolve, reject) => { + setTimeout(() => resolve(), 0); + }); +} + +export async function unpackAndProcessLines(name, callback, checkAbort) { + await getJsonlines(); + + if (jsonlines[name] === undefined) { + return; + } + + let time = new Date().getTime(); + + if (!window.DecompressionStream) { + console.error("DecompressionStream is not supported in this environment."); + return; + } + + const decompressionStream = new window.DecompressionStream('gzip'); + + // Convert the ArrayBuffer to a ReadableStream + const inputStream = new ReadableStream({ + start(controller) { + controller.enqueue(jsonlines[name]); + controller.close(); + }, + }); + + // Pipe the input stream through the decompression stream + const decompressedStream = inputStream.pipeThrough(decompressionStream); + + // Convert the stream into text + const textStream = decompressedStream.pipeThrough(new 
window.TextDecoderStream()); + + const reader = textStream.getReader(); // to read chunks of text from stream + let chunk = ''; + let idx = 0; + let lastYield = new Date().getTime(); + + try { + while (true) { + + // Periodically allow UI to do what it needs to, including updating any abort flag. + // This does slow down the this function a variable amount (could be <100ms, could be a few seconds) + const now = new Date().getTime(); + if (now - lastYield > 300) { + await yieldExec(); + console.log('yielded for ', new Date().getTime() - now, 'ms'); + lastYield = new Date().getTime(); + + if (checkAbort && checkAbort()) { // check abort flag + break; + } + } + + + const { done, value } = await reader.read(); + if (done) { + // If there's any remaining line, process it as well -- should never happen + if (chunk) { + callback(chunk, idx++); + } + break; + } + + chunk += value; + + let start = 0; + while (true) { + let pos = chunk.indexOf('\n', start); + if (pos >= 0) { + if (callback(chunk.slice(start, pos), idx++) === 'abort') { + break; // quit early + } + start = pos + 1; + } else { + chunk = chunk.slice(start); // dump everything that we've processed + break; // no more lines in our chunk + } + } + } + + console.log(`Time to gunzip & segment ${name}: ${new Date().getTime() - time}`); + } finally { + reader.releaseLock(); + } +} // Updates the whole component library, takes a callback for reporting progress: // the progress is given as list of tuples (task, [statusMessage, finished]) export async function updateComponentLibrary(report) { await persist(); - report({"Component index": ["fetching", false]}) - let index = await fetchJson(`${SOURCE_PATH}/index.json`, - "Cannot fetch categories index: "); - let progress = {} + + let progress = {}; let updateProgress = (name, status) => { progress[name] = status; report(progress); - } - db.settings.put({key: "lastDbUpdate", value: index.created}) - await updateCategories(index.categories, - // onNew - async (cName, sName, 
attr) => { - let name = cName + ": " + sName; - updateProgress(name, ["Adding components 1/2", false]); - let category = await addCategory(cName, sName, attr); - updateProgress(name, ["Updating stock 2/2", false]); - await updateStock(category); - updateProgress(name, ["Added", true]); - return category; - }, - // onUpdateExisting - async (category, attr) => { - let cName = category.category; - let sName = category.subcategory; - let name = cName + ": " + sName; - updateProgress(name, ["Updating components 1/2", false]); - await deleteCategory(category); - let newCategory = await addCategory(cName, sName, attr); - updateProgress(name, ["Updating stock 2/2", false]); - await updateStock(newCategory); - updateProgress(name, ["Update finished", true]); - return newCategory; - }, - // onUpdateStock - async (category, _) => { - let cName = category.category; - let sName = category.subcategory; - let name = cName + ": " + sName; - updateProgress(name, ["Updating stock 1/1", false]); - await updateStock(category); - updateProgress(name, ["Stock updated", true]); - return category; - }, - // onExcessive - async category => { - let cName = category.category; - let sName = category.subcategory; - let name = cName + ": " + sName; - updateProgress(name, ["Removing category", false]); - await deleteCategory(category); - updateProgress(name, ["Removed", true]); + }; + + // get new db files + const downloadingTitle = `Downloading ${dbWebPath}`; + updateProgress(downloadingTitle, ["In progress", false]); + const resp = await fetch(dbWebPath); + if (resp.status === 200) { + const data = await resp.arrayBuffer(); + updateProgress(downloadingTitle, ["OK", true]); + + const untarTitle = `Updating database`; + updateProgress(untarTitle, ["In progress", false]); + + const files = await untar(data); + for (const file of files) { + const basename = file.name.split('.')[0]; + let result = await db.jsonlines.put({ name: basename, compressedData: file.buffer }); + console.log(result); + + // 
store copy in memory (we can load from indexeddb on startup) + jsonlines[basename] = file.buffer; } - ); + + updateProgress(untarTitle, ["OK", true]); + + db.settings.put({ + key: "lastUpdate", + value: resp.headers.get('Last-Modified') || new Date().toUTCString() + }); + + } else { + updateProgress(downloadingTitle, ["Download failed", false]); + } } // Check if the component library can be updated export async function checkForComponentLibraryUpdate() { - let index = await fetchJson(`${SOURCE_PATH}/index.json`, - "Cannot fetch categories index: "); - let updateAvailable = false; - let onUpdate = (category) => { updateAvailable = true; return category; } - await updateCategories(index.categories, - // onNew - onUpdate, - // onUpdateExisting - onUpdate, - // onUpdateStock - onUpdate, - // onExcessive - onUpdate - ); + let lastUpdate = (await db.settings.get("lastUpdate"))?.value || new Date(0).toUTCString(); + + let head = await fetch(dbWebPath, { + method: 'HEAD', + headers: { + 'If-Modified-Since': lastUpdate + } + }); + + let updateAvailable = head.status === 200; // 304 if not modified; any error means we don't know if there's an update return updateAvailable; } @@ -129,135 +214,3 @@ export async function fetchJson(path, errorIntro) { throw Error(errorIntro + `Response is not a (compressed) JSON, but ${contentType}: ` + path); } - -async function fetchText(path, errorIntro) { - let response = await fetch(path); - if (!response.ok) { - throw Error(errorIntro + response.statusText); - } - return await response.text(); -} - -// Update categories. Fetched categoryIndex and 3 callback are supplied to -// perform the update. 
-async function updateCategories(categoryIndex, onNew, onUpdateExisting, onUpdateStock, onExcessive) { - let updates = []; - let usedCategories = new Set(); - for (const [categoryName, subcategories] of Object.entries(categoryIndex)) { - for ( const [subcategoryName, attributes] of Object.entries(subcategories)) { - let action = db.categories - .where({category: categoryName, subcategory: subcategoryName}) - .first(async category => { - if (category === undefined) { - category = await onNew(categoryName, subcategoryName, attributes); - } else if (attributes.datahash !== category.datahash || - attributes.sourcename !== category.sourcename) - { - category = await onUpdateExisting(category, attributes); - } else if (attributes.stockhash !== category.stockhash) { - category = await onUpdateStock(category); - } - - if (category) { - usedCategories.add(extractCategoryKey(category)); - } - }); - updates.push(action); - } - } - await Promise.all(updates); - await db.categories.each(category => { - if (usedCategories.has(extractCategoryKey(category))) { - return; - } - onExcessive(category); - }); -} - -// Takes an array containing schema and an array of values and turns them into -// dictionary -function restoreObject(schema, source) { - return schema.reduce((obj, k, i) => { - obj[k] = source[i]; - return obj; - }, {}); -} - -// Takes a JSON fetched from server and adds them to the database for the -// corresponding category -function addComponents(category, components) { - let schema = components.schema; - let cObjects = components.components.map(src => { - let obj = restoreObject(schema, src); - obj.category = extractCategoryKey(category); - return obj; - }); - return db.components.bulkPut(cObjects); -} - -// Add a single category and fetch all of its components -async function addCategory(categoryName, subcategoryName, attributes) { - let components = await fetchJson(`${SOURCE_PATH}/${attributes.sourcename}.json.gz`, - `Cannot fetch components for category 
${categoryName}: ${subcategoryName}: `); - return db.transaction("rw", db.categories, db.components, async () => { - let key = await db.categories.put({ - category: categoryName, - subcategory: subcategoryName, - sourcename: attributes.sourcename, - datahash: attributes.datahash, - stockhash: attributes.stockhash - }); - let category = await db.categories.get(key); - await addComponents(category, components); - return category; - }); -} - -// Fetch and update stock -async function updateStock(category) { - let stock = await fetchJson(`${SOURCE_PATH}/${category.sourcename}.stock.json`, - `Cannot fetch stock for category ${category.category}: ${category.subcategory}: `); - await db.components.where({category: category.id}).modify(component =>{ - component.stock = stock[component.lcsc]; - }); - // await db.transaction("rw", db.components, async () => { - // let actions = []; - // for (const [component, stockVal] of Object.entries(stock)) { - // actions.push(db.components.update(component, {"stock": stockVal })); - // } - // await Promise.all(actions); - // }); - let hash = await fetchText(`${SOURCE_PATH}/${category.sourcename}.stock.json.sha256`, - `Cannot fetch stock hash for category ${category.category}: ${category.subcategory}: `); - await db.categories.update(extractCategoryKey(category), {stockhash: hash}); -} - -// Delete given category and all of its components -async function deleteCategory(category) { - await db.transaction("rw", db.components, db.categories, async () => { - await db.components.where({category: extractCategoryKey(category)}).delete(); - await db.categories.delete(extractCategoryKey(category)); - }); -} - - -// See https://stackoverflow.com/questions/64114482/aborting-dexie-js-query -// export function cancellableDexieQuery(includedTables, querierFunction) { -// let tx = null; -// let cancelled = false; -// const promise = db.transaction('r', includedTables, () => { -// if (cancelled) -// throw new Dexie.AbortError('Query was cancelled'); -// 
tx = Dexie.currentTransaction; -// return querierFunction(); -// }); -// return [ -// promise, -// () => { -// cancelled = true; // In case transaction hasn't been started yet. -// if (tx) -// tx.abort(); // If started, abort it. -// tx = null; // Avoid calling abort twice. -// } -// ]; -// } \ No newline at end of file diff --git a/web/src/history.js b/web/src/history.js index 29aea60d93c..29a86b4a69e 100644 --- a/web/src/history.js +++ b/web/src/history.js @@ -1,5 +1,5 @@ import React from 'react'; -import { fetchJson, db } from './db' +import { fetchJson, unpackAndProcessLines, unpackLinesAsArray } from './db' import { Spinbox, InlineSpinbox, ZoomableLazyImage, formatAttribute, findCategoryById, getImageUrl, restoreLcscUrl } from './componentTable' @@ -19,8 +19,17 @@ class HistoryItem extends React.Component { } componentDidMount() { - db.components.get({lcsc: this.props.lcsc}).then( component => { - this.setState({info: component}); + let schema; + unpackAndProcessLines('components', (component, idx) => { + component = JSON.parse(component); + if (idx === 0) { // first entry is schema + schema = component; + } else { + if (component[schema.lcsc] === this.props.lcsc) { + this.setState({info: component}); + return 'abort'; // done + } + } }); } @@ -152,7 +161,10 @@ class HistoryTable extends React.Component { log.sort((a, b) => b.day - a.day); this.setState({table: log}); }); - db.categories.toArray().then( categories => this.setState({categories}) ); + + unpackLinesAsArray('subcategories').then(cats => { + this.setState({categories: cats.filter((c,i) => i > 0).map(s => JSON.parse(s))}); + }); } render() {