From 3f8054816b95a7404ac17041f27d597fd29a95c8 Mon Sep 17 00:00:00 2001 From: EMaslowskiQ <118929649+EMaslowskiQ@users.noreply.github.com> Date: Mon, 22 Jan 2024 20:51:03 -0500 Subject: [PATCH] DPO3DPKRT-701/generate downloads support for scenes (#566) (fix) restoring generate-downloads functionality with support for returned svx scenes and draco/usdz models (fix) modified tags assigned to 'Download:' to allow for capturing a model's use and it's type. --- .gitignore | 2 +- server/collections/impl/PublishScene.ts | 19 +- server/db/api/ModelSceneXref.ts | 1 + server/db/api/Scene.ts | 1 + server/job/impl/Cook/JobCook.ts | 2 +- .../impl/Cook/JobCookSIGenerateDownloads.ts | 969 +++++++++++++++--- .../job/impl/Cook/JobCookSIPackratInspect.ts | 8 +- .../storage/interface/AssetStorageAdapter.ts | 4 +- server/tsconfig.json | 3 +- server/utils/logger.ts | 2 + server/utils/migration/SceneMigration.ts | 2 +- .../workflow/impl/Packrat/WorkflowEngine.ts | 46 +- 12 files changed, 875 insertions(+), 184 deletions(-) diff --git a/.gitignore b/.gitignore index 7e6b52cb3..d56bbd2d8 100644 --- a/.gitignore +++ b/.gitignore @@ -123,5 +123,5 @@ server/config/solr/data/packratMeta/data ### Per-developer settings ### JTConfig -### Init Scripts ### +### Environment Setup Scripts ### *.bat \ No newline at end of file diff --git a/server/collections/impl/PublishScene.ts b/server/collections/impl/PublishScene.ts index 08295a13a..c3a647e78 100644 --- a/server/collections/impl/PublishScene.ts +++ b/server/collections/impl/PublishScene.ts @@ -281,9 +281,13 @@ export class PublishScene { return null; } + // LOG.info(`>>> computeDownloadMSXMap (${idScene}): ${H.Helpers.JSONStringify(MSXs)}`,LOG.LS.eDEBUG); const DownloadMSXMap: Map = new Map(); for (const MSX of MSXs) { - if (MSX.Usage && MSX.Usage.startsWith('Download')) { + // HACK: Packrat is misusing the Usage property returned by Cook for Voyager scene generation. 
Some + // assets like draco and USDZ downloads are used by the viewer & as a download. temporarily adding + // their Usage types until a file's 'downloadable' property is detached from 'Usage'. (#DPO3DPKRT-777) + if (MSX.Usage && (MSX.Usage.startsWith('Download:') || MSX.Usage.startsWith('App3D') || MSX.Usage.startsWith('iOSApp3D'))) { const SOI: DBAPI.SystemObjectInfo | undefined = await CACHE.SystemObjectCache.getSystemFromObjectID({ eObjectType: COMMON.eSystemObjectType.eModel, idObject: MSX.idModel }); if (SOI) DownloadMSXMap.set(SOI.idSystemObject, MSX); @@ -303,14 +307,14 @@ export class PublishScene { return PublishScene.sendResult(true); if (newDownloadState) { - LOG.info(`PublishScene.handleSceneUpdates generating downloads for scene ${idScene} (skipping)`, LOG.LS.eGQL); + LOG.info(`PublishScene.handleSceneUpdates generating downloads for scene ${idScene}`, LOG.LS.eGQL); // Generate downloads const workflowEngine: WF.IWorkflowEngine | null = await WF.WorkflowFactory.getInstance(); if (!workflowEngine) return PublishScene.sendResult(false, `Unable to fetch workflow engine for download generation for scene ${idScene}`); - // HACK: temporarily skip generate downloads while development on that wraps up - // workflowEngine.generateSceneDownloads(idScene, { idUserInitiator: idUser }); // don't await + // trigger the workflow/recipe + workflowEngine.generateSceneDownloads(idScene, { idUserInitiator: _idUser }); // don't await return { success: true, downloadsGenerated: true, downloadsRemoved: false }; } else { // Remove downloads LOG.info(`PublishScene.handleSceneUpdates removing downloads for scene ${idScene}`, LOG.LS.eGQL); @@ -343,6 +347,7 @@ export class PublishScene { } private async collectAssets(ePublishedStateIntended?: COMMON.ePublishedState): Promise { + // LOG.info(`>>> collectAssets.DownloadMSXMap: ${H.Helpers.JSONStringify(this.DownloadMSXMap)}`,LOG.LS.eDEBUG); if (!this.DownloadMSXMap) return false; this.assetVersions = await 
DBAPI.AssetVersion.fetchLatestFromSystemObject(this.idSystemObject); @@ -416,6 +421,8 @@ export class PublishScene { } } } + + // LOG.info(`>>> collectAssets.SAC: ${H.Helpers.JSONStringify(this.SacList)}`,LOG.LS.eDEBUG); return true; } @@ -487,6 +494,7 @@ export class PublishScene { this.resourcesHotFolder = path.join(Config.collection.edan.resourcesHotFolder, this.scene.EdanUUID!); // eslint-disable-line @typescript-eslint/no-non-null-assertion for (const SAC of this.SacList.values()) { + // LOG.info(`>>> stageDownloads.SAC: ${H.Helpers.JSONStringify(SAC)}`,LOG.LS.eDEBUG); if (!SAC.model && !SAC.metadataSet) // SAC is not a attachment, skip it continue; @@ -692,7 +700,8 @@ export class PublishScene { case '.usdz': FILE_TYPE = 'usdz'; break; } - switch (SAC.modelSceneXref.Usage?.replace('Download ', '').toLowerCase()) { + // handle download types + switch (SAC.modelSceneXref.Usage?.replace('Download:', '').toLowerCase()) { case undefined: case 'webassetglblowuncompressed': category = 'Low resolution'; MODEL_FILE_TYPE = 'glb'; break; case 'webassetglbarcompressed': category = 'Low resolution'; MODEL_FILE_TYPE = 'glb'; DRACO_COMPRESSED = true; break; diff --git a/server/db/api/ModelSceneXref.ts b/server/db/api/ModelSceneXref.ts index 49d5ba851..f6e6c37b4 100644 --- a/server/db/api/ModelSceneXref.ts +++ b/server/db/api/ModelSceneXref.ts @@ -87,6 +87,7 @@ export class ModelSceneXref extends DBC.DBObject implements } public computeModelAutomationTag(): string { + // LOG.info(`>>> computeModelAutomationTag for ${this.Name} (${this.Usage}|${this.Quality}|${this.UVResolution})`,LOG.LS.eDEBUG); return `scene-${this.Usage}-${this.Quality}-${this.UVResolution}`; } diff --git a/server/db/api/Scene.ts b/server/db/api/Scene.ts index 2ac8b90f8..56f9bc610 100644 --- a/server/db/api/Scene.ts +++ b/server/db/api/Scene.ts @@ -172,6 +172,7 @@ export class Scene extends DBC.DBObject implements SceneBase, SystemO /** fetches scenes which are children of the specified idModelParent */ 
static async fetchChildrenScenes(idModelParent: number): Promise { + // LOG.info(`>>>> idModelParent: ${idModelParent}`,LOG.LS.eDEBUG); if (!idModelParent) return null; try { diff --git a/server/job/impl/Cook/JobCook.ts b/server/job/impl/Cook/JobCook.ts index e2783222c..089bb8729 100644 --- a/server/job/impl/Cook/JobCook.ts +++ b/server/job/impl/Cook/JobCook.ts @@ -433,7 +433,7 @@ export abstract class JobCook extends JobPackrat { // TODO: more robust support with alt type // const stat = await webdavClient.stat(destination); // const fileSize = (stat as FileStat).size; - // console.log(`>>>> fetchFile file size: ${fileSize} | ${destination}`); + // LOG.info(`>>>> fetchFile file size: ${fileSize} | ${destination}`,LOG.LS.eDEBUG); // if(fileSize <= 0) // throw new Error(`destination file doesn't exist or is empty. (${fileSize} bytes | ${destination})`); diff --git a/server/job/impl/Cook/JobCookSIGenerateDownloads.ts b/server/job/impl/Cook/JobCookSIGenerateDownloads.ts index 4ec3c993b..3cfada57d 100644 --- a/server/job/impl/Cook/JobCookSIGenerateDownloads.ts +++ b/server/job/impl/Cook/JobCookSIGenerateDownloads.ts @@ -16,7 +16,23 @@ import { PublishScene } from '../../../collections/impl/PublishScene'; import { ASL, LocalStore } from '../../../utils/localStore'; import { RouteBuilder, eHrefMode } from '../../../http/routes/routeBuilder'; +// scene speecific +import { SvxReader } from '../../../utils/parser'; +import { JobCookSIVoyagerSceneParameterHelper } from './JobCookSIVoyagerScene'; + +// system specific import * as path from 'path'; +import { Readable } from 'stream'; + +type FileProcessItem = { + downloadType: string; + fileType: string; + fileName: string; + success: boolean; + error?: string; + RSR?: STORE.ReadStreamResult; + data?: any; +}; export class JobCookSIGenerateDownloadsParameters { constructor(idScene: number | undefined, @@ -25,7 +41,11 @@ export class JobCookSIGenerateDownloadsParameters { svxFile: string, sourceDiffuseMapFile: string | 
undefined = undefined, sourceMTLFile: string | undefined = undefined, - outputFileBaseName: string | undefined = undefined) { + outputFileBaseName: string | undefined = undefined, + units: string | undefined = undefined, + metaDataFile: string | undefined = undefined, + parameterHelper: JobCookSIVoyagerSceneParameterHelper | undefined = undefined) { + this.idScene = idScene; this.idModel = idModel; this.sourceMeshFile = path.basename(sourceMeshFile); @@ -33,7 +53,11 @@ export class JobCookSIGenerateDownloadsParameters { this.sourceDiffuseMapFile = sourceDiffuseMapFile ? path.basename(sourceDiffuseMapFile) : undefined; this.sourceMTLFile = sourceMTLFile ? path.basename(sourceMTLFile) : undefined; this.outputFileBaseName = outputFileBaseName ? path.basename(outputFileBaseName) : undefined; + this.units = units ? units : undefined; + this.metaDataFile = metaDataFile ? metaDataFile : undefined; + this.parameterHelper = parameterHelper ? parameterHelper : undefined; } + idScene: number | undefined; idModel: number | undefined; sourceMeshFile: string; // required @@ -41,6 +65,11 @@ export class JobCookSIGenerateDownloadsParameters { sourceMTLFile?: string | undefined; sourceDiffuseMapFile?: string | undefined; outputFileBaseName?: string | undefined; + units: string | undefined; + metaDataFile?: string | undefined; + + // extract and remove these from the parameter object before passing to Cook + parameterHelper?: JobCookSIVoyagerSceneParameterHelper; } export class JobCookSIGenerateDownloadsOutput { @@ -68,26 +97,62 @@ export class JobCookSIGenerateDownloads extends JobCook { + + // grab our Packrat Scene from the database. idScene is a parameter passed in when creating this object const sceneSource: DBAPI.Scene | null = this.idScene ? await DBAPI.Scene.fetch(this.idScene) : null; if (!sceneSource) return this.logError(`createSystemObjects unable to compute source scene from id ${this.idScene}`); + // grab the scene's SystemObject. 
const sceneSystemObject: DBAPI.SystemObject | null = await sceneSource.fetchSystemObject(); if (!sceneSystemObject) return this.logError(`createSystemObjects unable to fetch scene system object from ${JSON.stringify(sceneSource, H.Helpers.saferStringify)}`); + // grab our master model's source info const modelSource: DBAPI.Model | null = this.idModel ? await DBAPI.Model.fetch(this.idModel) : null; if (!modelSource) return this.logError(`createSystemObjects unable to compute source model from id ${this.idModel}`); - const MSXSources: DBAPI.ModelSceneXref[] | null = await DBAPI.ModelSceneXref.fetchFromModelAndScene(modelSource.idModel, sceneSource.idScene); - if (!MSXSources) - return this.logError(`createSystemObjects unable to compute ModelSceneXrefs from idModel ${this.idModel}, idScene ${this.idScene}`); - - const vModelGeometryFile: DBAPI.Vocabulary | undefined = await this.computeVocabModelGeometryFile(); - if (!vModelGeometryFile) - return this.logError('createSystemObjects unable to calculate vocabulary needed to ingest generated downloads'); - - // Retrieve generated files - let downloadMap: Map = new Map(); // map from download type -> download filename + // Retrieve generated files from Cook. Cook may return multiple types of objects (models, scenes, etc.) + // map from download type -> download filename + let downloadMap: Map = new Map(); try { downloadMap = await JobCookSIGenerateDownloadsOutput.extractDownloads(JSON.parse(this._dbJobRun.Output || '')); } catch (err) { @@ -140,170 +202,425 @@ export class JobCookSIGenerateDownloads extends JobCook = []; + // record updated asset -> asset version, for use in rolling a new SystemObjectVersion for the scene const assetVersionOverrideMap: Map = new Map(); const LS: LocalStore = await ASL.getOrCreateStore(); const idUserCreator: number = LS?.idUser ?? 
0; + // cycle through retrieved downloads, processing them + LOG.info(`JobCookSIGenerateDownloads processing ${downloadMap.size} generated downloads (idScene:${sceneSource.idScene})`,LOG.LS.eJOB); for (const [downloadType, downloadFile] of downloadMap) { + + // fetch the file from WebDav shared space with Cook + // TODO: just check if file exists vs. actually opening stream LOG.info(`JobCookSIGenerateDownloads processing download ${downloadFile} of type ${downloadType}`, LOG.LS.eJOB); const RSR: STORE.ReadStreamResult = await this.fetchFile(downloadFile); if (!RSR.success || !RSR.readStream) return this.logError(`createSystemObjects unable to fetch stream for generated download ${downloadFile}: ${RSR.error}`); - // look for existing model, a child object of modelSource, with the matching downloadType - let model: DBAPI.Model | null = await this.findMatchingModel(modelSource, downloadType); - let modelSO: DBAPI.SystemObject | null = null; - let Asset: DBAPI.Asset | null = null; - - if (model) { - // if we already have a model, look for the asset that we are likely updating: - modelSO = await model.fetchSystemObject(); - if (modelSO) { - const modelAssets: DBAPI.Asset[] | null = await DBAPI.Asset.fetchFromSystemObject(modelSO.idSystemObject); - if (modelAssets) { - for (const modelAsset of modelAssets) { - if (modelAsset.FileName === downloadFile) { - Asset = modelAsset; - break; - } - } - } else - LOG.error(`JobCookSIGenerateDownloads.createSystemObjects unable to fetch assets for model systemobject ${JSON.stringify(modelSO, H.Helpers.saferStringify)}`, LOG.LS.eJOB); - } else - LOG.error(`JobCookSIGenerateDownloads.createSystemObjects unable to fetch system object for ${JSON.stringify(modelSource, H.Helpers.saferStringify)}`, LOG.LS.eJOB); - } else { - // create Model (for each download generated) - model = await this.createModel(downloadFile, downloadType, modelSource); - if (!await model.create()) - return this.logError(`createSystemObjects unable to create model 
${JSON.stringify(model, H.Helpers.saferStringify)}`); - - // link each model as derived from both the scene and the master model - const SOX1: DBAPI.SystemObjectXref | null = await DBAPI.SystemObjectXref.wireObjectsIfNeeded(sceneSource, model); - if (!SOX1) - return this.logError(`createSystemObjects unable to wire Scene ${JSON.stringify(sceneSource, H.Helpers.saferStringify)} and Model ${JSON.stringify(model, H.Helpers.saferStringify)} together`); - - const SOX2: DBAPI.SystemObjectXref | null = await DBAPI.SystemObjectXref.wireObjectsIfNeeded(modelSource, model); - if (!SOX2) - return this.logError(`createSystemObjects unable to wire Model Source ${JSON.stringify(modelSource, H.Helpers.saferStringify)} and Model ${JSON.stringify(model, H.Helpers.saferStringify)} together`); + // build our item for tracking the file and push into our queue + const currentItemResult: FileProcessItem = { + downloadType, + fileType: this.getFileTypeFromDownloadType(downloadType), + fileName: downloadFile, + success: true }; + + // scenes get set aside for later processing since it must be done after models + // while models are processed on the spot. + switch(currentItemResult.fileType) { + // if we're a scene file then grab actual data and store it + case 'scene': { + // parse our scene file + const svx: SvxReader = new SvxReader(); + const res: H.IOResults = await svx.loadFromStream(RSR.readStream); + if (!res.success || !svx.SvxExtraction) + return this.logError(`JobCookSIGenerateDownloads.createSystemObjects unable to parse scene file ${downloadFile}: ${res.error}`); + + // store the results. 
skip additional scenes if any + currentItemResult.data = svx.SvxExtraction; + if(svxSceneFile != null) + this.logError(`JobCookSIGenerateDownloads.createSystemObjects detected multiple scene files in Cook response (idScene: ${sceneSource.idScene} | file: ${downloadFile})`); + svxSceneFile = currentItemResult; + continue; + } + + // models we process in place ingesting into the system and storing the MSX for later use + case 'model': { + const modelProcessingResult = await this.processModelFile(sceneSource,modelSource,currentItemResult,RSR,idUserCreator); + if(!modelProcessingResult) { + // store our info with an error, add to our report, and break out of loop (no point checking other files) + modelFiles.push({ ...currentItemResult, success: false, error: `error processing model '${downloadFile}'` }); + await this.appendToReportAndLog(`JobCookSIGenerateDownloads failed model file: ${downloadFile}`); + break; + } + + // if we have success, extract our overrides and ModelScenXref so we can link this up to the right resources + const { assetVersionOverrideMap: assetVersionOverrides, MSX } = modelProcessingResult; + currentItemResult.data = { assetVersionOverrides, MSX }; + modelFiles.push(currentItemResult); + // LOG.info(`>>> model file results: ${H.Helpers.JSONStringify(currentItemResult)}`,LOG.LS.eDEBUG); + + // need to finish all models and the scene before linking up to the Scene so they connect + // to the correct versions. So we combine our asset overrides for later handling. 
+ // LOG.info(`>>> model (${downloadFile}) assetVersionOverrideMap: ${H.Helpers.JSONStringify(assetVersionOverrideMap)}`,LOG.LS.eDEBUG); + assetVersionOverrides.forEach((value, key) => { assetVersionOverrideMap.set(key, value); }); + + } break; } + } - // ingest model assets, and associate them with the correct model - const ISI: STORE.IngestStreamOrFileInput = { - readStream: RSR.readStream, - localFilePath: null, - asset: Asset, - FileName: downloadFile, - FilePath: '', - idAssetGroup: 0, - idVAssetType: vModelGeometryFile.idVocabulary, - allowZipCracking: false, - idUserCreator, - SOBased: model, - Comment: 'Created by Cook si-generate-downloads', - doNotUpdateParentVersion: true // we create a new system object version below - }; - - LOG.info(`JobCookSIGenerateDownloads.createSystemObjects ingesting ${downloadFile}`, LOG.LS.eJOB); - const IAR: STORE.IngestAssetResult = await STORE.AssetStorageAdapter.ingestStreamOrFile(ISI); - if (!IAR.success) { - await this.appendToReportAndLog(`${this.name()} unable to ingest generated download model ${downloadFile}: ${IAR.error}`, true); - continue; - // return { success: false, error: ISR.error }; - } - if (IAR.assetVersions && IAR.assetVersions.length > 1) - LOG.error(`JobCookSIGenerateDownloads.createSystemObjects created multiple asset versions, unexpectedly, ingesting ${downloadFile}`, LOG.LS.eJOB); - - let idSystemObjectModel: number | null = modelSO ? modelSO.idSystemObject : null; - if (!idSystemObjectModel) { - const SOI: DBAPI.SystemObjectInfo | undefined = await CACHE.SystemObjectCache.getSystemFromModel(model); - idSystemObjectModel = SOI ? SOI.idSystemObject : null; - } - const assetVersion: DBAPI.AssetVersion | null = (IAR.assetVersions && IAR.assetVersions.length > 0) ? IAR.assetVersions[0] : null; - const pathObject: string = idSystemObjectModel ? 
RouteBuilder.RepositoryDetails(idSystemObjectModel, eHrefMode.ePrependClientURL) : ''; - const hrefObject: string = H.Helpers.computeHref(pathObject, model.Name); - const pathDownload: string = assetVersion ? RouteBuilder.DownloadAssetVersion(assetVersion.idAssetVersion, eHrefMode.ePrependServerURL) : ''; - const hrefDownload: string = pathDownload ? ': ' + H.Helpers.computeHref(pathDownload, 'Download') : ''; - await this.appendToReportAndLog(`${this.name()} ingested generated download model ${hrefObject}${hrefDownload}`); - - if (assetVersion) - assetVersionOverrideMap.set(assetVersion.idAsset, assetVersion.idAssetVersion); - - // create/update ModelSceneXref for each download generated ... do after ingest so that we have the storage size available - const FileSize: bigint | null = assetVersion ? assetVersion.StorageSize : null; - const MSXSource: DBAPI.ModelSceneXref | null = MSXSources.length > 0 ? MSXSources[0] : null; - - const MSXs: DBAPI.ModelSceneXref[] | null = await DBAPI.ModelSceneXref.fetchFromModelSceneAndName(model.idModel, sceneSource.idScene, model.Name); - let MSX: DBAPI.ModelSceneXref | null = (MSXs && MSXs.length > 0) ? MSXs[0] : null; - let MSXResult: boolean = false; - if (MSX) { - MSX.FileSize = FileSize; - MSXResult = await MSX.update(); - } else { - MSX = new DBAPI.ModelSceneXref({ - idModelSceneXref: 0, - idModel: model.idModel, - idScene: sceneSource.idScene, - Name: model.Name, - Usage: `Download ${downloadType}`, - Quality: null, - FileSize, - UVResolution: null, - BoundingBoxP1X: MSXSource?.BoundingBoxP1X ?? null, - BoundingBoxP1Y: MSXSource?.BoundingBoxP1Y ?? null, - BoundingBoxP1Z: MSXSource?.BoundingBoxP1Z ?? null, - BoundingBoxP2X: MSXSource?.BoundingBoxP2X ?? null, - BoundingBoxP2Y: MSXSource?.BoundingBoxP2Y ?? null, - BoundingBoxP2Z: MSXSource?.BoundingBoxP2Z ?? null, - TS0: MSXSource?.TS0 ?? null, - TS1: MSXSource?.TS1 ?? null, - TS2: MSXSource?.TS2 ?? null, - R0: MSXSource?.R0 ?? null, - R1: MSXSource?.R1 ?? 
null, - R2: MSXSource?.R2 ?? null, - R3: MSXSource?.R3 ?? null, - S0: MSXSource?.S0 ?? null, - S1: MSXSource?.S1 ?? null, - S2: MSXSource?.S2 ?? null, - }); - MSXResult = await MSX.create(); - } + // if we don't have a scene file, then we bail + // TODO: cleanup ingested models on failure + if(!svxSceneFile || svxSceneFile.success===false) + return await this.appendToReportAndLog(`JobCookSIGenerateDownloads failed processing of returned download files (${H.Helpers.JSONStringify(svxSceneFile)})`,true); + + // if we had errors processing models, then we bail + // TODO: rollback to previous versions of all models that were successful on failure (i.e. cleanup) + const hasValidModels: boolean = !modelFiles.some(obj => obj.success === false); + if( hasValidModels === false) { + const errors = `["${ + modelFiles.filter(obj => obj.success === false).map(obj => obj.error).join('","') + }"]`; + return await this.appendToReportAndLog(`JobCookSIGenerateDownloads failed processing of returned download model files (name: ${sceneSource.Name} | idScene: ${sceneSource.idScene} | errors: ${errors})`,true); + } - if (!MSXResult) - return this.logError(`createSystemObjects unable to create/update ModelSceneXref ${JSON.stringify(MSX, H.Helpers.saferStringify)}`); - - // run si-packrat-inspect on this model - if (idSystemObjectModel) { - const results: H.IOResults = await WorkflowUtil.computeModelMetrics(model.Name, model.idModel, idSystemObjectModel, undefined, undefined, - undefined, undefined /* FIXME */, idUserCreator); - if (results.success) - this.appendToReportAndLog(`JobCookSIGenerateDownloads extracted model metrics for ${model.Name}`); - else if (results.error) - this.logError(results.error); - } + // update all ModelSceneXrefs transforms for generated models in the svx scene with what's in the + // scene to ensure the DB matches. this is done to ensure a match when comparing downstream + // Cook/Voyager is assumed to be source of truth. 
+ for(const model of modelFiles) { + + // find modelDetails from scene + const svxModelDetails = svxSceneFile.data.modelDetails.find(svxModel => svxModel.Name === model.fileName); + // LOG.info(`>>> found matching model for MSX update (${model.fileName}): ${H.Helpers.JSONStringify(svxModelDetails)}`, LOG.LS.eDEBUG); + + if(svxModelDetails) { + + // make sure we have a MSX to work with + const MSX: DBAPI.ModelSceneXref = model.data?.MSX ?? null; + if(!MSX) { + LOG.error(`JobCookSIGenerateDownloads cannot update MSX for model (${model.fileName}). invalid input. no MSX found`,LOG.LS.eJOB); + continue; + } + + // overwrite what is currently stored in the transform. assuming Cook + // is the source of truth. + MSX.BoundingBoxP1X = svxModelDetails.BoundingBoxP1X; + MSX.BoundingBoxP1Y = svxModelDetails.BoundingBoxP1Y; + MSX.BoundingBoxP1Z = svxModelDetails.BoundingBoxP1Z; + MSX.BoundingBoxP2X = svxModelDetails.BoundingBoxP2X; + MSX.BoundingBoxP2Y = svxModelDetails.BoundingBoxP2Y; + MSX.BoundingBoxP2Z = svxModelDetails.BoundingBoxP2Z; + MSX.TS0 = svxModelDetails.TS0; + MSX.TS1 = svxModelDetails.TS1; + MSX.TS2 = svxModelDetails.TS2; + MSX.R0 = svxModelDetails.R0; + MSX.R1 = svxModelDetails.R1; + MSX.R2 = svxModelDetails.R2; + MSX.R3 = svxModelDetails.R3; + MSX.S0 = svxModelDetails.S0; + MSX.S1 = svxModelDetails.S1; + MSX.S2 = svxModelDetails.S2; + + // TODO: check if other properties like usage differ + const MSXResult: boolean = await MSX.update(); + if(!MSXResult) + LOG.error(`JobCookSIGenerateDownloads cannot update MSX for model (${model.data.MSX.Name})`,LOG.LS.eJOB); + } else + LOG.info(`JobCookSIGenerateDownloads skipping generated download model (${model.fileName}). 
assuming not referenced by scene (idScene:${sceneSource.idScene}).`,LOG.LS.eJOB); } + // process the scene file, ingesting it + const result = await this.processSceneFile(modelSource, svxSceneFile, idUserCreator); + if(result.success===false) + await this.appendToReportAndLog(`JobCookSIGenerateDownloads failed to process svx scene (${result.error})`); + else + await this.appendToReportAndLog(`JobCookSIGenerateDownloads successful processing of svx scene: ${svxSceneFile.fileName}`); + + // link the models and assets to this Packrat Scene // Clone scene's systemObjectVersion, using the assetVersionOverrideMap populated with new/updated assets const SOV: DBAPI.SystemObjectVersion | null = await DBAPI.SystemObjectVersion.cloneObjectAndXrefs(sceneSystemObject.idSystemObject, null, 'Created by Cook si-generate-downloads', assetVersionOverrideMap); if (!SOV) - return this.logError(`createSystemObjects unable to clone SystemObjectVersion for ${JSON.stringify(sceneSystemObject, H.Helpers.saferStringify)}`); + return this.logError(`JobCookSIGenerateDownloads.createSystemObjects unable to clone SystemObjectVersion for ${JSON.stringify(sceneSystemObject, H.Helpers.saferStringify)}`); // Add scene asset metadata for attachments // LOG.info('JobCookSIGenerateDownloads.createSystemObjects calling PublishScene.extractSceneMetadata', LOG.LS.eJOB); const metadataResult: H.IOResults = await PublishScene.extractSceneMetadata(sceneSystemObject.idSystemObject, LS?.idUser ?? 
null); if (!metadataResult.success) - LOG.error(`JobCookSIGenerateDownloads.createSystemObjects unable to persist scene attachment metadata: ${metadataResult.error}`, LOG.LS.eJOB); + return this.logError(`JobCookSIGenerateDownloads.createSystemObjects unable to persist scene attachment metadata: ${metadataResult.error}`); + await this.appendToReportAndLog(`JobCookSIGenerateDownloads successful generation of downloads: ${svxSceneFile.fileName}`); return { success: true }; } protected async getParameters(): Promise { - return this.parameters; + const params: JobCookSIGenerateDownloadsParameters = { ...this.parameters }; + delete params.idModel; + delete params.idScene; + delete params.units; + delete params.metaDataFile; + delete params.parameterHelper; + return params; + } + + private async computeVocabDownload(): Promise { + if (!JobCookSIGenerateDownloads.vocabDownload) { + JobCookSIGenerateDownloads.vocabDownload = await CACHE.VocabularyCache.vocabularyByEnum(COMMON.eVocabularyID.eModelPurposeDownload); + if (!JobCookSIGenerateDownloads.vocabDownload) + LOG.error('JobCookSIGenerateDownloads unable to fetch vocabulary for Download Model Purpose', LOG.LS.eGQL); + } + return JobCookSIGenerateDownloads.vocabDownload; } - public static computeModelAutomationTag(downloadType: string): string { - return `download-${downloadType}`; + // private validFileExtensions = ['usdz','obj','fbx','ply','glb', 'svx', 'zip']; + private validDownloadTypes = [ + 'objZipFull', + 'objZipLow', + 'gltfZipLow', + 'usdz', + 'webAssetGlbARCompressed', + 'webAssetGlbLowUncompressed', + 'scene_document' + ]; + + private getFileTypeFromDownloadType(downloadType: string) { + + if(this.validDownloadTypes.includes(downloadType)==false) { return ''; } + + switch(downloadType) { + case 'objZipFull': + case 'objZipLow': + case 'gltfZipLow': + case 'usdz': + case 'webAssetGlbARCompressed': + case 'webAssetGlbLowUncompressed': + return 'model'; + + case 'scene_document': + return 'scene'; + + default: + 
return ''; + } + } + + private logError(errorMessage: string): H.IOResults { + // const error: string = `JobCookSIGenerateDownloads.${errorMessage}`; + LOG.error(errorMessage, LOG.LS.eJOB); + return { success: false, error: errorMessage }; + } + + //------------------------------------------------------------------------------ + // MODEL + //------------------------------------------------------------------------------ + // #region model + private async processModelFile(sceneSource: DBAPI.Scene, modelSource: DBAPI.Model, fileItem: FileProcessItem, RSR: STORE.ReadStreamResult, idUserCreator: number ): Promise<{ assetVersionOverrideMap: Map< number, number>, MSX: DBAPI.ModelSceneXref } | null> { //Promise { + + // verify input + if(!sceneSource || fileItem.fileName.length<=0 || idUserCreator < 0 || RSR == null) { + this.logError(`JobCookSIGenerateDownloads.processModelFile invalid parameters passed for ${fileItem.fileName}`); + return null; + } + + // grab our ModelSceneXref from the database for the master model <> scene. + // This is used for defaults linking the model to a Scene in the even that one doesn't already exist + const MSXSources: DBAPI.ModelSceneXref[] | null = await DBAPI.ModelSceneXref.fetchFromModelAndScene(modelSource.idModel, sceneSource.idScene); + if (!MSXSources) { + this.logError(`JobCookSIGenerateDownloads.processModelFile createSystemObjects unable to compute ModelSceneXrefs from idModel ${this.idModel}, idScene ${this.idScene}`); + return null; + } + const MSXSource: DBAPI.ModelSceneXref | null = MSXSources.length > 0 ? MSXSources[0] : null; + // LOG.info(`>>> processModelFile ModelSceneXref.MSXSource: ${H.Helpers.JSONStringify(MSXSource)}`,LOG.LS.eDEBUG); + + // determine the vocabulary needed for ingestion. vocabulary is used for... 
+ const vModelGeometryFile: DBAPI.Vocabulary | undefined = await this.computeVocabModelGeometryFile(); + if (!vModelGeometryFile) { + this.logError('JobCookSIGenerateDownloads.processModelFile createSystemObjects unable to calculate vocabulary needed to ingest generated downloads'); + return null; + } + + // look for existing model, a child object of the master model (modelSource), with the matching downloadType + let model: DBAPI.Model | null = await this.findMatchingModelFromModel(modelSource, fileItem.downloadType); + let modelSO: DBAPI.SystemObject | null = null; + let Asset: DBAPI.Asset | null = null; + + // if we found a matching model then we update it. otherwise, we create a new one + if (model) { + // if we already have a model, look for the asset that we are likely updating: + modelSO = await model.fetchSystemObject(); + if (modelSO) { + const modelAssets: DBAPI.Asset[] | null = await DBAPI.Asset.fetchFromSystemObject(modelSO.idSystemObject); + if (modelAssets) { + for (const modelAsset of modelAssets) { + if (modelAsset.FileName === fileItem.fileName) { + Asset = modelAsset; + break; + } + } + } else { + const name = JSON.stringify(modelSO, H.Helpers.saferStringify); + this.logError(`unable to fetch assets for model system object ${name}`); + return null; + } + } else { + const name = JSON.stringify(modelSource, H.Helpers.saferStringify); + this.logError(`unable to fetch system object ${name}`); + return null; + } + } else { + // create Model (for each download generated) + model = await this.createModel(fileItem.fileName, fileItem.downloadType, modelSource); + if (!await model.create()) { + const name = JSON.stringify(model, H.Helpers.saferStringify); + this.logError(`JobCookSIGenerateDownloads.processModelFile unable to create model: ${name}`); + return null; + } + + // link model as derived from the scene + const SOX1: DBAPI.SystemObjectXref | null = await DBAPI.SystemObjectXref.wireObjectsIfNeeded(sceneSource, model); + if (!SOX1) { + 
this.logError(`JobCookSIGenerateDownloads.processModelFile unable to wire model to scene: ${JSON.stringify(sceneSource, H.Helpers.saferStringify)} and Model ${JSON.stringify(model, H.Helpers.saferStringify)} together`); + return null; + } + + // link model as derived from the master model + const SOX2: DBAPI.SystemObjectXref | null = await DBAPI.SystemObjectXref.wireObjectsIfNeeded(modelSource, model); + if (!SOX2) { + this.logError(`JobCookSIGenerateDownloads.processModelFile unable to wire model to master model source: ${JSON.stringify(modelSource, H.Helpers.saferStringify)} and Model ${JSON.stringify(model, H.Helpers.saferStringify)} together`); + return null; + } + } + + // ingest model assets, and associate them with the correct model + const ISI: STORE.IngestStreamOrFileInput = { + readStream: RSR.readStream, + localFilePath: null, + asset: Asset, + FileName: fileItem.fileName, + FilePath: '', + idAssetGroup: 0, + idVAssetType: vModelGeometryFile.idVocabulary, + allowZipCracking: false, + idUserCreator, + SOBased: model, + Comment: 'Created by Cook si-generate-downloads', + doNotUpdateParentVersion: true // we create a new system object version below + }; + + // ingest model... + LOG.info(`JobCookSIGenerateDownloads.processModelFile ingesting ${fileItem.fileName}`, LOG.LS.eJOB); + const IAR: STORE.IngestAssetResult = await STORE.AssetStorageAdapter.ingestStreamOrFile(ISI); + if (!IAR.success) { + await this.appendToReportAndLog(`${this.name()} unable to ingest generated download model ${fileItem.fileName}: ${IAR.error}`, true); + return null; + } + + // check for multiple asset versions + // Q: what problem(s) does this cause? + if (IAR.assetVersions && IAR.assetVersions.length > 1) + this.logError(`JobCookSIGenerateDownloads.processModelFile created multiple asset versions, unexpectedly, ingesting ${fileItem.fileName}`); + + // if no SysObj exists for this model then we check our cache for one + let idSystemObjectModel: number | null = modelSO ? 
modelSO.idSystemObject : null; + if (!idSystemObjectModel) { + const SOI: DBAPI.SystemObjectInfo | undefined = await CACHE.SystemObjectCache.getSystemFromModel(model); + idSystemObjectModel = SOI ? SOI.idSystemObject : null; + } + + // build out our report details and add + const assetVersion: DBAPI.AssetVersion | null = (IAR.assetVersions && IAR.assetVersions.length > 0) ? IAR.assetVersions[0] : null; + const pathObject: string = idSystemObjectModel ? RouteBuilder.RepositoryDetails(idSystemObjectModel, eHrefMode.ePrependClientURL) : ''; + const hrefObject: string = H.Helpers.computeHref(pathObject, model.Name); + const pathDownload: string = assetVersion ? RouteBuilder.DownloadAssetVersion(assetVersion.idAssetVersion, eHrefMode.ePrependServerURL) : ''; + const hrefDownload: string = pathDownload ? ': ' + H.Helpers.computeHref(pathDownload, 'Download') : ''; + await this.appendToReportAndLog(`${this.name()} ingested generated download model ${hrefObject}${hrefDownload}`); + + // currently not passed in. how is this used? + const assetVersionOverrideMap: Map< number, number> = new Map(); + if (assetVersion) + assetVersionOverrideMap.set(assetVersion.idAsset, assetVersion.idAssetVersion); + + // create/update ModelSceneXref for each download generated ... do after ingest so that we have the storage size available + const FileSize: bigint | null = assetVersion ? assetVersion.StorageSize : null; + + // get our ModelSceneXref from the model id, scene id, and objects name in the DB + const MSXs: DBAPI.ModelSceneXref[] | null = await DBAPI.ModelSceneXref.fetchFromModelSceneAndName(model.idModel, sceneSource.idScene, model.Name); + // LOG.info(`>>> processingModelFile current file (${model.Name}) ModelSceneXrefs: ${H.Helpers.JSONStringify(MSXs)}`,LOG.LS.eDEBUG); + + // if we didn't get one then we create one. otherwise we use the first one + let MSX: DBAPI.ModelSceneXref | null = (MSXs && MSXs.length > 0) ? 
MSXs[0] : null; + let MSXResult: boolean = false; + if (MSX) { + // if we have a record already just update the filesize + MSX.FileSize = FileSize; + + // update our DB record + MSXResult = await MSX.update(); + } else { + // if we don't have a record, create it + const { usage, quality, uvResolution } = JobCookSIGenerateDownloads.computeModelPropertiesFromDownloadType(fileItem.downloadType); + MSX = new DBAPI.ModelSceneXref({ + idModelSceneXref: 0, + idModel: model.idModel, + idScene: sceneSource.idScene, + Name: model.Name, + Usage: usage ?? null, + Quality: quality ?? null, // null + FileSize, + UVResolution: uvResolution ?? null, // null + + // transform + BoundingBoxP1X: MSXSource?.BoundingBoxP1X ?? null, + BoundingBoxP1Y: MSXSource?.BoundingBoxP1Y ?? null, + BoundingBoxP1Z: MSXSource?.BoundingBoxP1Z ?? null, + BoundingBoxP2X: MSXSource?.BoundingBoxP2X ?? null, + BoundingBoxP2Y: MSXSource?.BoundingBoxP2Y ?? null, + BoundingBoxP2Z: MSXSource?.BoundingBoxP2Z ?? null, + TS0: MSXSource?.TS0 ?? null, + TS1: MSXSource?.TS1 ?? null, + TS2: MSXSource?.TS2 ?? null, + R0: MSXSource?.R0 ?? null, + R1: MSXSource?.R1 ?? null, + R2: MSXSource?.R2 ?? null, + R3: MSXSource?.R3 ?? null, + S0: MSXSource?.S0 ?? null, + S1: MSXSource?.S1 ?? null, + S2: MSXSource?.S2 ?? null, + }); + MSXResult = await MSX.create(); + } + + // TODO: if failed cleanup prior ingestion? 
+ if (MSXResult === false) { + this.logError(`JobCookSIGenerateDownloads.processModelFile cannot create/update ModelSceneXref ${JSON.stringify(MSX, H.Helpers.saferStringify)}`); + return null; + } + + // run si-packrat-inspect on this model to get the metrics and make sure it's valid + console.time(`${model.Name} inspection`); + if (idSystemObjectModel) { + const results: H.IOResults = await WorkflowUtil.computeModelMetrics(model.Name, model.idModel, idSystemObjectModel, undefined, undefined, + undefined, undefined /* FIXME */, idUserCreator); + if (results.success) + await this.appendToReportAndLog(`JobCookSIGenerateDownloads extracted model metrics for ${model.Name}`); + else if (results.error) { + // TODO: cleanup ingestion + this.logError(`JobCookSIGenerateDownloads.processModelFile failed inspecting the model: ${model.Name} (${results.error})`); + return null; + } + } + console.timeEnd(`${model.Name} inspection`); + + return { assetVersionOverrideMap, MSX }; } private async createModel(Name: string, downloadType: string, modelSource: DBAPI.Model): Promise { @@ -320,18 +637,64 @@ export class JobCookSIGenerateDownloads extends JobCook { - if (!JobCookSIGenerateDownloads.vocabDownload) { - JobCookSIGenerateDownloads.vocabDownload = await CACHE.VocabularyCache.vocabularyByEnum(COMMON.eVocabularyID.eModelPurposeDownload); - if (!JobCookSIGenerateDownloads.vocabDownload) - LOG.error('JobCookSIGenerateDownloads unable to fetch vocabulary for Download Model Purpose', LOG.LS.eGQL); + public static computeModelPropertiesFromDownloadType(downloadType: string): { usage: string|undefined, quality: string|undefined, uvResolution: number|undefined } { + + // NOTE: caution if return types from Cook change + switch(downloadType) { + + case 'objZipFull': + return { usage: 'Download:'+downloadType, quality: 'Highest', uvResolution: 0 }; + + case 'objZipLow': + case 'gltfZipLow': + case 'webAssetGlbLowUncompressed': + return { usage: 'Download:'+downloadType, quality: 'Low', 
uvResolution: 4096 }; + + // refers to: -100k-2048_std_draco.glb + case 'webAssetGlbARCompressed': + return { usage: 'App3D', quality: 'AR', uvResolution: 2048 }; + + case 'usdz': + return { usage: 'iOSApp3D', quality: 'AR', uvResolution: 2048 }; } - return JobCookSIGenerateDownloads.vocabDownload; + + LOG.error(`JobCookSIGenerateDownloads.computeModelPropertiesFromDownloadType unsupported downloadType: ${downloadType}`,LOG.LS.eJOB); + return { usage: undefined, quality: undefined, uvResolution: undefined }; + } + + public static computeModelAutomationTagFromDownloadType(downloadType: string): string { + + const { usage, quality, uvResolution } = JobCookSIGenerateDownloads.computeModelPropertiesFromDownloadType(downloadType); + if(!usage || !quality || !uvResolution) + return `error-${downloadType}-null-null`; + + switch(downloadType) { + // HACK: need to hardcode these because the model is created outside ModelSceneXref context + // and doesn't have the needed Usage, Quality, and UVResolution details. skipping 'Usage'. 
+ case 'objZipFull': + case 'objZipLow': + case 'gltfZipLow': + case 'webAssetGlbLowUncompressed': + return `download-${downloadType}-${quality}-${uvResolution}`; + + // HACK: hardcoding these as well expecting them to be reassigned/overwritten by ModelSceneXref + // MSX format is: `scene-${this.Usage}-${this.Quality}-${this.UVResolution}` + case 'webAssetGlbARCompressed': + case 'usdz': + return `scene-${usage}-${quality}-${uvResolution}`; + } + + LOG.error(`JobCookSIGenerateDownloads.computeModelAutomationTag unsupported downloadType: ${downloadType}`,LOG.LS.eJOB); + return `unknown-${downloadType}`; } private async computeVocabModelGeometryFile(): Promise { @@ -343,14 +706,304 @@ export class JobCookSIGenerateDownloads extends JobCook { - const matches: DBAPI.Model[] | null = await DBAPI.Model.fetchChildrenModels(modelSource.idModel, null, JobCookSIGenerateDownloads.computeModelAutomationTag(downloadType)); + private async findMatchingModelFromModel(modelSource: DBAPI.Model, downloadType: string): Promise { + const automationTag = JobCookSIGenerateDownloads.computeModelAutomationTagFromDownloadType(downloadType); + const matches: DBAPI.Model[] | null = await DBAPI.Model.fetchChildrenModels(modelSource.idModel, null, automationTag); + + // LOG.info(`>>>> matching existing model with downloaded model (idModel:${modelSource.idModel} | tag:${automationTag})`,LOG.LS.eDEBUG); return matches && matches.length > 0 ? 
matches[0] : null; } + // #endregion + + //------------------------------------------------------------------------------ + // SCENE + //------------------------------------------------------------------------------ + // #region scene + private async processSceneFile(modelSource: DBAPI.Model, fileItem: FileProcessItem, idUserCreator: number): Promise { + + if (!this.sceneParameterHelper) + return this.logError('JobCookSIGenerateDownloads.processSceneFile called without needed parameters'); + + const svxFile: string = fileItem.fileName; //this.parameters.svxFile ?? 'scene.svx.json'; + const svxData = fileItem.data; + const vScene: DBAPI.Vocabulary | undefined = await this.computeVocabAssetTypeScene(); + const vModel: DBAPI.Vocabulary | undefined = await this.computeVocabAssetTypeModelGeometryFile(); + if (!vScene || !vModel) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to calculate vocabulary needed to ingest scene file ${svxFile}`); + + // // Retrieve svx.json data + // let RSR: STORE.ReadStreamResult = await this.fetchFile(svxFile); + // if (!RSR.success || !RSR.readStream) + // return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to fetch stream for scene file ${svxFile}: ${RSR.error}`); + + // Parse Scene + // const svx: SvxReader = new SvxReader(); + // const res: H.IOResults = await svx.loadFromStream(RSR.readStream); + // if (!res.success || !svx.SvxExtraction) + // return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to parse scene file ${svxFile}: ${res.error}`); + + LOG.info(`JobCookSIGenerateDownloads.processSceneFile[${svxFile}] parse scene`, LOG.LS.eJOB); + LOG.info(`JobCookSIGenerateDownloads.processSceneFile fetched scene:${H.Helpers.JSONStringify(svxData)}`, LOG.LS.eJOB); + + // Look for an existing scene, which is a child of the master model (modelSource) + // TODO: what if there are multiple? 
+ const scenes: DBAPI.Scene[] | null = await DBAPI.Scene.fetchChildrenScenes(modelSource.idModel); + if (!scenes) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to fetch children scenes of master model ${modelSource.idModel}`); + + // if we have more than one scene see if there is a clear path to selecting one (i.e. one has EDAN id) + // TODO: investigate why the system sometimes creates additional scenes + if(scenes.length>1) { + LOG.info(`JobCookSIGenerateDownloads.processSceneFile found multiple(${scenes.length}) scenes. pruning...`,LOG.LS.eJOB); + + // Filter the scenes that have EdanUUID and store the ids of removed scenes + const scenesWithEdanUUID: DBAPI.Scene[] = []; + const removedSceneIds: number[] = []; + + for (const scene of scenes) { + if (scene.EdanUUID !== undefined && scene.EdanUUID !== '' && scene.EdanUUID !==null) { + scenesWithEdanUUID.push(scene); + } else { + removedSceneIds.push(scene.idScene); + } + } - private logError(errorMessage: string): H.IOResults { - const error: string = `JobCookSIGenerateDownloads.${errorMessage}`; - LOG.error(error, LOG.LS.eJOB); - return { success: false, error }; + // Check if there is only one scene with EdanUUID + if (scenesWithEdanUUID.length === 1) { + LOG.error(`JobCookSIGenerateDownloads.processSceneFile pruning found scene. Needs cleanup. (modelSource.idModel: ${modelSource.idModel} | omitted idScene: ${removedSceneIds.join(',')})`,LOG.LS.eJOB); + scenes.length = 0; + scenes.push(...scenesWithEdanUUID); + } else { + // If there are more than one, clear the removed IDs as no scene is removed + removedSceneIds.length = 0; + LOG.error(`JobCookSIGenerateDownloads.processSceneFile pruning returned multiple scenes (${scenesWithEdanUUID.length}). Needs cleanup. (modelSource.idModel: ${modelSource.idModel})`,LOG.LS.eJOB); + } + } + + // if we have multiple valid scenes, bail + if(scenes.length>1) + return this.logError(`multiple valid scenes found (${scenes.length}). 
cannot find asset to update (idScene: ${fileItem.fileName})`); + + // If needed, create a new scene (if we have no scenes, or if we have multiple scenes, then create a new one); + // If we have just one scene, before reusing it, see if the model names all match up + let createScene: boolean = (scenes.length !== 1); + if (!createScene && scenes.length > 0 && svxData.modelDetails) { + + for (const MSX of svxData.modelDetails) { + if (MSX.Name) { + + // look for existing models, children of our scene, that match this model's purpose + const model: DBAPI.Model | null = await this.findMatchingModelFromScene(scenes[0], MSX.computeModelAutomationTag()); + + if (!model || (model.Name !== MSX.Name)) { + createScene = true; + break; + } // else, found a match for the model + } // else, no MSX found so likely a download + } + } + + const scene: DBAPI.Scene = createScene ? svxData.extractScene() : scenes[0]; + // LOG.info(`createScene: ${createScene}`,LOG.LS.eDEBUG); + // LOG.info(`scene: ${H.Helpers.JSONStringify(scene)}`,LOG.LS.eDEBUG); + + let asset: DBAPI.Asset | null = null; + if (createScene) { + LOG.info(`JobCookSIGenerateDownloads.processSceneFile creating a new scene (${scene.Name}|${scene.EdanUUID})`,LOG.LS.eJOB); + + // compute ItemParent of ModelSource + scene.Name = this.sceneParameterHelper.sceneName; + if (this.sceneParameterHelper.sceneTitle) + scene.Title = this.sceneParameterHelper.sceneTitle; + if (!await scene.create()) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to create Scene file ${svxFile}: database error`); + + // wire ModelSource to Scene + const SOX: DBAPI.SystemObjectXref | null = await DBAPI.SystemObjectXref.wireObjectsIfNeeded(modelSource, scene); + if (!SOX) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to wire Model Source ${JSON.stringify(modelSource, H.Helpers.saferStringify)} to Scene ${JSON.stringify(scene, H.Helpers.saferStringify)}: database error`); + + // wire ItemParent to 
Scene + const OG: DBAPI.ObjectGraph = this.sceneParameterHelper.OG; + if (OG.item && OG.item.length > 0) { + const SOX2: DBAPI.SystemObjectXref | null = await DBAPI.SystemObjectXref.wireObjectsIfNeeded(OG.item[0], scene); + if (!SOX2) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to wire item ${JSON.stringify(OG.item[0], H.Helpers.saferStringify)} to Scene ${JSON.stringify(scene, H.Helpers.saferStringify)}: database error`); + } + // LOG.info(`JobCookSIGenerateDownloads.processSceneFile[${svxFile}] wire ModelSource to Scene: ${JSON.stringify(SOX, H.Helpers.stringifyMapsAndBigints)}`, LOG.LS.eJOB); + } else { + LOG.info(`JobCookSIGenerateDownloads.processSceneFile updating existing scene (${scene.Name}|${scene.EdanUUID})`,LOG.LS.eJOB); + + // determine if we are updating an existing scene with an existing scene asset: + const sceneSO: DBAPI.SystemObject | null = await scene.fetchSystemObject(); + if (sceneSO) { + const sceneAssets: DBAPI.Asset[] | null = await DBAPI.Asset.fetchFromSystemObject(sceneSO.idSystemObject); + if (sceneAssets) { + for (const sceneAsset of sceneAssets) { + if (await sceneAsset.assetType() === COMMON.eVocabularyID.eAssetAssetTypeScene) { + asset = sceneAsset; + break; + } + } + } else + LOG.error(`JobCookSIGenerateDownloads.processSceneFile unable to fetch assets for scene systemobject ${JSON.stringify(sceneSO, H.Helpers.saferStringify)}`, LOG.LS.eJOB); + } else + LOG.error(`JobCookSIGenerateDownloads.processSceneFile unable to fetch system object for ${JSON.stringify(scene, H.Helpers.saferStringify)}`, LOG.LS.eJOB); + } + + // Scene owns this ingested asset of the SVX File + // Read file a second time ... 
cloneStream isn't available + const RSR = await this.fetchFile(svxFile); + if (!RSR.success || !RSR.readStream) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to fetch stream for scene file ${svxFile}: ${RSR.error}`); + + // create our configuration for ingesting this svx scene + const ISI: STORE.IngestStreamOrFileInput = { + readStream: RSR.readStream, + localFilePath: null, + asset, + FileName: svxFile, + FilePath: '', + idAssetGroup: 0, + idVAssetType: vScene.idVocabulary, + allowZipCracking: false, + idUserCreator, + SOBased: scene, + Comment: 'Created by Cook si-generate-downloads' + }; + const IAR: STORE.IngestAssetResult = await STORE.AssetStorageAdapter.ingestStreamOrFile(ISI); + if (!IAR.success) + return this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to ingest scene file ${svxFile}: ${IAR.error}`); + if (IAR.assetVersions && IAR.assetVersions.length > 1) + LOG.error(`JobCookSIGenerateDownloads.processSceneFile created multiple asset versions, unexpectedly, ingesting ${svxFile}`, LOG.LS.eJOB); + + const SOI: DBAPI.SystemObjectInfo | undefined = await CACHE.SystemObjectCache.getSystemFromScene(scene); + const assetVersion: DBAPI.AssetVersion | null = (IAR.assetVersions && IAR.assetVersions.length > 0) ? IAR.assetVersions[0] : null; + const pathObject: string = SOI ? RouteBuilder.RepositoryDetails(SOI.idSystemObject, eHrefMode.ePrependClientURL) : ''; + const hrefObject: string = H.Helpers.computeHref(pathObject, scene.Name); + const pathDownload: string = assetVersion ? RouteBuilder.DownloadAssetVersion(assetVersion.idAssetVersion, eHrefMode.ePrependServerURL) : ''; + const hrefDownload: string = pathDownload ? ': ' + H.Helpers.computeHref(pathDownload, 'Download') : ''; + await this.appendToReportAndLog(`${this.name()} ingested scene ${hrefObject}${hrefDownload}`); + + //#region legacy + // previous version handled all models while working with the scene. 
Order of operations prevents this from working + in this context. However, the below code may be necessary for additional linking of processed models. Testing didn't + expose a need for this, but keep temporarily until more models are run through. + // + // const SOV: DBAPI.SystemObjectVersion | null | undefined = IAR.systemObjectVersion; // SystemObjectVersion for updated 'scene', with new version of scene asset + // LOG.info(`JobCookSIGenerateDownloads.processSceneFile[${svxFile}] wire ingestStreamOrFile: ${JSON.stringify(ISI, H.Helpers.stringifyMapsAndBigints)}`, LOG.LS.eJOB); + + // if an asset version was created for ingestion of this model, and if a system object version was created for scene ingestion, + // associate the asset version with the scene's system object version (enabling a scene package to be downloaded, even if some assets + // are owned by the ingested models). Note that if we *updated* models, we will update the original models' + // SystemObjectVersionAssetVersionXref with records pointing to the new model asset versions + // if (SOV && assetVersion) { + // const SOVAVX: DBAPI.SystemObjectVersionAssetVersionXref | null = + // await DBAPI.SystemObjectVersionAssetVersionXref.addOrUpdate(SOV.idSystemObjectVersion, assetVersion.idAsset, assetVersion.idAssetVersion); + // if (!SOVAVX) + // LOG.error(`JobCookSIGenerateDownloads.processSceneFile unable create/update SystemObjectVersionAssetVersionXref for ${JSON.stringify(SOV, H.Helpers.saferStringify)}, ${JSON.stringify(assetVersion, H.Helpers.saferStringify)}`, LOG.LS.eJOB); + // } + //#endregion + + return { success: true }; + } + + protected async transformModelSceneXrefIntoModel(MSX: DBAPI.ModelSceneXref, source?: DBAPI.Model | undefined): Promise { + const Name: string = MSX.Name ?? 
''; + const vFileType: DBAPI.Vocabulary | undefined = await CACHE.VocabularyCache.mapModelFileByExtension(Name); + const vPurpose: DBAPI.Vocabulary | undefined = await this.computeVocabVoyagerSceneModel(); + return new DBAPI.Model({ + idModel: 0, + Name, + Title: source?.Title ?? '', + DateCreated: new Date(), + idVCreationMethod: source?.idVCreationMethod ?? null, + idVModality: source?.idVModality ?? null, + idVPurpose: vPurpose ? vPurpose.idVocabulary : null, + idVUnits: source?.idVUnits ?? null, + idVFileType: vFileType ? vFileType.idVocabulary : null, + idAssetThumbnail: null, CountAnimations: null, CountCameras: null, CountFaces: null, CountLights: null,CountMaterials: null, + CountMeshes: null, CountVertices: null, CountEmbeddedTextures: null, CountLinkedTextures: null, FileEncoding: null, IsDracoCompressed: null, + AutomationTag: MSX.computeModelAutomationTag(), CountTriangles: null + }); } + + static async convertModelUnitsVocabToCookUnits(idVUnits: number | null): Promise { + if (!idVUnits) + return undefined; + + // acceptable units for Cook's si-voyager-scene, as of 1/20/2022: "mm", "cm", "m", "in", "ft", "yd" + const eModelUnits: COMMON.eVocabularyID | undefined = await CACHE.VocabularyCache.vocabularyIdToEnum(idVUnits); + switch (eModelUnits) { + case COMMON.eVocabularyID.eModelUnitsMillimeter: return 'mm'; + case COMMON.eVocabularyID.eModelUnitsCentimeter: return 'cm'; + case COMMON.eVocabularyID.eModelUnitsMeter: return 'm'; + case COMMON.eVocabularyID.eModelUnitsInch: return 'in'; + case COMMON.eVocabularyID.eModelUnitsFoot: return 'ft'; + case COMMON.eVocabularyID.eModelUnitsYard: return 'yd'; + + // not supported by Cook as of 1/20/2022: + case COMMON.eVocabularyID.eModelUnitsMicrometer: + case COMMON.eVocabularyID.eModelUnitsKilometer: + case COMMON.eVocabularyID.eModelUnitsMile: + case COMMON.eVocabularyID.eModelUnitsAstronomicalUnit: + default: + return undefined; + } + } + + private async findMatchingModelFromScene(sceneSource: DBAPI.Scene, 
automationTag: string): Promise { + // LOG.info(`>>>> matching model from scene: ${sceneSource.idScene} | ${automationTag}`,LOG.LS.eDEBUG); + const matches: DBAPI.Model[] | null = await DBAPI.Model.fetchChildrenModels(null, sceneSource.idScene, automationTag); + + return matches && matches.length > 0 ? matches[0] : null; + } + + private async computeVocabVoyagerSceneModel(): Promise { + if (!JobCookSIGenerateDownloads.vocabVoyagerSceneModel) { + JobCookSIGenerateDownloads.vocabVoyagerSceneModel = await CACHE.VocabularyCache.vocabularyByEnum(COMMON.eVocabularyID.eModelPurposeVoyagerSceneModel); + if (!JobCookSIGenerateDownloads.vocabVoyagerSceneModel) + LOG.error('JobCookSIGenerateDownloads unable to fetch vocabulary for Voyager Scene Model Model Purpose', LOG.LS.eGQL); + } + return JobCookSIGenerateDownloads.vocabVoyagerSceneModel; + } + + private async computeVocabAssetTypeScene(): Promise { + if (!JobCookSIGenerateDownloads.vocabAssetTypeScene) { + JobCookSIGenerateDownloads.vocabAssetTypeScene = await CACHE.VocabularyCache.vocabularyByEnum(COMMON.eVocabularyID.eAssetAssetTypeScene); + if (!JobCookSIGenerateDownloads.vocabAssetTypeScene) + LOG.error('JobCookSIGenerateDownloads unable to fetch vocabulary for Asset Type Scene', LOG.LS.eGQL); + } + return JobCookSIGenerateDownloads.vocabAssetTypeScene; + } + + private async computeVocabAssetTypeModelGeometryFile(): Promise { + if (!JobCookSIGenerateDownloads.vocabAssetTypeModelGeometryFile) { + JobCookSIGenerateDownloads.vocabAssetTypeModelGeometryFile = await CACHE.VocabularyCache.vocabularyByEnum(COMMON.eVocabularyID.eAssetAssetTypeModelGeometryFile); + if (!JobCookSIGenerateDownloads.vocabAssetTypeModelGeometryFile) + LOG.error('JobCookSIGenerateDownloads unable to fetch vocabulary for Asset Type Model Geometry File', LOG.LS.eGQL); + } + return JobCookSIGenerateDownloads.vocabAssetTypeModelGeometryFile; + } + + // private async getModelDetailsFromVoyagerScene(svxFile): Promise { + // // Retrieve svx.json data + 
// const RSR: STORE.ReadStreamResult = await this.fetchFile(svxFile); + // if (!RSR.success || !RSR.readStream) { + // this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to fetch stream for scene file ${svxFile}: ${RSR.error}`); + // return null; + // } + + // // Parse Scene + // const svx: SvxReader = new SvxReader(); + // const res: H.IOResults = await svx.loadFromStream(RSR.readStream); + // if (!res.success || !svx.SvxExtraction) { + // this.logError(`JobCookSIGenerateDownloads.processSceneFile unable to parse scene file ${svxFile}: ${res.error}`); + // return null; + // } + + // // return result + // return svx.SvxExtraction.modelDetails; + // } + // #endregion } diff --git a/server/job/impl/Cook/JobCookSIPackratInspect.ts b/server/job/impl/Cook/JobCookSIPackratInspect.ts index d3da241b9..80b870329 100644 --- a/server/job/impl/Cook/JobCookSIPackratInspect.ts +++ b/server/job/impl/Cook/JobCookSIPackratInspect.ts @@ -240,7 +240,7 @@ export class JobCookSIPackratInspectOutput implements H.IOResults { for (const modelMaterialChannel of this.modelConstellation.ModelMaterialChannels) { const mappedModelMaterialId: number | undefined = modelMaterialIDMap.get(modelMaterialChannel.idModelMaterial); if (!mappedModelMaterialId) { - const error: string = `Missing ${modelMaterialChannel.idModelMaterial} from model material ID map`; + const error: string = `Missing ${modelMaterialChannel.idModelMaterial} from model material ID map (idModel: ${idModel})`; LOG.info(`JobCookSIPackratInspectOutput.persist: ${error}`, LOG.LS.eJOB); // return { success: false, error }; continue; @@ -250,7 +250,7 @@ export class JobCookSIPackratInspectOutput implements H.IOResults { if (modelMaterialChannel.idModelMaterialUVMap) { mappedModelMaterialUVMapId = modelMaterialUVMapIDMap.get(modelMaterialChannel.idModelMaterialUVMap); if (!mappedModelMaterialUVMapId) { - const error: string = `Missing ${modelMaterialChannel.idModelMaterialUVMap} from model material UV ID map`; + const 
error: string = `Missing ${modelMaterialChannel.idModelMaterialUVMap} from model material UV ID map (idModel: ${idModel})`; LOG.info(`JobCookSIPackratInspectOutput.persist: ${error}`, LOG.LS.eJOB); // return { success: false, error }; continue; @@ -270,7 +270,7 @@ export class JobCookSIPackratInspectOutput implements H.IOResults { for (const modelObjectModelMaterialXref of this.modelConstellation.ModelObjectModelMaterialXref) { const mappedModelMaterialId: number | undefined = modelMaterialIDMap.get(modelObjectModelMaterialXref.idModelMaterial); if (!mappedModelMaterialId) { - const error: string = `Missing ${modelObjectModelMaterialXref.idModelMaterial} from model material ID map`; + const error: string = `Missing ${modelObjectModelMaterialXref.idModelMaterial} from model material ID map (idModel: ${idModel})`; LOG.info(`JobCookSIPackratInspectOutput.persist: ${error}`, LOG.LS.eJOB); // return { success: false, error }; continue; @@ -278,7 +278,7 @@ export class JobCookSIPackratInspectOutput implements H.IOResults { const mappedModelObjectId: number | undefined = modelObjectIDMap.get(modelObjectModelMaterialXref.idModelObject); if (!mappedModelObjectId) { - const error: string = `Missing ${modelObjectModelMaterialXref.idModelObject} from model object ID map`; + const error: string = `Missing ${modelObjectModelMaterialXref.idModelObject} from model object ID map (idModel: ${idModel})`; LOG.info(`JobCookSIPackratInspectOutput.persist: ${error}`, LOG.LS.eJOB); // return { success: false, error }; continue; diff --git a/server/storage/interface/AssetStorageAdapter.ts b/server/storage/interface/AssetStorageAdapter.ts index 4dc0ba82b..0cde83aec 100644 --- a/server/storage/interface/AssetStorageAdapter.ts +++ b/server/storage/interface/AssetStorageAdapter.ts @@ -536,7 +536,9 @@ export class AssetStorageAdapter { return { success: false, error }; } + // ingest the scene and detect changes for additional events/logic const { transformUpdated } = await 
AssetStorageAdapter.detectAndHandleSceneIngest(IAR); + // transformUpdated = false; // HACK: setting transform update to 'false' to always accept incoming scene as truth // prepare to wire together ingestion workflow step with output asset versions (in systemObjectSet) const LS: LocalStore | undefined = await ASL.getStore(); @@ -676,7 +678,6 @@ export class AssetStorageAdapter { if (MSXSource) { LOG.info(`AssetStorageAdapter.detectAndHandleSceneIngest found existing ModelSceneXref=${JSON.stringify(MSXSource, H.Helpers.saferStringify)} from referenced model ${JSON.stringify(MSX, H.Helpers.saferStringify)}`, LOG.LS.eSTR); const { transformUpdated: transformUpdatedLocal, updated } = MSXSource.updateIfNeeded(MSX); - if (updated) { if (!await MSXSource.update()) { LOG.error(`AssetStorageAdapter.detectAndHandleSceneIngest unable to update ModelSceneXref ${JSON.stringify(MSXSource, H.Helpers.saferStringify)}`, LOG.LS.eSTR); @@ -1033,6 +1034,7 @@ export class AssetStorageAdapter { return { success: false, error }; } + // if we don't have an existing asset passed in commit a new one, otherwise commit a new version if (!ISI.asset) { comRes = await STORE.AssetStorageAdapter.commitNewAsset({ storageKey: wsRes.storageKey, diff --git a/server/tsconfig.json b/server/tsconfig.json index 48c42cf48..3b7fa74f9 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -9,7 +9,8 @@ "rootDirs": [".", "../client/src/types"], "outDir": "build", "declaration": true, - "skipLibCheck": true + "skipLibCheck": true, + "noUnusedLocals": true }, "include": [ "**/*", diff --git a/server/utils/logger.ts b/server/utils/logger.ts index c064001df..a57c2e379 100644 --- a/server/utils/logger.ts +++ b/server/utils/logger.ts @@ -17,6 +17,7 @@ export enum LS { // logger section eCON, // console-redirected messages eCONF, // config eDB, // database + eDEBUG, // debug only output eEVENT, // event eGQL, // graphql eHTTP, // http @@ -58,6 +59,7 @@ function loggerSectionName(eLogSection: LS | undefined): 
string { case LS.eCON: return 'CON'; case LS.eCONF: return 'CNF'; case LS.eDB: return 'DB '; + case LS.eDEBUG: return 'DBG'; case LS.eEVENT: return 'EVE'; case LS.eGQL: return 'GQL'; case LS.eHTTP: return 'HTP'; diff --git a/server/utils/migration/SceneMigration.ts b/server/utils/migration/SceneMigration.ts index 63acbca2e..2659abbeb 100644 --- a/server/utils/migration/SceneMigration.ts +++ b/server/utils/migration/SceneMigration.ts @@ -766,7 +766,7 @@ export class SceneMigration { this.log('createModel', 'Starting'); const Name: string = resourceInfo.filename ?? ''; const Units: DBAPI.Vocabulary | undefined = resourceInfo.UNITS ? await PublishScene.mapEdanUnitsToPackratVocabulary(resourceInfo.UNITS) : undefined; - const AutomationTag: string = JobCookSIGenerateDownloads.computeModelAutomationTag(downloadType); + const AutomationTag: string = JobCookSIGenerateDownloads.computeModelAutomationTagFromDownloadType(downloadType); const idVPurpose: number | null = SceneMigration.vocabDownload?.idVocabulary ?? null; const idVUnits: number | null = Units?.idVocabulary ?? this.modelSource?.idVUnits ?? null; diff --git a/server/workflow/impl/Packrat/WorkflowEngine.ts b/server/workflow/impl/Packrat/WorkflowEngine.ts index 8172549df..98c1b3496 100644 --- a/server/workflow/impl/Packrat/WorkflowEngine.ts +++ b/server/workflow/impl/Packrat/WorkflowEngine.ts @@ -47,9 +47,6 @@ type ComputeSceneInfoResult = { export class WorkflowEngine implements WF.IWorkflowEngine { private workflowMap: Map = new Map(); - // HACK: temporarily bypass download generation while development on it wraps up. 
- private tmpSkipGenDownloads: boolean = true; - async create(workflowParams: WF.WorkflowParameters): Promise { if (!workflowParams.eWorkflowType) { LOG.error(`WorkflowEngine.create called without workflow type ${JSON.stringify(workflowParams)}`, LOG.LS.eWF); @@ -131,6 +128,7 @@ export class WorkflowEngine implements WF.IWorkflowEngine { } async generateSceneDownloads(idScene: number, workflowParams: WF.WorkflowParameters): Promise { + LOG.info(`WorkflowEngine.generateSceneDownloads working...(idScene:${idScene})`,LOG.LS.eWF); const scene: DBAPI.Scene | null = await DBAPI.Scene.fetch(idScene); if (!scene) { LOG.error(`WorkflowEngine.generateSceneDownloads unable to fetch scene from idScene ${idScene}`, LOG.LS.eWF); @@ -233,12 +231,13 @@ export class WorkflowEngine implements WF.IWorkflowEngine { let workflows: WF.IWorkflow[] = []; if (CMIR) workflows = workflows.concat(await this.eventIngestionIngestObjectModel(CMIR, workflowParams, assetsIngested) ?? []); - if (CSIR) + if (CSIR) { workflows = workflows.concat(await this.eventIngestionIngestObjectScene(CSIR, workflowParams, assetsIngested) ?? []); + } return workflows.length > 0 ? 
workflows : null; } - private async eventIngestionIngestObjectModel(CMIR: ComputeModelInfoResult, workflowParams: WF.WorkflowParameters, assetsIngested: boolean): Promise { + private async eventIngestionIngestObjectModel(CMIR: ComputeModelInfoResult, workflowParams: WF.WorkflowParameters, assetsIngested: boolean, generateDownloads: boolean = true): Promise { if (!assetsIngested) { LOG.info(`WorkflowEngine.eventIngestionIngestObjectModel skipping post-ingest workflows as no assets were updated for ${JSON.stringify(CMIR, H.Helpers.saferStringify)}`, LOG.LS.eWF); return null; @@ -315,13 +314,16 @@ export class WorkflowEngine implements WF.IWorkflowEngine { } else LOG.info(`WorkflowEngine.eventIngestionIngestObjectModel skipping si-voyager-scene for master model with unsupported units ${JSON.stringify(CMIR, H.Helpers.saferStringify)}`, LOG.LS.eWF); - // HACK: skip generate downloads while other issues are resolved with it - if(this.tmpSkipGenDownloads===false) { + // do we want to generate downloads for this ingestion + if(generateDownloads===true) { + // does this ingested model have a scene child? If so, initiate WorkflowJob for cook si-generate-downloads const SODerived: DBAPI.SystemObject[] | null = CMIR.idSystemObjectModel ? await DBAPI.SystemObject.fetchDerivedFromXref(CMIR.idSystemObjectModel) : null; if (!SODerived) return workflows.length > 0 ? workflows : null; + LOG.info(`WorkflowEngine.eventIngestionIngestObjectModel generating downloads: ${H.Helpers.JSONStringify(CMIR)}`,LOG.LS.eWF); + for (const SO of SODerived) { if (SO.idScene) { // locate the scene file asset attached to this system object @@ -362,6 +364,8 @@ export class WorkflowEngine implements WF.IWorkflowEngine { parameters: jobParamSIGenerateDownloads, }; + // LOG.info(`WorkflowEngine.eventIngestionIngestObjectScene. 
generating downloads...\n${H.Helpers.JSONStringify(wfParamSIGenerateDownloads)}`,LOG.LS.eWF); + const wfSIGenerateDownloads: WF.IWorkflow | null = await this.create(wfParamSIGenerateDownloads); if (wfSIGenerateDownloads) workflows.push(wfSIGenerateDownloads); @@ -375,7 +379,7 @@ export class WorkflowEngine implements WF.IWorkflowEngine { return workflows.length > 0 ? workflows : null; } - private async eventIngestionIngestObjectScene(CSIR: ComputeSceneInfoResult, workflowParams: WF.WorkflowParameters, assetsIngested: boolean): Promise { + private async eventIngestionIngestObjectScene(CSIR: ComputeSceneInfoResult, workflowParams: WF.WorkflowParameters, assetsIngested: boolean, generateDownloads: boolean = true): Promise { if (!assetsIngested) { LOG.info(`WorkflowEngine.eventIngestionIngestObjectScene skipping post-ingest workflows as no assets were updated for ${JSON.stringify(CSIR, H.Helpers.saferStringify)}`, LOG.LS.eWF); return null; @@ -408,14 +412,30 @@ export class WorkflowEngine implements WF.IWorkflowEngine { if (SOMTL) idSystemObject.push(SOMTL.idSystemObject); - // HACK: skip generate downloads while other issues are resolved with it - if(this.tmpSkipGenDownloads===false) { + // do we want to generate downloads for this scene + // TODO: currently always true. 
needs to be fed upstream from business logic + if(generateDownloads===true) { + // initiate WorkflowJob for cook si-generate-download - const { modelBaseName } = await WorkflowEngine.computeSceneAndModelBaseNames(CSIR.idModel, CSIR.assetVersionGeometry.FileName); + const { sceneBaseName } = await WorkflowEngine.computeSceneAndModelBaseNames(CSIR.idModel, CSIR.assetVersionGeometry.FileName); + + // make sure we have our basic parameters + if(!CSIR.idModel || !CSIR.idScene) { + LOG.error(`WorkflowEngine.eventIngestionIngestObjectScene cannot find idModel(${CSIR.idModel}) or idScene(${CSIR.idScene})`, LOG.LS.eWF); + return null; + } + + // get our scene parameters + const parameterHelper: COOK.JobCookSIVoyagerSceneParameterHelper | null = await COOK.JobCookSIVoyagerSceneParameterHelper.compute(CSIR.idModel); + if(parameterHelper==null) { + LOG.error(`WorkflowEngine.eventIngestionIngestObjectScene cannot find model/scene ids\n(CSIR:${JSON.stringify(CSIR, H.Helpers.saferStringify)})`, LOG.LS.eWF); + return null; + } + const jobParamSIGenerateDownloads: WFP.WorkflowJobParameters = new WFP.WorkflowJobParameters(COMMON.eVocabularyID.eJobJobTypeCookSIGenerateDownloads, new COOK.JobCookSIGenerateDownloadsParameters(CSIR.idScene, CSIR.idModel, CSIR.assetVersionGeometry.FileName, - CSIR.assetSVX.FileName, CSIR.assetVersionDiffuse?.FileName, CSIR.assetVersionMTL?.FileName, modelBaseName)); + CSIR.assetSVX.FileName, CSIR.assetVersionDiffuse?.FileName, CSIR.assetVersionMTL?.FileName, sceneBaseName, undefined, undefined, parameterHelper )); const wfParamSIGenerateDownloads: WF.WorkflowParameters = { eWorkflowType: COMMON.eVocabularyID.eWorkflowTypeCookJob, @@ -425,6 +445,8 @@ export class WorkflowEngine implements WF.IWorkflowEngine { parameters: jobParamSIGenerateDownloads, }; + LOG.info(`WorkflowEngine.eventIngestionIngestObjectScene. 
generating downloads...\n${H.Helpers.JSONStringify(wfParamSIGenerateDownloads)}`,LOG.LS.eWF); + const workflow: WF.IWorkflow | null = await this.create(wfParamSIGenerateDownloads); if (workflow) return [workflow];