Commit

Merge pull request #232 from WorldBank-Transport/feature/profile-editor
Profile Editor
olafveerman authored Sep 21, 2018
2 parents 1558c45 + 09aff5c commit ce34381
Showing 22 changed files with 3,540 additions and 1,338 deletions.
7 changes: 6 additions & 1 deletion .babelrc
@@ -1,3 +1,8 @@
{
"presets": [ "es2015" ]
"presets": [ "es2015" ],
"plugins": [
["transform-object-rest-spread", { "useBuiltIns": true }]
],
"sourceMaps": "inline",
"retainLines": true
}
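
For context, the added `transform-object-rest-spread` plugin is what allows the object spread/rest syntax used throughout the new export route below; a minimal sketch of what it transpiles:

```js
// Object spread/rest as enabled by transform-object-rest-spread.
// With `useBuiltIns: true`, Babel compiles spreads to Object.assign
// instead of injecting its own _extends helper.
const defaults = { concurrency: 1, compress: true };
const options = { ...defaults, compress: false }; // { concurrency: 1, compress: false }
const { compress, ...rest } = options; // rest -> { concurrency: 1 }
```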
1 change: 1 addition & 0 deletions .gitignore
@@ -103,6 +103,7 @@ dist
.nyc_output
ecs-task-definition-generated.yml
*.pyc
.vscode

# OSM P2P db
osm-p2p-dbs
4 changes: 2 additions & 2 deletions README.md
@@ -134,7 +134,7 @@ module.exports = {
service: 'docker',
hyperAccess: null,
hyperSecret: null,
container: 'wbtransport/ram-analysis:latest-stable',
container: 'wbtransport/ram-analysis:latest-dev',
db: 'postgresql://ram:ram@ram-postgis:5432/ram',
storageHost: 'ram-minio',
storagePort: 9000
@@ -143,7 +143,7 @@ module.exports = {
service: 'docker',
hyperAccess: null,
hyperSecret: null,
container: 'wbtransport/ram-vt:latest-stable',
container: 'wbtransport/ram-vt:latest-dev',
storageHost: 'ram-minio',
storagePort: 9000
},
2 changes: 1 addition & 1 deletion app/routes/projects--get.js
@@ -113,7 +113,7 @@ function attachFinishSetupOperation (project) {
.where('project_id', project.id)
.where('master', true)
.first()
.then(scenario => getOperationData(db, 'project-setup-finish', 'finish_setup', scenario.id))
.then(scenario => getOperationData(db, 'project-setup-finish', scenario.id))
.then(opData => {
project.finish_setup = opData;
return project;
240 changes: 214 additions & 26 deletions app/routes/projects--rah-export.js
@@ -5,11 +5,13 @@ import Promise from 'bluebird';
import Octokit from '@octokit/rest';
import { safeDump } from 'js-yaml';
import Zip from 'node-zip';
import _ from 'lodash';

import config from '../config';
import db from '../db/';
import { ProjectNotFoundError, DataConflictError } from '../utils/errors';
import { ProjectNotFoundError, DataConflictError, DisabledServiceError, getBoomResponseForError } from '../utils/errors';
import { getFileContents } from '../s3/utils';
import { getFauxPoiFeature } from './scenarios--poi';

const rahExport = config.rahExport;

@@ -39,6 +41,7 @@ module.exports = [
name: Joi.string().required()
})
).required(),
includeResults: Joi.bool().required(),
contactName: Joi.string().required(),
contactEmail: Joi.string().email().required()
}
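
For illustration, a payload accepted by this schema would look roughly like the sketch below (values are hypothetical; the `description` field is assumed from its use in the index.md template further down):

```js
// Hypothetical request payload for the export endpoint.
const payload = {
  title: 'Rural accessibility in Sergipe',
  country: 'Brazil',
  date: '2018-09-21',
  authors: [{ name: 'Jane Doe' }],
  topics: [{ name: 'Transport' }],
  description: 'Accessibility analysis results.',
  includeResults: true,
  contactName: 'Jane Doe',
  contactEmail: 'jane@example.com'
};
```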
@@ -47,52 +50,175 @@
handler: async (request, reply) => {
// Check config.
if (config.environment === 'offline') {
return reply(Boom.serverUnavailable('RAH export is disabled for offline instances'));
throw new DisabledServiceError('RAH export is disabled for offline instances');
}
const projId = request.params.projId;
const instId = config.instanceId;
const pieces = (rahExport.ghRepo || '').split('/');
const ghOwner = pieces[0];
const ghRepo = pieces[1];
const ghPath = rahExport.ghPath;
const ghToken = rahExport.ghToken;
const includeResults = request.payload.includeResults;
if (!rahExport || !ghOwner || !ghRepo || !ghPath || !ghToken) {
return reply(Boom.serverUnavailable('RAH export not setup'));
throw new DisabledServiceError('RAH export not setup');
}

try {
const project = await db('projects')
.select('*')
.where('id', request.params.projId)
.where('id', projId)
.first();

if (!project) {
return reply(Boom.notFound(new ProjectNotFoundError()));
throw new ProjectNotFoundError();
}
// It's not possible to export pending projects.
if (project.status === 'pending') {
return reply(Boom.conflict(new DataConflictError('Project setup not completed')));
throw new DataConflictError('Project setup not completed');
}

const files = await db('scenarios_files')
.select('*')
.where('project_id', request.params.projId)
.where('project_id', projId)
.whereIn('type', ['results-csv', 'results-geojson']);

if (!files.length) {
return reply(Boom.conflict(new DataConflictError('There are no scenarios with results')));
if (includeResults && !files.length) {
throw new DataConflictError('There are no scenarios with results');
}

// Get the master scenario id. This is used as the base scenario.
const masterScenarioId = await db('scenarios')
.where('project_id', projId)
.where('master', true)
.first('id')
.then(r => r.id);

// Unique scenario ids.
const scIdsWithResults = files.reduce((acc, o) => (
acc.indexOf(o.scenario_id) === -1
? acc.concat(o.scenario_id)
: acc
), []);
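
As a side note, the same dedup can be written with a Set, which avoids the quadratic `indexOf` scan; a sketch, not the committed code:

```js
// Equivalent, linear-time dedup of scenario ids.
const uniqueScenarioIds = [...new Set(files.map(f => f.scenario_id))];
```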

// Get:
// Population indicators for the filter bar.
// Poi types for the filter bar.
// Scenarios with results for the result selection.
const [popIndicators, poiTypes, scenarios] = await Promise.all([
getPopulationIndicators(projId),
getPoiTypesOptions(projId, masterScenarioId),
db('scenarios')
.select('id', 'name')
.whereIn('id', scIdsWithResults)
]);

// Get the POI faux features.
const poiFauxFeatures = await Promise.map(poiTypes, async (type) => {
const fauxFeature = await getFauxPoiFeature(projId, masterScenarioId, type.key);
return {
key: `poi-${type.key}.json`,
data: fauxFeature
};
}, {concurrency: 3});

// Build the poi and pop key index to use on the results mapping.
// Eg. {'Townhalls': 'e0'}
const poiKIndex = poiTypes.reduce((acc, o) => ({
...acc, [o.key]: o.prop
}), {});
// Eg. {'pop-m': 'p0'}
const popKIndex = popIndicators.reduce((acc, o) => ({
...acc, [o.key]: o.prop
}), {});

// For each of the scenarios, get the results with the population
// and the poi values. The result is compressed to save bandwidth.
// On the client it must be rehydrated and mapped to the correct
// poi and pop keys using the `prop` attribute.
const scenariosFauxFeatures = await Promise.map(scIdsWithResults, async (scId) => {
// Get the scenario results.
const scenarioResults = await db('results')
.select(
'projects_origins.id as origin_id',
'projects_origins.name as origin_name',
'projects_origins.coordinates as origin_coords',
'projects_origins_indicators.value as pop_value',
'projects_origins_indicators.key as pop_key',
'results_poi.type as poi_type',
'results_poi.time as time_to_poi'
)
.innerJoin('results_poi', 'results.id', 'results_poi.result_id')
.innerJoin('projects_origins', 'projects_origins.id', 'results.origin_id')
.innerJoin('projects_origins_indicators', 'projects_origins_indicators.origin_id', 'projects_origins.id')
.where('results.project_id', projId)
.where('results.scenario_id', scId);

// Each feature will look something like:
// {
// "i": 2000021,
// "n": "Tobias Barreto",
// "c": [
// -38.00345,
// -11.18803
// ],
// "p0": 69500,
// "p1": 35418,
// "p2": 34082
// "e1": 4448,
// "e0": 16,
// }
const fauxFeature = scenarioResults.reduce((acc, result) => {
const id = result.origin_id;
const popK = popKIndex[result.pop_key];
const poiK = poiKIndex[result.poi_type];
let object = {
[popK]: result.pop_value,
[poiK]: result.time_to_poi
};
if (!acc[id]) {
object = {
...object,
'i': id,
'n': result.origin_name,
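// Truncate coordinates to 5 decimal places (~1 m precision) to shrink the export.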
'c': [Math.trunc(result.origin_coords[0] * 100000) / 100000, Math.trunc(result.origin_coords[1] * 100000) / 100000]
};
}
return {
...acc,
[id]: {
...acc[id],
...object
}
};
}, {});

return {
key: `results-sc-${scId}.json`,
data: Object.values(fauxFeature)
};
}, {concurrency: 3});
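
As the comment above notes, the client must rehydrate these compressed features; a minimal sketch of that mapping, assuming the client reads `popIndicators` and `poiTypes` from the exported `index.json` (the function name is illustrative):

```js
// Sketch: rehydrate a compressed feature on the client.
// Each popIndicators/poiTypes entry carries the minified `prop` key
// (p0, p1, ... / e0, e1, ...) assigned by the export.
function rehydrateFeature (feature, popIndicators, poiTypes) {
  const out = { id: feature.i, name: feature.n, coordinates: feature.c };
  popIndicators.forEach(ind => { out[ind.key] = feature[ind.prop]; });
  poiTypes.forEach(type => { out[type.key] = feature[type.prop]; });
  return out;
}
```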

// Meta object
const scenarioMetaInformation = {
bbox: project.bbox,
poiTypes,
popIndicators,
scenarios
// scenariosFauxFeatures, // <-------------- Not meta
// poiFauxFeatures // <-------------- Not meta
};

// Build the markdown file.
const frontmatter = {
title: request.payload.title,
country: request.payload.country,
date: request.payload.date,
authors: request.payload.authors.map(a => a.name),
topics: request.payload.topics.map(t => t.name),
contact: {
name: request.payload.contactName,
email: request.payload.contactEmail
}
include_results: includeResults,
contact_name: request.payload.contactName,
contact_email: request.payload.contactEmail
};

const indexMd = `---
@@ -104,19 +230,33 @@ ${request.payload.description}

const gClient = new GHClient(ghOwner, ghRepo, ghToken);

// Project folder on the GH repo.
const projectGHFolder = `${ghPath}/project-${instId}-${project.id}`;

// Add all the files.
// Readme.
gClient.addFile(`${ghPath}/project-${instId}-${project.id}/index.md`, indexMd);
// Data files.
const zip = new Zip();
await Promise.map(files, async f => {
const ext = f.type === 'results-csv' ? 'csv' : 'geojson';
zip.file(`${f.name}.${ext}`, await getFileContents(f.path));
});
gClient.addFile(`${projectGHFolder}/index.md`, indexMd);

const zipFile = zip.generate({ base64: true, compression: 'DEFLATE' });
// Results meta file.
gClient.addFile(`${projectGHFolder}/index.json`, JSON.stringify(scenarioMetaInformation));

gClient.addBinaryFile(`${ghPath}/project-${instId}-${project.id}/results.zip`, zipFile);
// Faux features. (poi and results).
[scenariosFauxFeatures, poiFauxFeatures].forEach(featureFiles => {
featureFiles.forEach(fileData => {
gClient.addFile(`${projectGHFolder}/${fileData.key}`, JSON.stringify(fileData.data));
});
});

// Data files.
if (files.length) {
const zip = new Zip();
await Promise.map(files, async f => {
const ext = f.type === 'results-csv' ? 'csv' : 'geojson';
zip.file(`${f.name}.${ext}`, await getFileContents(f.path));
});
const zipFile = zip.generate({ base64: true, compression: 'DEFLATE' });
gClient.addBinaryFile(`${projectGHFolder}/results.zip`, zipFile);
}

// Create branch.
const branchName = `ram-export/${instId}-${project.id}`;
@@ -141,11 +281,11 @@
}
// Commit and PR.
await gClient.commit(`RAM automated export of project ${project.id} (${instId})`, committer, author);
const pullReq = await gClient.openPR(`RAM automated export of project ${project.name} from ${instId}`);
// Mention the moderators so that notifications are sent out.
const pullReq = await gClient.openPR(`RAM automated export of project ${project.name} from ${instId}`, 'cc @WorldBank-Transport/rah-moderators');
return reply({statusCode: 200, message: 'Project exported. Approval pending.', prUrl: pullReq.data.url});
} catch (err) {
console.log('err', err);
reply(Boom.badImplementation(err));
} catch (error) {
return reply(getBoomResponseForError(error));
}
}
}
@@ -226,3 +366,51 @@ class GHClient {
return this.octokit.pullRequests.create({owner, repo, title, head: branch.name, base: branch.srcName, body});
}
}

async function getPopulationIndicators (projId) {
const originsFiles = await db('projects_files')
.select('data')
.where('project_id', projId)
.where('type', 'origins')
.first();

// Add minified property keys for the results features.
return originsFiles.data.indicators.map((o, i) => Object.assign({}, o, {
prop: `p${i}`
}));
}

async function getPoiTypesOptions (projId, scId) {
const sourceData = await db('scenarios_source_data')
.select('type', 'data')
.where('project_id', projId)
.where('scenario_id', scId)
.where('name', 'poi')
.first();

let poiTypes = [];
if (sourceData.type === 'osm') {
const osmTypesIndex = {
health: 'Health facilities',
education: 'Education facilities',
financial: 'Financial institutions'
};
poiTypes = sourceData.data.osmPoiTypes.map(o => ({
key: o,
label: osmTypesIndex[o]
}));
} else if (sourceData.type === 'file' || sourceData.type === 'wbcatalog') {
const poiFiles = await db('scenarios_files')
.select('subtype')
.where('scenario_id', scId)
.where('type', 'poi');
poiTypes = poiFiles.map(o => ({key: o.subtype, label: o.subtype}));
} else {
throw new Error(`Invalid source for poi: ${sourceData.type}`);
}

// Add minified property keys for the results features.
return poiTypes.map((o, i) => Object.assign({}, o, {
prop: `e${i}`
}));
}
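
For reference, the two helpers above return option lists shaped roughly as follows (keys and labels are hypothetical; the `prop` values are assigned by array index):

```js
// Hypothetical outputs used to build the poi/pop key indexes.
const popIndicators = [
  { key: 'population', label: 'Total population', prop: 'p0' },
  { key: 'population-f', label: 'Female population', prop: 'p1' }
];
const poiTypes = [
  { key: 'health', label: 'Health facilities', prop: 'e0' },
  { key: 'education', label: 'Education facilities', prop: 'e1' }
];
```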