From 30401b2c11ec2814f404a407b0fcf952622427eb Mon Sep 17 00:00:00 2001 From: Julian Rojas Date: Tue, 5 Jul 2022 23:38:45 +0200 Subject: [PATCH 1/3] Fix cleanup after each test --- test/resultStream.test.js | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/test/resultStream.test.js b/test/resultStream.test.js index 0639e6e..d94552c 100644 --- a/test/resultStream.test.js +++ b/test/resultStream.test.js @@ -8,14 +8,13 @@ const readFile = util.promisify(fs.readFile); jest.setTimeout(60000); -afterAll(async () => { +afterEach(async () => { await del(['test/sample-feed/linkedConnections*']); }); describe('Testing whether result contains certain objects (regression tests)', () => { var lcstreamToArray = (options, file) => { - console.log(file); return new Promise((resolve, reject) => { exec(`./bin/gtfs2lc.js -s -t -f ${options['format']} -S ${options['store']} --fresh test/sample-feed > test/sample-feed/${file}`, async (err, stdout, stderr) => { @@ -30,17 +29,16 @@ describe('Testing whether result contains certain objects (regression tests)', ( var connections; //This will be the first element when sorted correctly - /*it('Stream should contain a first connection with arrivalStop AMV', async () => { + it('Stream should contain a first connection with arrivalStop AMV', async () => { connections = await lcstreamToArray({}, 'result.json'); assert.equal(JSON.parse(connections[0])['arrivalStop']['stop_id'], 'AMV'); - });*/ + }); it('JSON-LD Stream should contain Connections and use LevelStore for data storage', async () => { var triples = await lcstreamToArray({ format: 'jsonld', store: 'LevelStore' }, 'result.jsonld'); - console.log(triples); assert.equal(JSON.parse(triples[1])['@type'], 'Connection'); }); From a3d383336b637d885b65e25930cd28513ddf3623 Mon Sep 17 00:00:00 2001 From: Julian Rojas Date: Wed, 6 Jul 2022 18:29:19 +0200 Subject: [PATCH 2/3] Finally fixed the service date expansion process --- bin/gtfs2lc.js | 4 +- lib/StreamIterator.js | 57 
--------- lib/gtfs2connections.js | 6 +- lib/services/CalendarExpander.js | 70 +++++++++++ lib/services/calendar.js | 154 ----------------------- lib/stoptimes/StopTimes2Cxs.js | 2 +- lib/stoptimes/st2c.js | 2 +- lib/stores/StoreManager.js | 77 +++++++----- package-lock.json | 11 -- package.json | 5 +- test/dataConversion.test.js | 2 +- test/resultStream.test.js | 13 +- test/sample-feed-test/calendar_dates.txt | 3 + 13 files changed, 139 insertions(+), 267 deletions(-) delete mode 100644 lib/StreamIterator.js create mode 100644 lib/services/CalendarExpander.js delete mode 100644 lib/services/calendar.js diff --git a/bin/gtfs2lc.js b/bin/gtfs2lc.js index 37a61d1..7092381 100755 --- a/bin/gtfs2lc.js +++ b/bin/gtfs2lc.js @@ -14,7 +14,6 @@ program .option('-s, --stream', 'Get the connections as a stream on the standard output') .option('-S, --store ', 'Store type: LevelStore (uses your disk to avoid that you run out of RAM) or MemStore (default)') .option('--fresh', 'Make sure to convert all Connection and ignore existing Historic records (which will be deleted)') - .option('-t --test', 'Flag that this is a test...nevermind this') .arguments('', 'Path to sorted GTFS files') .action(function (path) { program.path = path; @@ -44,8 +43,7 @@ var mapper = new gtfs2lc.Connections({ store: !program.store || program.store === 'undefined' ? 'MemStore' : program.store, format: !program.format || program.format === 'undefined' ? 'json' : program.format, fresh: program.fresh, - baseUris: baseUris, - isTest: program.test + baseUris: baseUris }); var resultStream = null; diff --git a/lib/StreamIterator.js b/lib/StreamIterator.js deleted file mode 100644 index 1134aaa..0000000 --- a/lib/StreamIterator.js +++ /dev/null @@ -1,57 +0,0 @@ -const util = require('util'), - { AsyncIterator } = require('asynciterator'), - EventEmitter = require('events').EventEmitter; - -// TODO: Reimplement this. 
Is error prone -var StreamIterator = function (stream, test) { - this._iterator = AsyncIterator.wrap(stream); - this._currentCB; - this._currentPromise; - this._test = test; -}; - -util.inherits(StreamIterator, EventEmitter); - -StreamIterator.prototype.getCurrentObject = function () { - return this._currentObject; -}; - -StreamIterator.prototype.next = function (callback) { - return new Promise(async (resolve, reject) => { - this._currentCB = callback; - this._currentPromise = resolve; - var object = this._iterator.read(); - - // Hack to prevent edge case where stream didn't update its ended state on time - // This only fails on the tests for some reason and the hack makes real data to crash :( - // We really need to reactor all this code. - if (this._test) { - await new Promise(resolve => setTimeout(resolve, 50)); - } - - if (!object && !this._iterator.ended && this._iterator._state < 2) { - this._iterator.once("readable", async () => { - object = await this.next(callback); - resolve(object); - }); - //Filter our object on the date property and check whether it’s in our interval. 
- } else if (object) { - this._currentObject = object; - if (callback) - callback(object); - resolve(object); - } else if (!object) { - //stream ended - this._currentObject = null; - if (callback) - callback(null); - resolve(null); - } else { - //We didn't find a solution this time, let's find it next time - resolve(await this.next(callback)); - } - }); -}; - -module.exports = StreamIterator; - diff --git a/lib/gtfs2connections.js b/lib/gtfs2connections.js index 39e3d20..2147aad 100644 --- a/lib/gtfs2connections.js +++ b/lib/gtfs2connections.js @@ -47,7 +47,7 @@ Mapper.prototype.resultStream = async function (path, output, done) { // Step 2: Read all the required GTFS files and create reusable indexes console.error('Creating index stores...'); - const stores = await StoreManager(output, this._options.store, this._options.isTest); + const stores = await StoreManager(output, this._options.store); // Step 3: Produce (diff) connection rules based on available CPU cores console.error('Creating Connection rules...'); @@ -94,7 +94,7 @@ Mapper.prototype.resultStream = async function (path, output, done) { try { console.error('Merging final Linked Connections file...'); // Join all resulting files into one - await exec(`for i in ${raws.map(r => {return `${r}.${ext}`}).join(" ")} ; do cat "$i" >> linkedConnections.${ext} && rm "$i" || break ; done`, { cwd: output }); + await exec(`for i in ${raws.map(r => { return `${r}.${ext}` }).join(" ")} ; do cat "$i" >> linkedConnections.${ext} && rm "$i" || break ; done`, { cwd: output }); let t1 = new Date(); console.error('linkedConnections.' 
+ ext + ' File created in ' + (t1.getTime() - t0.getTime()) + ' ms'); await del( @@ -125,7 +125,7 @@ Mapper.prototype.resultStream = async function (path, output, done) { async function cleanUpSources(sources) { try { await exec(`${path.resolve(`${__dirname}/../bin/gtfs2lc-clean.sh`)} ${sources}`); - } catch(err) { + } catch (err) { console.error(err); throw new Error('Process gtfs2lc-clean.sh exit with code: ' + code); } diff --git a/lib/services/CalendarExpander.js b/lib/services/CalendarExpander.js new file mode 100644 index 0000000..477133e --- /dev/null +++ b/lib/services/CalendarExpander.js @@ -0,0 +1,70 @@ +/** + * Pieter Colpaert and Julián Rojas © Ghent University - imec + * Make sure that the stop_times.txt is ordered by trip_id and stop_sequence before piping it to this library + */ +const Transform = require('stream').Transform; +const { format, eachDayOfInterval } = require('date-fns'); + +class CalendarExpander extends Transform { + constructor(calendarDates) { + super({ objectMode: true }); + this._calendarDates = calendarDates; + } + + _transform(calendar, encoding, done) { + // Parse and expand the calendar in memory + // GTFS specification declares a date as yyyyMMdd. No other formats possible. + // Parsing with substr should be safe. Mind that timezones don’t matter here. 
+        const startDate = this.createDate(calendar['start_date']);
+        const endDate = this.createDate(calendar['end_date']);
+        const days = eachDayOfInterval({ start: startDate, end: endDate });
+        const calDates = this.calendarDates.get(calendar['service_id']);
+        const expanded = new Set();
+
+        if (calDates) {
+            // First add all explicitly added service dates
+            calDates.added.forEach(d => expanded.add(format(this.createDate(d), 'yyyyMMdd')));
+
+            for (const d of days) {
+                // Check this date is an actual service date and it hasn't been removed.
+                // Compare as yyyyMMdd strings: `removed` holds raw GTFS date strings, not Date objects.
+                if (calendar[format(d, 'iiii').toLowerCase()] === '1'
+                    && !calDates.removed.has(format(d, 'yyyyMMdd'))) {
+                    expanded.add(format(d, 'yyyyMMdd'));
+                }
+            }
+            // Delete calendar_dates rule since it is no longer needed
+            this.calendarDates.delete(calendar['service_id']);
+        } else {
+            // There are no additional service date rules for this calendar
+            for (const d of days) {
+                if (calendar[format(d, 'iiii').toLowerCase()] === '1') {
+                    expanded.add(format(d, 'yyyyMMdd'));
+                }
+            }
+        }
+
+        this.push({ 'service_id': calendar['service_id'], dates: Array.from(expanded) });
+        done();
+    }
+
+    _flush(done) {
+        // Deal with all the calendar_dates that didn't have a corresponding calendar rule
+        for (const [service_id, obj] of this.calendarDates) {
+            const dates = [];
+            obj.added.forEach(d => dates.push(format(this.createDate(d), 'yyyyMMdd')));
+
+            this.push({ service_id, dates });
+        }
+        done();
+    }
+
+    createDate(dateString) {
+        return new Date(dateString.substr(0, 4), parseInt(dateString.substr(4, 2), 10) - 1, dateString.substr(6, 2));
+    }
+
+    get calendarDates() {
+        return this._calendarDates;
+    }
+}
+
+module.exports = CalendarExpander;
\ No newline at end of file
diff --git a/lib/services/calendar.js b/lib/services/calendar.js
deleted file mode 100644
index fe2aa82..0000000
--- a/lib/services/calendar.js
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- * Pieter Colpaert © Ghent University - iMinds
- * Transforms a CSV export of calendar.txt and calendar_dates.txt to a service object with an
expanded list of dates - */ -const Transform = require('stream').Transform, - util = require('util'), - StreamIterator = require('../StreamIterator'), - { format, eachDayOfInterval } = require('date-fns'); - -var CalendarToServices = function (calendarDatesStream, isTest) { - Transform.call(this, {objectMode : true}); - this._calendarDatesIterator = new StreamIterator(calendarDatesStream, isTest); -}; - -util.inherits(CalendarToServices, Transform); - -/** - * Calls done when all the rules in calendar_dates.txt are processed and a full calendar is found. - * May do intermediate pushes when a service_id is found in calendar_dates.txt that don't belong to a service_id in calendar.txt - * ¡This function only works when calendar.txt and calendar_dates.txt are ordered by service_id! - * @param calendar is the expanded list of calendars - * @param serviceId is the service id we're currently dealing with - */ -CalendarToServices.prototype._matchCalendarDates = function (calendar, serviceId, done) { - //If no current calendar date is set, set it and call the function again - var currentCD = this._calendarDatesIterator.getCurrentObject(); - if (!currentCD) { - this._calendarDatesIterator.next((calendarDate) => { - //if we're processing a new calendar date that's - if (calendarDate) { - this._matchCalendarDates(calendar, serviceId, done); - } else { - //if no next calendar date could be found, return the calendar: we could not find a match for the current calendar - done(calendar, serviceId); - } - }); - } else if (currentCD['service_id'] == serviceId) { - //If the current service_id is the same as the the service id of the calendar that's given, we're going to read current calendar date process it. 
- var d = currentCD['date']; - if (currentCD['exception_type'] === '1' && calendar.indexOf(d) === -1) { - //This date has been added and doesn't already exist: push it to the back - calendar.push(d); - } else if (currentCD['exception_type'] === '2') { - //Has been removed: remove it from the array - var index = calendar.indexOf(d); - if (index > -1) { - calendar.splice(index, 1); - } - } - - //We have successfully parsed the calendar date, let's find the next one - this._calendarDatesIterator.next((calendarDate) => { - //And rerun this function (which will automatically fetch the next calendar date) - if (calendarDate) { - this._matchCalendarDates(calendar, serviceId, done); - } else { - done(calendar, serviceId); - } - }); - } else if (currentCD['service_id'] < serviceId) { - //If the current service_id is smaller than the calendar service_id, process it: it's a calendar date without a match in calendar.txt, so it should push a calendar update, but done shouldn't be called until we've fixed the next iteration - this._processCalendarDates([], currentCD['service_id'], (intermediateCalendar, intermediateServiceId) => { - this.push({'service_id' : intermediateServiceId, - dates: intermediateCalendar - }); - this._matchCalendarDates(calendar, serviceId, done); - }); - } else { - //This is part of a service id we still might discover, so we should leave it to the next iteration. Return the result of our current endeavour and return nothing - done(calendar, serviceId); - } -}; - -/** - * Processes only calendar additions and returns a calendar. 
The cursor of the iterator will be at the next calendar date when finished - */ -CalendarToServices.prototype._processCalendarDates = function (calendar, serviceId, done) { - var currentCD = this._calendarDatesIterator.getCurrentObject(); - if (currentCD['service_id'] === serviceId) { - //process it and call done when ready - calendar.push(currentCD['date']); - this._calendarDatesIterator.next((calendarDate) => { - if (calendarDate) { - this._processCalendarDates(calendar, serviceId, done); - } else { - done(calendar, serviceId); - } - }); - } else { - done(calendar, serviceId); - } -}; - -CalendarToServices.prototype._transform = function (calendar, encoding, done) { - //Step one: parse and expand the calendar in memory - //GTFS specification declares a date as yyyyMMdd. No other formats possible. Parsing with substr should be safe. Mind that timezones don’t matter here. They only matter in the ConnectionsBuilder - var startDate = new Date(calendar['start_date'].substr(0,4), parseInt(calendar['start_date'].substr(4,2))-1, calendar['start_date'].substr(6,2)); - var endDate = new Date(calendar['end_date'].substr(0,4), parseInt(calendar['end_date'].substr(4,2))-1, calendar['end_date'].substr(6,2)); - var expanded = []; - let days = eachDayOfInterval({ start: startDate, end: endDate }); - - for (let d of days) { - if (calendar[format(d, 'iiii').toLowerCase()] === '1') { - expanded.push(format(d,'yyyyMMdd')); - } - } - //Step two: match potential exceptions - this._matchCalendarDates(expanded, calendar['service_id'], function (expandedWithExceptions) { - done(null, { - 'service_id' : calendar['service_id'], - dates: expandedWithExceptions - }); - }); -}; - -CalendarToServices.prototype._flush = function (done) { - //read the rest of the calendarDatesIterator - var self = this; - var recursiveCB = function (calendar, serviceId) { - if (calendar) { - self.push({ - 'service_id' : serviceId, - dates: calendar - }); - } - var currentCD = 
self._calendarDatesIterator.getCurrentObject(); - if (currentCD) { - self._processCalendarDates([], currentCD['service_id'], function (calendar, serviceId) { - recursiveCB(calendar, serviceId); - }); - } else { - //No next calendar date found - done(); - } - }; - //Initialization: if we're still handling a calendar date, handle this one first - var currentCD = this._calendarDatesIterator.getCurrentObject(); - if (currentCD) { - this._processCalendarDates([], currentCD['service_id'], function (calendar, serviceId) { - recursiveCB(calendar, serviceId); - }); - } else { - //If we aren't, check whether we're out of calendar dates or not - this._calendarDatesIterator.next(function (currentCD) { - if (currentCD) { - recursiveCB(); - } else { - done(); - } - }); - } -}; - -module.exports = CalendarToServices; diff --git a/lib/stoptimes/StopTimes2Cxs.js b/lib/stoptimes/StopTimes2Cxs.js index 9e8d70a..fba8d2e 100644 --- a/lib/stoptimes/StopTimes2Cxs.js +++ b/lib/stoptimes/StopTimes2Cxs.js @@ -38,7 +38,7 @@ module.exports = function (sourcePath, outPath, stores, fresh) { )); connectionRules.on('error', err => { - console.error(err.message); + console.error(err); process.exit(-1); }) diff --git a/lib/stoptimes/st2c.js b/lib/stoptimes/st2c.js index 034bd5e..ec75b5b 100644 --- a/lib/stoptimes/st2c.js +++ b/lib/stoptimes/st2c.js @@ -107,7 +107,7 @@ const arrivalStop = await this.stopsDB.get(arrival); const trip = await this.tripsDB.get(this.previousStopTime['trip_id']); const route = await this.routesDB.get(trip['route_id']); - const serviceDates = await this.servicesDB.get(trip['service_id']); + const serviceDates = await this.servicesDB.get(trip['service_id']) || []; return { departureStop, arrivalStop, diff --git a/lib/stores/StoreManager.js b/lib/stores/StoreManager.js index a4c0be4..82ece91 100644 --- a/lib/stores/StoreManager.js +++ b/lib/stores/StoreManager.js @@ -1,14 +1,10 @@ const fs = require('fs'); const csv = require('fast-csv'); -const util = require('util'); 
-const ChildProcess = require('child_process'); const Store = require('./Store'); -const Services = require('../services/calendar'); +const CalendarExpander = require('../services/CalendarExpander'); -const exec = util.promisify(ChildProcess.exec); - -module.exports = async function (outPath, storeType, isTest) { - // Step 2: Read all the required GTFS files in a streamed-fashion +module.exports = async function (outPath, storeType) { + // Step 2: Read all the required GTFS files in a stream-fashion const stops = fs.createReadStream(`${outPath}/stops.txt`, { encoding: 'utf8', objectMode: true }) .pipe(csv.parse({ objectMode: true, headers: true })) .on('error', function (e) { @@ -27,23 +23,6 @@ module.exports = async function (outPath, storeType, isTest) { console.error(e); }); - const calendarDates = fs.createReadStream(`${outPath}/calendar_dates.txt`, { encoding: 'utf8', objectMode: true }) - .pipe(csv.parse({ objectMode: true, headers: true })) - .on('error', function (e) { - console.error(e); - }); - - // Check if calendar.txt exists, otherwise create a dummy file to prevent stream issue - if(!fs.existsSync(`${outPath}/calendar.txt`)) { - await exec('touch calendar.txt', { cwd: outPath }); - } - const services = fs.createReadStream(`${outPath}/calendar.txt`, { encoding: 'utf8', objectMode: true }) - .pipe(csv.parse({ objectMode: true, headers: true })) - .pipe(new Services(calendarDates, isTest)) - .on('error', function (e) { - console.error(e); - }); - // Store in LevelDB or in memory Map depending on the options const [ stopsDB, routesDB, tripsDB, servicesDB @@ -69,19 +48,53 @@ module.exports = async function (outPath, storeType, isTest) { encoding: 'json', key: 'trip_id', }), - loadIndexData({ - stream: services, - type: storeType, - fileName: `${outPath}/services.db`, - encoding: 'json', - key: 'service_id', - value: 'dates' - }) + loadServiceDates(outPath, storeType) ]); return { stopsDB, routesDB, tripsDB, servicesDB }; } +async function 
loadServiceDates(outPath, storeType) { + // Load all calendar_dates in memory store + const calendarDates = new Map(); + + const calendarDatesStream = fs.createReadStream(`${outPath}/calendar_dates.txt`, { encoding: 'utf8', objectMode: true }) + .pipe(csv.parse({ objectMode: true, headers: true })) + .on('error', function (e) { + console.error(e); + }); + + for await (const cd of calendarDatesStream) { + // Initialize calendar_date rule object + if(!calendarDates.has(cd['service_id'])) { + calendarDates.set(cd['service_id'], { added: new Set(), removed: new Set() }); + } + // Set date addition/removal accordingly + if(cd['exception_type'] === '1') { + calendarDates.get(cd['service_id']).added.add(cd['date']); + } else if(cd['exception_type'] === '2') { + calendarDates.get(cd['service_id']).removed.add(cd['date']); + } + } + + // Load and merge all expanded service dates in data store. + const calendar = fs.createReadStream(`${outPath}/calendar.txt`, { encoding: 'utf8', objectMode: true }) + .pipe(csv.parse({ objectMode: true, headers: true })) + .pipe(new CalendarExpander(calendarDates)) + .on('error', function (e) { + console.error(e); + }); + + return loadIndexData({ + stream: calendar, + type: storeType, + fileName: `${outPath}/services.db`, + encoding: 'json', + key: 'service_id', + value: 'dates' + }); +} + async function loadIndexData({ stream, type, fileName, encoding, key, value }) { try { const store = Store({ fileName, encoding }, type); diff --git a/package-lock.json b/package-lock.json index 0031f03..e5fd673 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,6 @@ "version": "2.0.3", "license": "MIT", "dependencies": { - "asynciterator": "^2.0.1", "commander": "^4.1.1", "date-fns": "^2.18.0", "del": "^5.1.0", @@ -1269,11 +1268,6 @@ "node": ">=0.8" } }, - "node_modules/asynciterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/asynciterator/-/asynciterator-2.0.1.tgz", - "integrity": 
"sha512-aVLheZsDNU5qpOv6jZEHnFv79GfEi+N0w/OLmMmXZfGD8XFFmPsRhkSqleNl9jS6mqy/DNoV7tXGcI0S3cUvHQ==" - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -5579,11 +5573,6 @@ "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", "dev": true }, - "asynciterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/asynciterator/-/asynciterator-2.0.1.tgz", - "integrity": "sha512-aVLheZsDNU5qpOv6jZEHnFv79GfEi+N0w/OLmMmXZfGD8XFFmPsRhkSqleNl9jS6mqy/DNoV7tXGcI0S3cUvHQ==" - }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", diff --git a/package.json b/package.json index 172ae76..cac34a8 100644 --- a/package.json +++ b/package.json @@ -9,8 +9,8 @@ "linkedconnections-joinandsort": "./bin/linkedconnections-sort.sh" }, "scripts": { - "test": "rm -rf test/sample-feed/ ; cp -r test/sample-feed-test/ test/sample-feed/ ; ./bin/gtfs2lc.js -s -t -f jsonld test/sample-feed > test/sample-feed/connections-notjoined.nldjsonld ; ./bin/linkedconnections-sort.sh test/sample-feed/connections-notjoined.nldjsonld > test/sample-feed/connections.nldjsonld ; rm test/sample-feed/linkedConnections.json ; jest --runInBand", - "test-ci": "rm -rf test/sample-feed/ ; cp -r test/sample-feed-test/ test/sample-feed/ ; ./bin/gtfs2lc.js -s -t -f jsonld test/sample-feed > test/sample-feed/connections-notjoined.nldjsonld ; ./bin/linkedconnections-sort.sh test/sample-feed/connections-notjoined.nldjsonld > test/sample-feed/connections.nldjsonld ; rm test/sample-feed/linkedConnections.json ; jest --ci --runInBand --coverage", + "test": "rm -rf test/sample-feed/ ; cp -r test/sample-feed-test/ test/sample-feed/ ; ./bin/gtfs2lc.js -s -f jsonld test/sample-feed > test/sample-feed/connections-notjoined.nldjsonld ; ./bin/linkedconnections-sort.sh test/sample-feed/connections-notjoined.nldjsonld > 
test/sample-feed/connections.nldjsonld ; rm test/sample-feed/linkedConnections.json ; jest --runInBand", + "test-ci": "rm -rf test/sample-feed/ ; cp -r test/sample-feed-test/ test/sample-feed/ ; ./bin/gtfs2lc.js -s -f jsonld test/sample-feed > test/sample-feed/connections-notjoined.nldjsonld ; ./bin/linkedconnections-sort.sh test/sample-feed/connections-notjoined.nldjsonld > test/sample-feed/connections.nldjsonld ; rm test/sample-feed/linkedConnections.json ; jest --ci --runInBand --coverage", "coveralls": "jest --coverage && coveralls < coverage/lcov.info" }, "repository": { @@ -28,7 +28,6 @@ "url": "https://github.com/LinkedConnections/gtfs2lc/issues" }, "dependencies": { - "asynciterator": "^2.0.1", "commander": "^4.1.1", "date-fns": "^2.18.0", "del": "^5.1.0", diff --git a/test/dataConversion.test.js b/test/dataConversion.test.js index fe9c1d8..7942122 100644 --- a/test/dataConversion.test.js +++ b/test/dataConversion.test.js @@ -23,7 +23,7 @@ test('Convert connections to csv', async () => { function doBasicParsing() { return new Promise((resolve, reject) => { - exec(`./bin/gtfs2lc.js -t -s --fresh test/sample-feed > test/sample-feed/formats.json`, + exec(`./bin/gtfs2lc.js -s --fresh test/sample-feed > test/sample-feed/formats.json`, async (err, stdout, stderr) => { if (err) { reject(stderr); diff --git a/test/resultStream.test.js b/test/resultStream.test.js index d94552c..9cb4169 100644 --- a/test/resultStream.test.js +++ b/test/resultStream.test.js @@ -16,7 +16,7 @@ describe('Testing whether result contains certain objects (regression tests)', ( var lcstreamToArray = (options, file) => { return new Promise((resolve, reject) => { - exec(`./bin/gtfs2lc.js -s -t -f ${options['format']} -S ${options['store']} --fresh test/sample-feed > test/sample-feed/${file}`, + exec(`./bin/gtfs2lc.js -s -f ${options['format']} -S ${options['store']} --fresh test/sample-feed > test/sample-feed/${file}`, async (err, stdout, stderr) => { if (err) { reject(stderr); @@ -49,4 +49,15 
@@ describe('Testing whether result contains certain objects (regression tests)', ( }, 'turtle.ttl'); assert.equal(triples[4].includes('a lc:Connection'), true); }); + + it('RDF Stream should be produced from feed without calendar.txt', async () => { + // Hide calendar.txt for this test + fs.renameSync('./test/sample-feed/calendar.txt', './test/sample-feed/calendar.txt.bkp'); + var triples = await lcstreamToArray({ + format: 'turtle', + store: 'MemStore' + }, 'turtle.ttl'); + fs.renameSync('./test/sample-feed/calendar.txt.bkp', './test/sample-feed/calendar.txt'); + assert.equal(triples[4].includes('a lc:Connection'), true); + }); }); diff --git a/test/sample-feed-test/calendar_dates.txt b/test/sample-feed-test/calendar_dates.txt index 0d0664d..c5a5429 100644 --- a/test/sample-feed-test/calendar_dates.txt +++ b/test/sample-feed-test/calendar_dates.txt @@ -1,2 +1,5 @@ service_id,date,exception_type FULLW,20070604,2 +FULLW,20070605,1 +FULLW,20070606,1 +FULLW,20070607,1 From 7486f94fc57f64c04ae0dc0c59a3a9805060a9bb Mon Sep 17 00:00:00 2001 From: Julian Rojas Date: Wed, 6 Jul 2022 18:30:16 +0200 Subject: [PATCH 3/3] 2.1.0 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index e5fd673..3436ec5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "gtfs2lc", - "version": "2.0.3", + "version": "2.1.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "gtfs2lc", - "version": "2.0.3", + "version": "2.1.0", "license": "MIT", "dependencies": { "commander": "^4.1.1", diff --git a/package.json b/package.json index cac34a8..426968e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "gtfs2lc", - "version": "2.0.3", + "version": "2.1.0", "description": "Mapping script from gtfs to (linked) connections", "main": "lib/gtfs2lc.js", "bin": {