diff --git a/.api-version b/.api-version
index 46edf4d..8641eab 100644
--- a/.api-version
+++ b/.api-version
@@ -1 +1 @@
-1.9.1-open
\ No newline at end of file
+1.9.2-open
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..f495a39
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,46 @@
+name: Open Pryv.io CI
+
+on:
+ pull_request:
+ branches:
+ - '*'
+ push:
+ branches:
+ - '*'
+
+jobs:
+ build:
+ runs-on: ubuntu-22.04
+
+ strategy:
+ matrix:
+ node-version: [18.16.0]
+
+ steps:
+ - name: Install `just`
+ uses: extractions/setup-just@v2
+
+ - name: Checkout repository with submodules
+ uses: actions/checkout@v4
+
+ - name: Install Node.js with version ${{ matrix.node-version }}
+ uses: actions/setup-node@v4
+ with:
+ node-version: ${{ matrix.node-version }}
+
+
+ - name: Setup Open Pryv.io
+ run: |
+ npm run setup-dev-env
+ npm install
+ sudo apt-get install graphicsmagick
+
+ - name: Run tests on Open Pryv.io with coverage
+ run: |
+ IS_CI=true just test-cover-lcov
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v4.0.1
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ slug: pryv/open-pryv.io
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 507743a..d1a3b3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,8 +3,8 @@ public_html/
app-web-auth3/
node_modules/
docker/dockerized-open-pryv
-ferretDB/data
-
+.nyc_output/
+coverage/
# not commiting this into OS version
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..a444ec2
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,182 @@
+
+## 1.9
+
+### 1.9.2
+- Refactored Attachments (Event Files) Logic to be modular for future cloud storage of files such as S3.
+
+### 1.9.1
+- Implemented ferretDB compatibility allowing full-open source modules
+- Replaced rec.la by backloop.dev
+
+### 1.9.0
+
+- Remove FlowType and convert (best-effort) typing information into JSDoc comments
+- Update to MongoDB v6
+- Update to node v18
+- Stream deletion eventIds when deleting streams to avoid timeout
+- Introduce platform DB for future cross-cores usage
+- Unify SQLite usage across audit and storage
+- Move attachments to per-user directories
+- Finalize data-store API for first public release
+- Many linting fixes
+- Support for multiple CAA (certificate authority issuers)
+- Bug fixes:
+ - Non-reusable deleted streamIds when following auth process #484
+ - SQLITE_BUSY error thrown in multi-core #487
+
+## 1.8
+
+### 1.8.1
+
+- Fix migration 1.6.x to 1.8.0 bug
+
+### 1.8.0
+
+- Add support for password rules: complexity, age, reuse; see API server's `auth.password*` settings
+ - Affected methods are: create user (`POST /users`), change password (`{user endpoint}/account/change-password`), reset password (`{user endpoint}/account/reset-password`) and login (`{user endpoint}/auth/login`)
+- Add undocumented support for external stores (a.k.a. "data mapping" feature); see component `pryv-datastore` (will be published separately when appropriate)
+
+## 1.7
+
+### 1.7.14
+- Fix crash caused by permissions selfRevoke used in combination with BACKWARD_COMPATIBILITY_SYSTEM_STREAMS_PREFIX set to true.
+- Fix issue with `accesses.create` and selfRevoke permissions that was only possible with a personalToken.
+
+### 1.7.13
+
+- Fix another issue when BACKWARD_COMPATIBILITY_SYSTEM_STREAMS_PREFIX is set to "true" - children streams' ids were not following the correct format
+- Fix a performance issue when querying events by type
+- Fix an issue which caused the service not to restart properly in some situations
+
+### 1.7.12
+
+- Fix issue when BACKWARD_COMPATIBILITY_SYSTEM_STREAMS_PREFIX is set to "true" - "account" streamId was handled as ".account"
+
+### 1.7.10
+
+- API change: Don't coerce event content and simplify known type validation process in api-server
+- serviceInfo:eventTypes URL now supports `file://` protocol allowing it to load definition from file system
+
+### 1.7.9
+
+- Fix issue with events.getAttachment making core crash if filename contained fancy characters by putting it in the 'Content-disposition' header
+- Security fix: make password reset token single-use
+- Security fix: hide "newPassword" in logs when an error occurs in account.resetPassword
+
+### 1.7.7
+
+- Fix issue where a deleted user was kept in the cache, thus rendering the reuse of username possible, but failing all subsequent calls as the password and tokens were not returned (since the wrong userId was returned by the cache)
+- Fix issue where attempting to create streams with id 'size' would return an error
+- Fix socket.io CORS issue
+
+### 1.7.6
+
+- Fix access-info permissions
+
+### 1.7.5
+
+- add missing system stream permissions accesses
+- change __unique properties cleanup, just match them by key suffix, not from current serializer unique props. Avoids migration error if uniqueness has been modified.
+
+
+### 1.7.1
+
+- migrate tags into streams
+
+### 1.7.0
+
+- introduce mall abstraction
+- add integrity
+- refactor access permissions logic
+
+## 1.6
+
+### 1.6.21
+
+Fixes:
+
+- fix boolean/bool event type that was not allowed
+- fix HF null values for optional values that was not fully working
+
+Changes:
+
+- increase username characters limit to 60
+
+### 1.6.20
+
+- Implement system route to deactivate MFA
+
+### 1.6.18
+
+- Fix welcome email: don't wait for welcome email sending before replying to client.
+
+### 1.6.16
+
+- Fix versioning: update unique system events bug
+
+### 1.6.15
+
+- Fix user deletion
+
+### 1.6.14
+
+- personal token can delete an account
+- add external licenser: pryv/app-node-licenser
+- fix security issue with users registration conflicts leaking random email addresses
+
+### 1.6.13
+
+- Unify configuration into boiler
+- Fixes for Open Pryv.io
+
+### 1.6.12
+
+Fixes:
+
+- versioning now works when trashing event
+
+### 1.6.7
+
+New Features:
+
+- Stream queries for events.get
+
+Fixes:
+
+- usernames starting with "system" are available
+- personal token expiration now fixed
+- Users create call on core username error message now specifies that letters must be lowercase
+
+Changes:
+
+- In configuration, rename "singleNode" to "dnsLess", keeping retro-compatibility for "singleNode" with warning message
+
+Removals:
+
+- Deprecated "GET /who-am-i" API method removed
+- Remove pryvuser-cli code (the image was not built since July)
+
+### 1.6.3
+
+Custom Auth function now has access to all headers.
+
+### 1.6.2
+
+- Fix migration that was skipping passwordHash leading to users not being able to login
+- add errors if this cases arises
+
+### 1.6.1
+
+Fixes for dnsLess/openSource:
+
+- /reg/service/info
+- dependencies
+- boost POST payload to 10MB for HF server
+
+### 1.6.0
+
+system streams:
+
+- customizable (& extendable) unique and indexed account properties
+- access to account properties through the events API with its access management
+- user account deletion through administration API
\ No newline at end of file
diff --git a/README-DBs.md b/README-DBs.md
new file mode 100644
index 0000000..2472d2e
--- /dev/null
+++ b/README-DBs.md
@@ -0,0 +1,102 @@
+# Pryv.io Databases
+
+Initially Pryv.io was built on top of MongoDB with a separate collection per user. This initial design made it possible to isolate per-user data on the file system.
+
+This design has a drawback as MongoDB was consuming a fixed amount of RAM per collection, and with growing sets of users (over 40'000 per node) 16Gb was needed. In v1.6.0 an option to merge the MongoDB collections was added, resulting in an average RAM requirement of 4Gb for 100'000 users.
+
+From v1.7.0 Sqlite has been investigated in order to provide back the ability to isolate per-user data on the file system. The motivation is to provide full control over the user's data in order to facilitate and prove the "right to be forgotten."
+
+From v1.8.0 a Sqlite version for Event has been provided on top of the [datastore](https://github.com/pryv/pryv-datastore) abstraction.
+
+From v1.9.x [FerretDB](https://www.ferretdb.com) has been implemented as an optional replacement of MongoDB.
+
+Since v1.9.2 Pryv.io can be deployed in a "full-cloud" setup without relying on the file system. This can be done by configuring all storage modules to use MongoDB. For the attachments, an S3 implementation is in development.
+
+For future v1.9.3 Pryv.io will also be capable of being "full local" with only SQLite databases.
+
+## List of storage used in Pryv.io
+
+#### User local directory
+
+base code: [components/storage/src/userLocalDirectory.js](components/storage/src/userLocalDirectory.js)
+
+Localization of user data on the host file system, usually in `var-pryv/users`; a directory path is then constructed using the last 3 characters of the userId and the userId.
+
+Example with userId `c123456789abc`: `var-pryv/users/c/b/a/c123456789abc/`
+
+In this directory, the attachments and any user attributed data and sqlite db should be stored.
+
+#### User local index
+
+base code: [components/storage/src/userLocalIndex.js](components/storage/src/userLocalIndex.js)
+
+This database is a per-server index to map userId and userName. In the future it could be extended to allow user aliases.
+
+- With SQLite (default) the db file can be usually found at `var-pryv/user-index.db`
+- With MongoDB the collection is `id4name` and stored in the main host database `pryv-node`
+
+Settings to activate MongoDB/ferretDB instead of SQLite: `storageUserIndex:engine = 'mongodb'`
+
+Script to migrate userIndex from SQLite to MongoDB: [read first](#sql2mongo)
+`LOGS=info node components/storage/src/migrations/switchSqliteMongo/usersIndex.js --config configs/api.yml`
+
+#### User account storage
+
+base code: [components/storage/src/userAccountStorage*.js](components/storage/src/) *: Mongo or Sqlite
+
+This database contains the password and passwords history of the user.
+
+- With SQLite (default) it can be found in the "User local directory" named as `account-1.0.0.sqlite` .
+- With MongoDB the collection is `passwords` and stored in the main host database `pryv-node`
+
+Settings to activate MongoDB/ferretDB instead of SQLite: `storageUserAccount:engine = 'mongodb'`
+
+Script to migrate from SQLite to MongoDB: [read first](#sql2mongo)
+`LOGS=info node components/storage/src/migrations/switchSqliteMongo/userAccountStorage.js --config configs/api.yml`
+
+#### Platform Wide Shared Storage
+
+base code: [components/platform](components/platform)
+
+This database contains all indexed and unique fields for users such as emails and custom systems streams data.
+
+In the Enterprise version of Pryv, it acts as a local cache and reports to `service-register`, which is the main index. For Open-Pryv.io, platformDB should evolve into a database shared between running service-core instances.
+
+- With SQLite (default) the db file can be usually found at `var-pryv/platform-wide.db`
+- With MongoDB
+
+Settings to activate MongoDB/ferretDB instead of SQLite:`storagePlatform:engine = 'mongodb'`
+
+Script to migrate from SQLite to MongoDB: [read first](#sql2mongo)
+
+`LOGS=info node components/storage/src/migrations/switchSqliteMongo/platformDB.js --config configs/api.yml`
+
+#### Events, Streams & Attachments Storage
+
+base code: [components/storage/src/localDataStore](components/storage/src/localDataStore) and [localDataStoreSQLite](components/storage/src/localDataStoreSqlite)
+
+Main storage for `events` , `streams` & `attachments` this implementation follows the modular API of [datastore](https://github.com/pryv/pryv-datastore) abstraction.
+
+- Fully implemented with MongoDB/FerretDB
+- Only events are implemented with SQLite - Expecting full SQLite implementation in v1.9.3
+
+#### Profile, Accesses, FollowedSlices & Webhooks Storage
+
+base code: [components/storage/src/user](components/storage/src/user)
+
+Only implemented for MongoDB/FerretDB - Expecting full SQLite implementation in v1.9.3
+
+### Notes
+
+#### Known issues
+
+- [ ] test B2I7 is failing when testing `storage` with `full-mongo` as indexes for password is not yet created. Run `just test-full-mongo storage` to reproduce
+
+#### Using SQlite to MongoDB migration scripts
+
+1. Make sure that all Pryv.io components are stopped but `MongoDB`
+2. Do not set the `storage*:engine` setting to `mongodb` yet !
+3. Run the scripts
+4. Change appropriate setting to `storage*:engine = 'mongodb'`
+5. Start all services and check
+6. If all is fine, related SQLite DB should be deleted manually
\ No newline at end of file
diff --git a/README.md b/README.md
index e6b0dac..6cdf4e4 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,5 @@
+[](https://codecov.io/gh/pryv/open-pryv.io)
+
# Open Pryv.io

@@ -12,6 +14,11 @@ Maintained and developed by Pryv.

+## Digital Public Good
+
+
+Open-Pryv.io is recognized as a digital public good by [DPGAlliance](https://digitalpublicgoods.net/registry/), a UN-endorsed initiative that facilitates the discovery and deployment of open-source technologies.
+
## Features
- Provides latest Pryv.io core system ready for production
@@ -57,7 +64,7 @@ Choose your Set-up
### Docker
-The dockerized versions and their instructions are available at this link: [Download link](https://pryv.github.io/open-pryv.io/docker/dockerized-open-pryv-1.9.0.tgz).
+The dockerized versions and their instructions are available at this link: [Download link](https://pryv.github.io/open-pryv.io/docker/dockerized-open-pryv-1.9.2.tgz).
If you wish to build the images yourself, refer to the following README: [docker/README-build.md](docker/README-build.md).
@@ -171,6 +178,10 @@ You can also try our [example apps with guides and tutorials](https://github.com
## Options & Customization
+### From version 1.9.2 OpenPryv.io supports Full MongoDB/FerretDB
+
+Read [README-DBs](README-DBs.md) for more information.
+
### Authentication & Registration web app.
Open Pryv.io comes packaged with [app-web-auth3](https://github.com/pryv/app-web-auth3), the default web pages for app authentication, user registration and password reset.
diff --git a/RELEASE-1.9.2-TASKLIST.md b/RELEASE-1.9.2-TASKLIST.md
new file mode 100644
index 0000000..05a0cb4
--- /dev/null
+++ b/RELEASE-1.9.2-TASKLIST.md
@@ -0,0 +1,67 @@
+# RELEASE 1.9.2
+
+- Provide a Full-Mongo distribution
+- Provide cloud management for Attachments (Files)
+- Provide ferretDB compatibility to be fully open-source
+- (Optional) provides hooks for encryption mechanisms
+
+## TASKLIST
+
+### Remove 'sqlite' from
+
+- [x] [platform/DB](https://github.com/pryv/open-pryv.io/blob/full-mongo/components/platform/src/) which contains all unique and indexed field. This DB should be distributed among servers
+- [x] [userLocalDir](https://github.com/pryv/open-pryv.io/blob/full-mongo/components/storage/src/userLocalDirectory.js) map userId / userName
+- [x] [userAccountStorage](https://github.com/pryv/open-pryv.io/full-mongo/master/components/storage/src/userAccountStorage.js) contains password and password history
+
+Task is completed when a script to migrate is provided and settings to activate.
+
+### Known issues
+- [ ] test B2I7 is failing when testing `storage` with `full-mongo` as indexes for password is not yet created. Run `just test-full-mongo storage` to reproduce
+
+### Move Attachments to an online storage
+
+- [ ] GridFS
+- [ ] S3
+
+### Documentation
+
+- [ ] Add instructions on how to move / copy previous user data
+- [ ] Add instructions on how to remove previous configurations associated with user files
+
+### (Optional) Put all config in MongoDB
+
+- For docker version of open-pryv.io.
+ - default config to be hardcoded in container
+ - Custom value saved in mongoDB, with connection parameters given by `env`
+
+## Usage
+
+#### Migration scripts
+
+- platform: `LOGS=info node components/storage/src/migrations/switchSqliteMongo/platformDB.js --config configs/api.yml`
+- userStorage: `LOGS=info node components/storage/src/migrations/switchSqliteMongo/userAccountStorage.js --config configs/api.yml`
+- usersIndex: `LOGS=info node components/storage/src/migrations/switchSqliteMongo/usersIndex.js --config configs/api.yml`
+
+#### Settings
+
+- Platform: `storagePlatform:engine = 'mongodb'`
+- userStorage: `storageUserAccount:engine = 'mongodb'`
+- storageUserIndex: `storageUserIndex:engine = 'mongodb'`
+
+### Know issue
+- [ ] B2I7 **storage** test is failing after migrations test because indexes are lost. But runs fine independently.
+
+## Log
+
+27/03/2024 - Made a MongoDB version of platform/DB
+
+- migrated into a separate db: `pryv-node-platform` as it should behave differently than the user-based DB (`pryv-node`).
+- Collections are: `keyValueIndexed` and `keyValueUnique`
+
+27/03/2024 - Made a MongoDB version of userStorage
+
+- migrated to in `pryv-node` in collections `passwords` & `stores-key-value`
+
+28/03/2024 - Made a MongoDB version of userIndex
+
+- migrated to in `pryv-node` in collection `id4name`
diff --git a/components/api-server/bin/nightly b/components/api-server/bin/nightly
index 11bae11..20cdbff 100755
--- a/components/api-server/bin/nightly
+++ b/components/api-server/bin/nightly
@@ -18,11 +18,7 @@ const { getUsersRepository } = require('business/src/users/repository');
// Construct storage size object
const storage = require('../../storage');
- const storageLayer = app.storageLayer;
- const storageSize = new storage.Size(
- [storageLayer.accesses, storageLayer.followedSlices, storageLayer.profile],
- [storageLayer.eventFiles]
- );
+ const storageSize = new storage.Size();
// This bit is useful to trace down promise rejections that aren't caught.
process.on('unhandledRejection', (reason, promise) => {
@@ -36,7 +32,7 @@ const { getUsersRepository } = require('business/src/users/repository');
process.exit(2);
});
- runNightlyTasks(storageLayer, storageSize)
+ runNightlyTasks(storageSize)
.then(() => {
logger.info('Nightly tasks completed.');
process.exit(0);
@@ -50,7 +46,7 @@ const { getUsersRepository } = require('business/src/users/repository');
/**
* Standalone script to perform nightly tasks (such as updating storage sizes for all users).
*/
-async function runNightlyTasks (storageLayer, storageSize) {
+async function runNightlyTasks (storageSize) {
logger.info('Starting update of storage size');
// Retrieve all existing users
diff --git a/components/api-server/test/account.test.js b/components/api-server/test/account.test.js
index 20fd91c..766f4fb 100644
--- a/components/api-server/test/account.test.js
+++ b/components/api-server/test/account.test.js
@@ -47,11 +47,11 @@ const ErrorIds = require('errors').ErrorIds;
const validation = helpers.validation;
const methodsSchema = require('../src/schema/accountMethods');
const pwdResetReqsStorage = helpers.dependencies.storage.passwordResetRequests;
-const storageSize = helpers.dependencies.storage.size;
const testData = helpers.data;
const { getUsersRepository } = require('business/src/users');
const { getUserAccountStorage } = require('storage');
const { getConfig } = require('@pryv/boiler');
+const { getMall } = require('mall');
const encryption = require('utils').encryption;
let isOpenSource = false;
@@ -60,12 +60,14 @@ describe('[ACCO] account', function () {
const user = structuredClone(testData.users[0]);
let usersRepository = null;
let userAccountStorage = null;
+ let mall = null;
before(async () => {
const config = await getConfig();
isOpenSource = config.get('openSource:isActive');
usersRepository = await getUsersRepository();
userAccountStorage = await getUserAccountStorage();
+ mall = await getMall();
});
const basePath = '/' + user.username + '/account';
@@ -84,7 +86,6 @@ describe('[ACCO] account', function () {
testData.resetFollowedSlices,
testData.resetStreams,
- testData.resetAttachments,
server.ensureStarted.bind(server, helpers.dependencies.settings),
function (stepDone) {
request = helpers.request(server.url);
@@ -247,7 +248,6 @@ describe('[ACCO] account', function () {
testData.resetFollowedSlices,
testData.resetStreams,
- testData.resetAttachments,
server.ensureStarted.bind(server, helpers.dependencies.settings),
function (stepDone) {
request = helpers.request(server.url);
@@ -264,27 +264,22 @@ describe('[ACCO] account', function () {
it('[NFJQ] must properly compute used storage size for a given user when called', async () => {
const newAtt = testData.attachments.image;
- let storageUsed = await storageSize.computeForUser(user);
- assert.isAbove(storageUsed.dbDocuments, 0);
+ const storageInfoInitial = await mall.getUserStorageInfos(user.id);
const expectedAttsSize = _.reduce(testData.events, function (total, evt) {
return total + getTotalAttachmentsSize(evt);
}, 0);
// On Ubuntu with ext4 FileSystem the size difference is 4k, not 1k. I still dunno why.
- assert.approximately(storageUsed.attachedFiles, expectedAttsSize, filesystemBlockSize);
- const initialStorageUsed = storageUsed;
+ assert.approximately(storageInfoInitial.local.files.sizeKb, expectedAttsSize, filesystemBlockSize);
await bluebird.fromCallback(cb => addEventWithAttachment(newAtt, cb));
- storageUsed = await storageSize.computeForUser(user);
+ const storageInfoAfter = await mall.getUserStorageInfos(user.id);
// hard to know what the exact difference should be, so we just expect it's bigger
- assert.isAbove(storageUsed.dbDocuments, initialStorageUsed.dbDocuments);
- assert.approximately(storageUsed.attachedFiles, initialStorageUsed.attachedFiles +
+ assert.isAbove(storageInfoAfter.local.events.count, storageInfoInitial.local.events.count);
+ assert.approximately(storageInfoAfter.local.files.sizeKb, storageInfoInitial.local.files.sizeKb +
newAtt.size, filesystemBlockSize);
- const updatedStorageUsed = storageUsed;
- const retrievedUser = await usersRepository.getUserById(user.id);
- assert.deepEqual(retrievedUser.storageUsed, updatedStorageUsed);
});
// test nightly job script
@@ -296,8 +291,8 @@ describe('[ACCO] account', function () {
execSync('node ./bin/nightly');
// Verify initial storage usage
- const initialStorageUsed = await storageSize.computeForUser(user);
- initialStorageUsed.attachedFiles.should.be.above(0);
+ const initialStorageInfo = await mall.getUserStorageInfos(user.id);
+ initialStorageInfo.local.files.sizeKb.should.be.above(0);
// Add an attachment
await bluebird.fromCallback(
@@ -307,11 +302,11 @@ describe('[ACCO] account', function () {
execSync('node ./bin/nightly');
// Verify updated storage usage
- const updatedStorageUsed = await storageSize.computeForUser(user);
+ const updatedStorageInfo = await mall.getUserStorageInfos(user.id);
- updatedStorageUsed.dbDocuments.should.be.above(initialStorageUsed.dbDocuments);
- updatedStorageUsed.attachedFiles.should.be.approximately(
- initialStorageUsed.attachedFiles + newAtt.size, filesystemBlockSize);
+ updatedStorageInfo.local.events.count.should.be.above(initialStorageInfo.local.events.count);
+ updatedStorageInfo.local.files.sizeKb.should.be.approximately(
+ initialStorageInfo.local.files.sizeKb + newAtt.size, filesystemBlockSize);
});
function addEventWithAttachment (attachment, callback) {
@@ -351,8 +346,8 @@ describe('[ACCO] account', function () {
});
it('[93AP] must be approximately updated (diff) when deleting an attached file', async function () {
- const deletedAtt = testData.events[0].attachments[0];
- const initialStorageUsed = await storageSize.computeForUser(user);
+ const deletedAtt = testData.dynCreateAttachmentIdMap[testData.events[0].id][0];
+ const initialStorageInfo = await mall.getUserStorageInfos(user.id);
const path = '/' + user.username + '/events/' + testData.events[0].id + '/' +
deletedAtt.id;
@@ -363,17 +358,17 @@ describe('[ACCO] account', function () {
// either we do the request with superagent, or we update request()
}
- const updatedStoragedUsed = await storageSize.computeForUser(user);
- assert.equal(updatedStoragedUsed.dbDocuments, initialStorageUsed.dbDocuments);
- assert.approximately(updatedStoragedUsed.attachedFiles,
- initialStorageUsed.attachedFiles - deletedAtt.size,
+ const updatedStorageInfo = await mall.getUserStorageInfos(user.id);
+ assert.equal(updatedStorageInfo.local.events.count, initialStorageInfo.local.events.count);
+ assert.approximately(updatedStorageInfo.local.files.sizeKb,
+ initialStorageInfo.local.files.sizeKb - deletedAtt.size,
filesystemBlockSize);
});
it('[5WO0] must be approximately updated (diff) when deleting an event', async function () {
const deletedEvt = testData.events[2];
const deletedEvtPath = '/' + user.username + '/events/' + deletedEvt.id;
- const initialStorageUsed = await storageSize.computeForUser(user);
+ const initialStorageInfo = await mall.getUserStorageInfos(user.id);
try {
await request.del(deletedEvtPath);
} catch (e) {}
@@ -381,10 +376,10 @@ describe('[ACCO] account', function () {
await request.del(deletedEvtPath);
} catch (e) {}
- const updatedStoragedUsed = await storageSize.computeForUser(user);
- assert.equal(updatedStoragedUsed.dbDocuments, initialStorageUsed.dbDocuments);
- assert.approximately(updatedStoragedUsed.attachedFiles,
- initialStorageUsed.attachedFiles - getTotalAttachmentsSize(deletedEvt),
+ const updatedStorageInfo = await mall.getUserStorageInfos(user.id);
+ assert.equal(updatedStorageInfo.local.events.count, initialStorageInfo.local.events.count);
+ assert.approximately(updatedStorageInfo.local.files.sizeKb,
+ initialStorageInfo.local.files.sizeKb - getTotalAttachmentsSize(deletedEvt),
filesystemBlockSize);
});
diff --git a/components/api-server/test/deletion.test.js b/components/api-server/test/deletion.test.js
index e263611..3c21f26 100644
--- a/components/api-server/test/deletion.test.js
+++ b/components/api-server/test/deletion.test.js
@@ -92,7 +92,6 @@ describe('[PGTD] DELETE /users/:username', () => {
influx = produceInfluxConnection(app.config);
influxRepository = new InfluxRepository(influx);
usersRepository = await getUsersRepository();
- app.storageLayer.eventFiles.removeAll();
username1 = charlatan.Internet.userName();
username2 = charlatan.Internet.userName();
authKey = config.get('auth:adminAccessKey');
@@ -101,7 +100,6 @@ describe('[PGTD] DELETE /users/:username', () => {
after(async function () {
config.injectTestConfig({});
await mongoFixtures.context.cleanEverything();
- app.storageLayer.eventFiles.removeAll();
});
describe('[USAD] depending on "user-account:delete" config parameter', function () {
let personalAccessToken;
@@ -277,9 +275,8 @@ describe('[PGTD] DELETE /users/:username', () => {
assert(sessions === null || sessions === []);
});
it(`[${testIDs[i][2]}] should delete user event files`, async function () {
- const pathToUserFiles = app.storageLayer.eventFiles.getUserPath(userToDelete.attrs.id);
- const userFileExists = fs.existsSync(pathToUserFiles);
- assert.isFalse(userFileExists);
+ const infos = await mall.getUserStorageInfos(userToDelete.attrs.id);
+ assert.equal(infos.local.files.sizeKb, 0);
});
it(`[${testIDs[i][8]}] should delete HF data`, async function () {
if (isOpenSource) { this.skip(); }
@@ -328,8 +325,8 @@ describe('[PGTD] DELETE /users/:username', () => {
assert(sessions !== null || sessions !== []);
});
it(`[${testIDs[i][4]}] should not delete other user event files`, async function () {
- const totalFilesSize = await app.storageLayer.eventFiles.getTotalSize({ id: username2 });
- assert.notEqual(totalFilesSize, 0);
+ const sizeInfo = await mall.getUserStorageInfos(username2);
+ assert.notEqual(sizeInfo.local.files.sizeKb, 0);
});
it(`[${testIDs[i][7]}] should delete on register`, async function () {
if (settingsToTest[i][0]) { this.skip(); } // isDnsLess
@@ -435,13 +432,15 @@ describe('[PGTD] DELETE /users/:username', () => {
});
});
/**
- * @param {string} username
+ * @param {string} userId
* @returns {Promise}
*/
-async function initiateUserWithData (username) {
- const user = await mongoFixtures.user(username);
+async function initiateUserWithData (userId) {
+ const user = await mongoFixtures.user(userId);
const stream = await user.stream({ id: charlatan.Lorem.word() });
+ const eventId = cuid();
await stream.event({
+ id: eventId,
type: 'mass/kg',
content: charlatan.Number.digit()
});
@@ -454,18 +453,25 @@ async function initiateUserWithData (username) {
});
await user.session(charlatan.Lorem.word());
if (!isOpenSource) { user.webhook({ id: charlatan.Lorem.word() }, charlatan.Lorem.word()); }
- const filePath = `test-file-${username}`;
+ const filePath = `test-file-${userId}`;
fs.writeFileSync(filePath, 'Just some text');
- await app.storageLayer.eventFiles.saveAttachmentFromTemp(path.resolve(filePath), username, charlatan.Lorem.word());
+ const attachmentItem = {
+ fileName: 'sample-file.txt',
+ type: 'text/txt',
+ size: 'Just some text'.length,
+ attachmentData: fs.createReadStream(path.resolve(filePath)) // simulate full pass-thru of attachement until implemented
+ };
+ await mall.events.addAttachment(userId, eventId, attachmentItem);
+ await fs.promises.unlink(filePath);
if (!isOpenSource) {
- const usersSeries = await influxRepository.get(`user.${username}`, `event.${cuid()}`);
+ const usersSeries = await influxRepository.get(`user.${userId}`, `event.${cuid()}`);
const data = new DataMatrix(['deltaTime', 'value'], [
[0, 10],
[1, 20]
]);
usersSeries.append(data);
// generate audit trace
- await request.get(`/${username}/events`).set('Authorization', token);
+ await request.get(`/${userId}/events`).set('Authorization', token);
}
return user;
}
diff --git a/components/api-server/test/events.test.js b/components/api-server/test/events.test.js
index e9d8519..4f99cd5 100644
--- a/components/api-server/test/events.test.js
+++ b/components/api-server/test/events.test.js
@@ -49,9 +49,9 @@ const attachmentsCheck = helpers.attachmentsCheck;
const commonTests = helpers.commonTests;
const validation = helpers.validation;
const ErrorIds = require('errors').ErrorIds;
-const eventFilesStorage = helpers.dependencies.storage.user.eventFiles;
const methodsSchema = require('../src/schema/eventsMethods');
const testData = helpers.data;
+const addCorrectAttachmentIds = testData.addCorrectAttachmentIds;
const { TAG_PREFIX } = require('api-server/src/methods/helpers/backwardCompatibility');
const { integrity } = require('business');
@@ -59,7 +59,7 @@ const { getMall } = require('mall');
require('date-utils');
-describe('events', function () {
+describe('[EVNT] events', function () {
const user = structuredClone(testData.users[0]);
const basePath = '/' + user.username + '/events';
const testType = 'test/test';
@@ -126,7 +126,7 @@ describe('events', function () {
let accountStreamsEvents;
async.series([
async function createEvents () {
- return mall.events.createMany(user.id, additionalEvents);
+ for (const event of additionalEvents) await mall.events.create(user.id, event);
},
function getDefault (stepDone) {
request.get(basePath).end(function (res) {
@@ -151,12 +151,15 @@ describe('events', function () {
stepDone();
},
function checkResponse (stepDone) {
+ const allEventsCorrected = addCorrectAttachmentIds(allEvents);
+ const body = { events: _.take(_.sortBy(allEventsCorrected, 'time').reverse(), 20 - accountStreamsEvents.length) };
+
validation.check(response, {
status: 200,
schema: methodsSchema.get.result,
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
- body: { events: _.take(_.sortBy(allEvents, 'time').reverse(), 20 - accountStreamsEvents.length) }
+ body
}, stepDone);
},
testData.resetEvents
@@ -171,13 +174,14 @@ describe('events', function () {
sortAscending: false // explicitly set default value to check it works too...
};
request.get(basePath).query(params).end(function (res) {
+ const correctedEvents = addCorrectAttachmentIds(_.at(testData.events, 9, 7, 6, 4, 3, 2, 1, 0));
validation.check(res, {
status: 200,
schema: methodsSchema.get.result,
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.at(testData.events, 9, 7, 6, 4, 3, 2, 1, 0)
+ events: correctedEvents
}
}, done);
});
@@ -200,13 +204,14 @@ describe('events', function () {
fromTime: timestamp.now('-48h')
};
request.get(basePath).query(params).end(function (res) {
+ const correctedEvents = addCorrectAttachmentIds(_.at(testData.events, 3, 2, 0));
validation.check(res, {
status: 200,
schema: methodsSchema.get.result,
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.at(testData.events, 3, 2, 0)
+ events: correctedEvents
}
}, done);
});
@@ -224,7 +229,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.at(testData.events, 11, 3, 2, 0)
+ events: addCorrectAttachmentIds(_.at(testData.events, 11, 3, 2, 0))
}
}, done);
});
@@ -242,7 +247,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.at(testData.events, 12, 4, 2)
+ events: addCorrectAttachmentIds(_.at(testData.events, 12, 4, 2))
}
}, done);
});
@@ -294,7 +299,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.at(testData.events, 1, 2, 3)
+ events: addCorrectAttachmentIds(_.at(testData.events, 1, 2, 3))
}
}, done);
});
@@ -350,7 +355,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.at(testData.events, 2)
+ events: addCorrectAttachmentIds(_.at(testData.events, 2))
}
}, done);
});
@@ -421,8 +426,8 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events: _.sortBy(validation.removeDeletionsAndHistory(testData.events), 'time')
- .reverse()
+ events: addCorrectAttachmentIds(_.sortBy(validation.removeDeletionsAndHistory(testData.events), 'time')
+ .reverse())
}
}, done);
});
@@ -452,7 +457,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events
+ events: addCorrectAttachmentIds(events)
}
}, done);
});
@@ -509,7 +514,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events,
+ events: addCorrectAttachmentIds(events),
eventDeletions
}
}, cb));
@@ -536,7 +541,7 @@ describe('events', function () {
sanitizeFn: validation.sanitizeEvents,
sanitizeTarget: 'events',
body: {
- events
+ events: addCorrectAttachmentIds(events)
}
}, done);
});
@@ -553,8 +558,9 @@ describe('events', function () {
it('[F29M] must return the attached file with the correct headers', function (done) {
const event = testData.events[0];
const attachment = event.attachments[0];
+ const effectiveAttachmentId = testData.dynCreateAttachmentIdMap[event.id][0].id;
- request.get(path(event.id) + '/' + attachment.id).end(function (res) {
+ request.get(path(event.id) + '/' + effectiveAttachmentId).end(function (res) {
res.statusCode.should.eql(200);
res.headers.should.have.property('content-type', attachment.type);
@@ -1084,63 +1090,67 @@ describe('events', function () {
testData.attachments.document.filename)
.attach('image', testData.attachments.image.path,
testData.attachments.image.filename)
- .end(function (res) {
- validation.check(res, {
- status: 201,
- schema: methodsSchema.create.result
- });
-
- createdEvent = res.body.event;
+ .end(async function (res) {
+ try {
+ validation.check(res, {
+ status: 201,
+ schema: methodsSchema.create.result
+ });
- validation.checkFilesReadToken(createdEvent, access, filesReadTokenSecret);
- validation.sanitizeEvent(createdEvent);
- expected = _.extend(data, {
- id: createdEvent.id,
- integrity: createdEvent.integrity,
- attachments: [
- {
- id: createdEvent.attachments[0].id,
- fileName: testData.attachments.document.filename,
- type: testData.attachments.document.type,
- size: testData.attachments.document.size,
- integrity: testData.attachments.document.integrity
- },
- {
- id: createdEvent.attachments[1].id,
- fileName: testData.attachments.image.filename,
- type: testData.attachments.image.type,
- size: testData.attachments.image.size,
- integrity: testData.attachments.image.integrity
- }
- ],
- streamIds: data.streamIds.concat(data.tags.map(t => TAG_PREFIX + t))
- });
+ createdEvent = res.body.event;
+
+ validation.checkFilesReadToken(createdEvent, access, filesReadTokenSecret);
+ validation.sanitizeEvent(createdEvent);
+ expected = _.extend(data, {
+ id: createdEvent.id,
+ integrity: createdEvent.integrity,
+ attachments: [
+ {
+ id: createdEvent.attachments[0].id,
+ fileName: testData.attachments.document.filename,
+ type: testData.attachments.document.type,
+ size: testData.attachments.document.size,
+ integrity: testData.attachments.document.integrity
+ },
+ {
+ id: createdEvent.attachments[1].id,
+ fileName: testData.attachments.image.filename,
+ type: testData.attachments.image.type,
+ size: testData.attachments.image.size,
+ integrity: testData.attachments.image.integrity
+ }
+ ],
+ streamIds: data.streamIds.concat(data.tags.map(t => TAG_PREFIX + t))
+ });
- expected.created = createdEvent.created;
- expected.createdBy = createdEvent.createdBy;
- expected.modified = createdEvent.modified;
- expected.modifiedBy = createdEvent.modifiedBy;
- if (!integrity.attachments.isActive) {
- delete expected.attachments[0].integrity;
- delete expected.attachments[1].integrity;
- }
- if (!integrity.events.isActive) {
- delete expected.integrity;
+ expected.created = createdEvent.created;
+ expected.createdBy = createdEvent.createdBy;
+ expected.modified = createdEvent.modified;
+ expected.modifiedBy = createdEvent.modifiedBy;
+ if (!integrity.attachments.isActive) {
+ delete expected.attachments[0].integrity;
+ delete expected.attachments[1].integrity;
+ }
+ if (!integrity.events.isActive) {
+ delete expected.integrity;
+ }
+ integrity.events.set(expected);
+ validation.checkObjectEquality(createdEvent, expected);
+
+ // check attached files
+ assert.isTrue(await attachmentsCheck.compareTestAndAttachedFiles(user, createdEvent.id,
+ createdEvent.attachments[0].id,
+ testData.attachments.document.filename));
+ assert.isTrue(await attachmentsCheck.compareTestAndAttachedFiles(user, createdEvent.id,
+ createdEvent.attachments[1].id,
+ testData.attachments.image.filename));
+
+ eventsNotifCount.should.eql(1, 'events notifications');
+
+ done();
+ } catch (e) {
+ done(e);
}
- integrity.events.set(expected);
- validation.checkObjectEquality(createdEvent, expected);
-
- // check attached files
- attachmentsCheck.compareTestAndAttachedFiles(user, createdEvent.id,
- createdEvent.attachments[0].id,
- testData.attachments.document.filename).should.equal('');
- attachmentsCheck.compareTestAndAttachedFiles(user, createdEvent.id,
- createdEvent.attachments[1].id,
- testData.attachments.image.filename).should.equal('');
-
- eventsNotifCount.should.eql(1, 'events notifications');
-
- done();
});
}
@@ -1168,44 +1178,48 @@ describe('events', function () {
.attach('$name.with:special-chars/',
fs.createReadStream(testData.attachments.document.path),
{ filename: 'file.name.with.many.dots.pdf' })
- .end(function (res) {
- validation.check(res, {
- status: 201,
- schema: methodsSchema.create.result
- });
+ .end(async function (res) {
+ try {
+ validation.check(res, {
+ status: 201,
+ schema: methodsSchema.create.result
+ });
- const createdEvent = validation.sanitizeEvent(res.body.event);
- const expected = _.extend(data, {
- id: createdEvent.id,
- attachments: [
- {
- id: createdEvent.attachments[0].id,
- fileName: 'file.name.with.many.dots.pdf',
- type: testData.attachments.document.type,
- size: testData.attachments.document.size,
- integrity: testData.attachments.document.integrity
- }
- ],
- streamIds: data.streamIds.concat(data.tags.map(t => TAG_PREFIX + t)),
- integrity: createdEvent.integrity
- });
+ const createdEvent = validation.sanitizeEvent(res.body.event);
+ const expected = _.extend(data, {
+ id: createdEvent.id,
+ attachments: [
+ {
+ id: createdEvent.attachments[0].id,
+ fileName: 'file.name.with.many.dots.pdf',
+ type: testData.attachments.document.type,
+ size: testData.attachments.document.size,
+ integrity: testData.attachments.document.integrity
+ }
+ ],
+ streamIds: data.streamIds.concat(data.tags.map(t => TAG_PREFIX + t)),
+ integrity: createdEvent.integrity
+ });
- if (!integrity.attachments.isActive) {
- delete expected.attachments[0].integrity;
- }
- if (!integrity.events.isActive) {
- delete expected.integrity;
- }
- validation.checkObjectEquality(createdEvent, expected);
+ if (!integrity.attachments.isActive) {
+ delete expected.attachments[0].integrity;
+ }
+ if (!integrity.events.isActive) {
+ delete expected.integrity;
+ }
+ validation.checkObjectEquality(createdEvent, expected);
- // check attached files
- attachmentsCheck.compareTestAndAttachedFiles(user, createdEvent.id,
- createdEvent.attachments[0].id,
- testData.attachments.document.filename).should.equal('');
+ // check attached files
+ assert.isTrue(await attachmentsCheck.compareTestAndAttachedFiles(user, createdEvent.id,
+ createdEvent.attachments[0].id,
+ testData.attachments.document.filename));
- eventsNotifCount.should.eql(1, 'events notifications');
+ eventsNotifCount.should.eql(1, 'events notifications');
- done();
+ done();
+ } catch (e) {
+ done(e);
+ }
});
});
@@ -1246,62 +1260,66 @@ describe('events', function () {
testData.attachments.image.fileName)
.attach('text', testData.attachments.text.path,
testData.attachments.text.fileName)
- .end(function (res) {
- validation.check(res, {
- status: 200,
- schema: methodsSchema.update.result
- });
+ .end(async function (res) {
+ try {
+ validation.check(res, {
+ status: 200,
+ schema: methodsSchema.update.result
+ });
- const updatedEvent = res.body.event;
- validation.checkFilesReadToken(updatedEvent, access, filesReadTokenSecret);
- validation.sanitizeEvent(updatedEvent);
+ const updatedEvent = res.body.event;
+ validation.checkFilesReadToken(updatedEvent, access, filesReadTokenSecret);
+ validation.sanitizeEvent(updatedEvent);
- const updatedEventAttachments = {};
- updatedEvent.attachments.forEach(function (attachment) {
- updatedEventAttachments[attachment.fileName] = attachment;
- });
+ const updatedEventAttachments = {};
+ updatedEvent.attachments.forEach(function (attachment) {
+ updatedEventAttachments[attachment.fileName] = attachment;
+ });
- const expected = structuredClone(event);
- expected.attachments = [];
- updatedEvent.attachments.forEach(function (attachment) {
- if (attachment.fileName === testData.attachments.image.filename) {
- const attData = {
- id: attachment.id,
- fileName: testData.attachments.image.filename,
- type: testData.attachments.image.type,
- size: testData.attachments.image.size
- };
- if (integrity.attachments.isActive) attData.integrity = testData.attachments.image.integrity;
- expected.attachments.push(attData);
- }
- if (attachment.fileName === testData.attachments.text.filename) {
- const attData = {
- id: attachment.id,
- fileName: testData.attachments.text.filename,
- type: testData.attachments.text.type,
- size: testData.attachments.text.size
- };
- if (integrity.attachments.isActive) attData.integrity = testData.attachments.text.integrity;
- expected.attachments.push(attData);
- }
- });
- expected.modified = updatedEvent.modified;
- expected.modifiedBy = access.id;
- integrity.events.set(expected);
+ const expected = structuredClone(event);
+ expected.attachments = [];
+ updatedEvent.attachments.forEach(function (attachment) {
+ if (attachment.fileName === testData.attachments.image.filename) {
+ const attData = {
+ id: attachment.id,
+ fileName: testData.attachments.image.filename,
+ type: testData.attachments.image.type,
+ size: testData.attachments.image.size
+ };
+ if (integrity.attachments.isActive) attData.integrity = testData.attachments.image.integrity;
+ expected.attachments.push(attData);
+ }
+ if (attachment.fileName === testData.attachments.text.filename) {
+ const attData = {
+ id: attachment.id,
+ fileName: testData.attachments.text.filename,
+ type: testData.attachments.text.type,
+ size: testData.attachments.text.size
+ };
+ if (integrity.attachments.isActive) attData.integrity = testData.attachments.text.integrity;
+ expected.attachments.push(attData);
+ }
+ });
+ expected.modified = updatedEvent.modified;
+ expected.modifiedBy = access.id;
+ integrity.events.set(expected);
- validation.checkObjectEquality(updatedEvent, expected);
+ validation.checkObjectEquality(updatedEvent, expected);
- // check attached files
- attachmentsCheck.compareTestAndAttachedFiles(user, event.id,
- updatedEventAttachments[testData.attachments.image.filename].id,
- testData.attachments.image.filename).should.equal('');
- attachmentsCheck.compareTestAndAttachedFiles(user, event.id,
- updatedEventAttachments[testData.attachments.text.filename].id,
- testData.attachments.text.filename).should.equal('');
+ // check attached files
+ assert.isTrue(await attachmentsCheck.compareTestAndAttachedFiles(user, event.id,
+ updatedEventAttachments[testData.attachments.image.filename].id,
+ testData.attachments.image.filename));
+ assert.isTrue(await attachmentsCheck.compareTestAndAttachedFiles(user, event.id,
+ updatedEventAttachments[testData.attachments.text.filename].id,
+ testData.attachments.text.filename));
- eventsNotifCount.should.eql(1, 'events notifications');
+ eventsNotifCount.should.eql(1, 'events notifications');
- done();
+ done();
+ } catch (e) {
+ done(e);
+ }
});
});
@@ -1314,35 +1332,42 @@ describe('events', function () {
.attach('text',
testData.attachments.text.path,
testData.attachments.text.fileName)
- .end(function (res) {
- validation.check(res, {
- status: 200,
- schema: methodsSchema.update.result
- });
+ .end(async function (res) {
+ try {
+ validation.check(res, {
+ status: 200,
+ schema: methodsSchema.update.result
+ });
- const updatedEvent = validation.sanitizeEvent(res.body.event);
- const expectedAttachments = event.attachments.slice();
- const attData = {
- id: updatedEvent.attachments[updatedEvent.attachments.length - 1].id,
- fileName: testData.attachments.text.filename,
- type: testData.attachments.text.type,
- size: testData.attachments.text.size
- };
- if (integrity.attachments.isActive) attData.integrity = testData.attachments.text.integrity;
- expectedAttachments.push(attData);
+ const updatedEvent = validation.sanitizeEvent(res.body.event);
+ const expectedAttachments = event.attachments.slice();
- const attachments = updatedEvent.attachments;
- should(attachments.length).be.eql(expectedAttachments.length);
+ // reset new attachment id after creation
+ for (let i = 0; i < expectedAttachments.length; i++) expectedAttachments[i].id = updatedEvent.attachments[i].id;
- attachments.should.eql(expectedAttachments);
+ const attData = {
+ id: updatedEvent.attachments[updatedEvent.attachments.length - 1].id,
+ fileName: testData.attachments.text.filename,
+ type: testData.attachments.text.type,
+ size: testData.attachments.text.size
+ };
+ if (integrity.attachments.isActive) attData.integrity = testData.attachments.text.integrity;
+ expectedAttachments.push(attData);
- attachmentsCheck.compareTestAndAttachedFiles(user, event.id,
- attachments[attachments.length - 1].id,
- testData.attachments.text.filename).should.equal('');
+ const attachments = updatedEvent.attachments;
+ should(attachments.length).be.eql(expectedAttachments.length);
+ attachments.should.eql(expectedAttachments);
- eventsNotifCount.should.eql(1, 'events notifications');
+ assert.isTrue(await attachmentsCheck.compareTestAndAttachedFiles(user, event.id,
+ attachments[attachments.length - 1].id,
+ testData.attachments.text.filename));
- done();
+ eventsNotifCount.should.eql(1, 'events notifications');
+
+ done();
+ } catch (e) {
+ done(e);
+ }
});
});
});
@@ -1407,7 +1432,7 @@ describe('events', function () {
expected.tags = ['yippiya'];
expected.modified = time;
expected.modifiedBy = access.id;
- expected.attachments = original.attachments;
+ expected.attachments = testData.dynCreateAttachmentIdMap[expected.id];
expected.streamIds = data.streamIds.concat(expected.tags.map(t => TAG_PREFIX + t));
validation.checkObjectEquality(res.body.event, expected);
@@ -1711,41 +1736,43 @@ describe('events', function () {
describe('DELETE //', function () {
beforeEach(resetEvents);
- it('[RW8M] must delete the attachment (reference in event + file)', function (done) {
+ it('[RW8M] must delete the attachment (reference in event + file)', async function () {
const event = testData.events[0];
- const fPath = path(event.id) + '/' + event.attachments[0].id;
- request.del(fPath).end(function (res) {
- validation.check(res, {
- status: 200,
- schema: methodsSchema.update.result
- });
-
- const updatedEvent = res.body.event;
- validation.checkFilesReadToken(updatedEvent, access, filesReadTokenSecret);
- validation.sanitizeEvent(updatedEvent);
- const expected = structuredClone(testData.events[0]);
- expected.attachments = expected.attachments.slice();
- // NOTE We cannot be sure that we still are at the exact same second that
- // we were just now when we did the call. So don't use time here, test
- // for time delta below.
- delete expected.modified;
- expected.modifiedBy = access.id;
- expected.modified = updatedEvent.modified;
- expected.attachments.shift();
- integrity.events.set(expected);
- validation.checkObjectEquality(updatedEvent, expected);
-
- const time = timestamp.now();
- should(updatedEvent.modified).be.approximately(time, 2);
-
- const filePath = eventFilesStorage.getAttachmentPath(user.id, event.id,
- event.attachments[0].id);
- fs.existsSync(filePath).should.eql(false, 'deleted file existence');
+ const attachmentId = testData.dynCreateAttachmentIdMap[event.id][0].id;
+ const fPath = path(event.id) + '/' + attachmentId;
+ const res = await request.del(fPath);
+ validation.check(res, {
+ status: 200,
+ schema: methodsSchema.update.result
+ });
- eventsNotifCount.should.eql(1, 'events notifications');
+ const updatedEvent = res.body.event;
+ validation.checkFilesReadToken(updatedEvent, access, filesReadTokenSecret);
+ validation.sanitizeEvent(updatedEvent);
+ const expected = structuredClone(testData.events[0]);
+ expected.attachments = expected.attachments.slice();
+ // NOTE We cannot be sure that we still are at the exact same second that
+ // we were just now when we did the call. So don't use time here, test
+ // for time delta below.
+ delete expected.modified;
+ expected.modifiedBy = access.id;
+ expected.modified = updatedEvent.modified;
+ expected.attachments = structuredClone(testData.dynCreateAttachmentIdMap[event.id]);
+ expected.attachments.shift();
+ integrity.events.set(expected);
+ validation.checkObjectEquality(updatedEvent, expected);
+
+ const time = timestamp.now();
+ should(updatedEvent.modified).be.approximately(time, 2);
+
+ try {
+ await mall.events.getAttachment(user.id, { id: event.id }, event.attachments[0].id);
+ throw new Error('Should not find attachment');
+ } catch (err) {
+ err.id.should.eql('unknown-resource');
+ }
- done();
- });
+ eventsNotifCount.should.eql(1, 'events notifications');
});
it('[ZLZN] must return an error if not existing', function (done) {
@@ -1784,24 +1811,24 @@ describe('events', function () {
});
it('[73CD] must delete the event when already trashed including all its attachments', function (done) {
- const id = testData.events[0].id;
+ const eventId = testData.events[0].id;
let event;
async.series([
async function getEvent () {
- event = await mall.events.getOne(user.id, id);
+ event = await mall.events.getOne(user.id, eventId);
},
async function trashEvent () {
event.trashed = true;
await mall.events.update(user.id, event);
},
function deleteEvent (stepDone) {
- request.del(path(id)).end(function (res) {
+ request.del(path(eventId)).end(function (res) {
validation.check(res, {
status: 200,
schema: methodsSchema.del.result
});
- res.body.eventDeletion.should.eql({ id });
+ res.body.eventDeletion.should.eql({ id: eventId });
eventsNotifCount.should.eql(1, 'events notifications');
stepDone();
});
@@ -1809,15 +1836,20 @@ describe('events', function () {
async function verifyEventData () {
const deletedEvents = await mall.events.getDeletions('local', user.id, { deletedSince: 0 });
const deletion = _.find(deletedEvents, function (event) {
- return event.id === id;
+ return event.id === eventId;
});
assert.exists(deletion);
- const expected = { id, deleted: deletion.deleted };
+ const expected = { id: eventId, deleted: deletion.deleted };
integrity.events.set(expected);
validation.checkObjectEquality(deletion.integrity, expected.integrity);
-
- const dirPath = eventFilesStorage.getEventPath(user.id, id);
- fs.existsSync(dirPath).should.eql(false, 'deleted event directory existence');
+ for (const attachment of event.attachments) {
+ try {
+ await mall.events.getAttachment(user.id, { id: eventId }, attachment.id);
+ throw new Error('Should not find attachment');
+ } catch (err) {
+ err.id.should.eql('unknown-resource');
+ }
+ }
}
],
done
@@ -1828,8 +1860,7 @@ describe('events', function () {
function resetEvents (done) {
eventsNotifCount = 0;
async.series([
- testData.resetEvents,
- testData.resetAttachments
+ testData.resetEvents
], done);
}
});
diff --git a/components/api-server/test/helpers/validation.js b/components/api-server/test/helpers/validation.js
index 7bad4f6..90f6c70 100644
--- a/components/api-server/test/helpers/validation.js
+++ b/components/api-server/test/helpers/validation.js
@@ -110,7 +110,12 @@ exports.check = function (response, expected, done) {
expected.sanitizeFn(response.body[expected.sanitizeTarget]);
}
if (expected.body) {
- assert.deepEqual(response.body, expected.body);
+ try {
+ assert.deepEqual(response.body, expected.body);
+ } catch (e) {
+      if (e.message) e.message = e.message.substr(0, 3000);
+ throw (e);
+ }
}
// restore ignored metadata
@@ -153,17 +158,21 @@ function checkAccessIntegrity (access) {
* @param {Function} [done] Optional
*/
exports.checkError = function (response, expected, done) {
- response.statusCode.should.eql(expected.status);
- checkJSON(response, schemas.errorResult);
+ try {
+ response.statusCode.should.eql(expected.status);
+ checkJSON(response, schemas.errorResult);
- const error = response.body.error;
- assert.equal(error.id, expected.id);
+ const error = response.body.error;
+ assert.equal(error.id, expected.id);
- if (expected.data != null) {
- assert.deepEqual(error.data, expected.data);
+ if (expected.data != null) {
+ assert.deepEqual(error.data, expected.data);
+ }
+ if (done) done();
+ } catch (e) {
+ if (done) return done(e);
+ throw (e);
}
-
- if (done) done();
};
function checkJSON (response, schema) {
diff --git a/components/api-server/test/permissions.test.js b/components/api-server/test/permissions.test.js
index 4f94a4c..20a52c5 100644
--- a/components/api-server/test/permissions.test.js
+++ b/components/api-server/test/permissions.test.js
@@ -83,8 +83,7 @@ describe('[ACCP] Access permissions', function () {
describe('Events', function () {
before(function (done) {
async.series([
- testData.resetStreams,
- testData.resetAttachments
+ testData.resetStreams
], done);
});
@@ -113,7 +112,7 @@ describe('[ACCP] Access permissions', function () {
filesReadTokenSecret);
validation.sanitizeEvents(res.body.events);
events.forEach(integrity.events.set);
- res.body.events.should.eql(events);
+ res.body.events.should.eql(testData.addCorrectAttachmentIds(events));
done();
});
});
@@ -129,7 +128,8 @@ describe('[ACCP] Access permissions', function () {
filesReadTokenSecret);
validation.sanitizeEvents(res.body.events);
res.body.events = validation.removeAccountStreamsEvents(res.body.events);
- res.body.events.should.eql(validation.removeDeletionsAndHistory(testData.events).sort(
+ const cEvents = testData.addCorrectAttachmentIds(testData.events);
+ res.body.events.should.eql(validation.removeDeletionsAndHistory(cEvents).sort(
function (a, b) {
return b.time - a.time;
}
diff --git a/components/api-server/test/sockets.test.js b/components/api-server/test/sockets.test.js
index 2f230b1..a4ee03d 100644
--- a/components/api-server/test/sockets.test.js
+++ b/components/api-server/test/sockets.test.js
@@ -238,7 +238,7 @@ describe('Socket.IO', function () {
const actualAccountStreamsEvents = separatedEvents.accountStreamsEvents;
validation.validateAccountEvents(actualAccountStreamsEvents);
expectedEvents.forEach(integrity.events.set);
- result.events.should.eql(expectedEvents);
+ result.events.should.eql(testData.addCorrectAttachmentIds(expectedEvents));
// check deletions
const deleted = _.filter(testData.events, { deleted: true });
for (const el of deleted) {
@@ -253,7 +253,7 @@ describe('Socket.IO', function () {
.sortBy('id')
.value();
activeTestEvents.forEach(integrity.events.set);
- should(resultEvents).be.eql(activeTestEvents);
+ should(resultEvents).be.eql(testData.addCorrectAttachmentIds(activeTestEvents));
validation.checkMeta(result);
done();
});
diff --git a/components/api-server/test/streams.test.js b/components/api-server/test/streams.test.js
index 0d273f6..8daa008 100644
--- a/components/api-server/test/streams.test.js
+++ b/components/api-server/test/streams.test.js
@@ -33,11 +33,9 @@
*/
const async = require('async');
-const fs = require('fs');
const should = require('should'); // explicit require to benefit from static function
const timestamp = require('unix-timestamp');
const _ = require('lodash');
-const bluebird = require('bluebird');
const chai = require('chai');
const assert = chai.assert;
@@ -47,7 +45,6 @@ const server = helpers.dependencies.instanceManager;
const commonTests = helpers.commonTests;
const validation = helpers.validation;
const ErrorIds = require('errors').ErrorIds;
-const eventFilesStorage = helpers.dependencies.storage.user.eventFiles;
const methodsSchema = require('../src/schema/streamsMethods');
const testData = helpers.data;
@@ -876,6 +873,7 @@ describe('[STRE] streams', function () {
return e.streamIds[0] === id;
});
const deletedEventWithAtt = deletedEvents[0];
+ let deletedEventWithAttPost = null;
let deletionTime;
const ADD_N_EVENTS = 100;
@@ -886,6 +884,7 @@ describe('[STRE] streams', function () {
.attach('image', testData.attachments.image.path,
testData.attachments.image.fileName)
.end(function (res) {
+ deletedEventWithAttPost = res.body.event;
validation.check(res, { status: 200 });
eventsNotifCount = 0; // reset
stepDone();
@@ -946,39 +945,14 @@ describe('[STRE] streams', function () {
'Deletion time must be correct.');
assert.equal(actual.id, e.id);
});
-
- const dirPath = eventFilesStorage.getEventPath(user.id, deletedEventWithAtt.id);
-
- // some time after returning to the client. Let's hang around and try
- // this several times.
- await bluebird.fromCallback(cb => {
- assertEventuallyTrue(
- () => !fs.existsSync(dirPath),
- 5, // second(s)
- 'Event directory must be deleted' + dirPath,
- cb
- );
- });
+ try {
+ await mall.events.getAttachment(user.id, { id: deletedEventWithAttPost.id }, deletedEventWithAttPost.attachments[0].id);
+ throw new Error('Should not find attachment');
+ } catch (err) {
+ err.id.should.eql('unknown-resource');
+ }
}
], done);
-
- function assertEventuallyTrue (property, maxWaitSeconds, msg, cb) {
- const deadline = new Date().getTime() + maxWaitSeconds;
- const checker = () => {
- if (new Date().getTime() > deadline) {
- return cb(new chai.AssertionError('Timeout: ' + msg));
- }
-
- const result = property();
- if (result) return cb();
-
- // assert: result is false, try again in a bit.
- setImmediate(checker);
- };
-
- // Launch first check
- setImmediate(checker);
- }
});
it('[1U1M] must return a correct error if the item is unknown', function (done) {
diff --git a/components/api-server/test/versioning.test.js b/components/api-server/test/versioning.test.js
index 4764bcd..739aa38 100644
--- a/components/api-server/test/versioning.test.js
+++ b/components/api-server/test/versioning.test.js
@@ -82,7 +82,6 @@ describe('Versioning', function () {
testData.resetAccesses,
testData.resetStreams,
testData.resetEvents,
- testData.resetAttachments,
server.ensureStarted.bind(server, settings),
function (stepDone) {
request = helpers.request(server.url);
diff --git a/components/business/src/auth/deletion.js b/components/business/src/auth/deletion.js
index 407ac68..c0199d8 100644
--- a/components/business/src/auth/deletion.js
+++ b/components/business/src/auth/deletion.js
@@ -111,12 +111,10 @@ class Deletion {
*/
async validateUserFilepaths (context, params, result, next) {
const dirPaths = [
- this.storageLayer.eventFiles.getUserPath(context.user.id),
path.join(this.config.get('eventFiles:previewsDirPath'), context.user.id)
];
// NOTE User specific paths are constructed by appending the user _id_ to the
- // `paths` constant above. I know this because I read EventFiles#getXPath(...)
- // in components/storage/src/user/EventFiles.js.
+ // `paths` constant above.
// NOTE Since user specific paths are created lazily, we should not expect
// them to be there. But _if_ they are, they need be accessible.
// Let's check if we can change into and write into the user's paths:
diff --git a/components/business/src/users/repository.js b/components/business/src/users/repository.js
index 51dc1d6..252b864 100644
--- a/components/business/src/users/repository.js
+++ b/components/business/src/users/repository.js
@@ -307,7 +307,7 @@ class UsersRepository {
const events = await user.getEvents();
// add the user to local index
await this.usersIndex.addUser(user.username, user.id);
- await this.mall.events.createMany(user.id, events, mallTransaction);
+ for (const event of events) await this.mall.events.create(user.id, event, mallTransaction);
// set user password
if (user.passwordHash) {
// if coming from deprecated `system.createUser`; TODO: remove when that method is removed
diff --git a/components/mall/src/Mall.js b/components/mall/src/Mall.js
index bf2e2c2..c486a83 100644
--- a/components/mall/src/Mall.js
+++ b/components/mall/src/Mall.js
@@ -125,29 +125,28 @@ class Mall {
}
/**
- * Return the quantity of storage used by the user in bytes.
+ * Return storage information per store ID.
* @param {string} userId
- * @returns {Promise}
+ * @returns {Promise