diff --git a/.github/assets/aerospike.conf b/.github/assets/aerospike.conf new file mode 100644 index 000000000..8c28acc5a --- /dev/null +++ b/.github/assets/aerospike.conf @@ -0,0 +1,47 @@ +service { + cluster-name vector-search + feature-key-file /etc/aerospike/features.conf +} +logging { + console { + context any info + } +} +network { + service { + address any + port 3000 + } + heartbeat { + address local + interval 150 + mode mesh + port 3002 + timeout 10 + } + fabric { + address local + port 3001 + } +} +namespace avs-meta { + allow-ttl-without-nsup true + replication-factor 2 + storage-engine memory { + data-size 2G + } +} +namespace test { + allow-ttl-without-nsup true + replication-factor 2 + storage-engine memory { + data-size 1G + } +} +namespace index_storage { + allow-ttl-without-nsup true + replication-factor 2 + storage-engine memory { + data-size 1G + } +} diff --git a/.github/workflows/build-bindings.yml b/.github/workflows/build-bindings.yml index 042d7258c..6a5faba98 100644 --- a/.github/workflows/build-bindings.yml +++ b/.github/workflows/build-bindings.yml @@ -141,7 +141,11 @@ jobs: - name: Run Aerospike server if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} - run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + run: | + npm i @types/mocha @types/yargs @types/semver @types/chai + docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + + # - name: Set config.conf to use Docker IP address of Aerospike server # # config.conf should be copied into the cibuildwheel Docker container @@ -154,7 +158,7 @@ jobs: # working-directory: test - name: Enable tests - run: echo "TEST_COMMAND=npm test -- --h 127.0.0.1 --port 3000" >> $GITHUB_ENV + run: echo "TEST_COMMAND=npm run ts-test -- --h 127.0.0.1 --port 3000" >> $GITHUB_ENV # - name: Disable tests (only run basic import test) # if: ${{ !inputs.run_tests }} @@ -553,12 +557,27 @@ jobs: - name: Run Aerospike 
server release candidate with latest tag if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} - run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server-rc:latest + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server-rc:latest - name: Run Aerospike server if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} - run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server - + run: | + cd ts-test; + npm i --save-dev @types/mocha; + npm i --save-dev @types/yargs; + npm i --save-dev @types/semver; + npm i --save-dev @types/chai; + tsc; + cd ..; + npm install; + docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + + + + - name: Wait for database to be ready + # Should be ready after 3 seconds + run: sleep 15 + - name: Modify the package.json run: | sudo npm install -g json diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0d642062b..1ba5f9cb1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -153,7 +153,7 @@ jobs: run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 - name: Run Aerospike server - run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server:7.1 - name: Wait for database to be ready # Should be ready after 3 seconds @@ -217,14 +217,19 @@ jobs: - name: Run Aerospike server if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} - run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server:7.1 - name: Wait for database to be ready # Should be ready after 3 seconds run: sleep 3 - name: Run tests - 
run: npm run test + run: | + cd ts-test; + npm install typescript --save-dev; + npx tsc; + cd ..; + npm run ts-test test-lowest-supported-server: runs-on: ubuntu-latest @@ -372,7 +377,7 @@ jobs: - name: Run Aerospike server if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} - run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server:7.1 - name: Wait for database to be ready # Should be ready after 3 seconds diff --git a/.gitignore b/.gitignore index f475410e2..a24a2ad00 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,6 @@ npm-debug.log test.log aerospike-*.tgz scripts/build-c-client.sh-output.txt +libuv* +docs* +ts-test/dist diff --git a/README.md b/README.md index f33adabc0..6eb00a43e 100644 --- a/README.md +++ b/README.md @@ -313,7 +313,7 @@ The API docs also contain a few basic tutorials: * [Handling asynchronous database operations using Callbacks, Promises or `async`/`await`](https://www.aerospike.com/apidocs/nodejs/tutorial-callbacks_promises_async_await.html) A variety of additional example applications are provided in the -[`examples`](examples) directory of this repository. +[`examples`](https://github.com/aerospike/aerospike-client-nodejs/tree/master/examples) directory of this repository. Access backward incompatible API changes by a release at: https://developer.aerospike.com/client/nodejs/usage/incompatible. 
diff --git a/aerospike-client-c b/aerospike-client-c index 54e250756..87a9dd67c 160000 --- a/aerospike-client-c +++ b/aerospike-client-c @@ -1 +1 @@ -Subproject commit 54e25075646e815f023b5923603d35fa9d9b8f00 +Subproject commit 87a9dd67c6bb707f915bab6909a11d78e46890f4 diff --git a/docs/api-changes.md b/docs/api-changes.md deleted file mode 100644 index 87026fcc8..000000000 --- a/docs/api-changes.md +++ /dev/null @@ -1,4 +0,0 @@ -# Backward Incompatible API Changes - -The documentation of backward incompatible API changes has been moved to -https://www.aerospike.com/docs/client/nodejs/usage/incompatible.html. diff --git a/docs/assets/vc-2015-3-v140.png b/docs/assets/vc-2015-3-v140.png deleted file mode 100644 index 7d80a2d70..000000000 Binary files a/docs/assets/vc-2015-3-v140.png and /dev/null differ diff --git a/docs/assets/windows-8-1-sdk.png b/docs/assets/windows-8-1-sdk.png deleted file mode 100644 index 07d29587d..000000000 Binary files a/docs/assets/windows-8-1-sdk.png and /dev/null differ diff --git a/docs/overview.md b/docs/overview.md deleted file mode 100644 index e736bf954..000000000 --- a/docs/overview.md +++ /dev/null @@ -1,170 +0,0 @@ -# Aerospike Node.js Client API - -This documentation describes the Aerospike Node.js Client API in detail. The -Aerospike Client API package is available for download from -[www.aerospike.com](http://www.aerospike.com/download/client/nodejs/). -The source code is available on [GitHub](https://github.com/aerospike/aerospike-client-nodejs). -The Aerospike Client API package can also be -installed via npm from the [npmjs.com](https://www.npmjs.com/package/aerospike) -package repository. For more information about the Aerospike high-performance -NoSQL database, please refer to [http://www.aerospike.com/](http://www.aerospike.com/). 
- -## Contents - -The `aerospike` package provides the `aerospike` module, which includes -submodules, classes, and module level functions that enable Node.js -applications to connect to Aerospike database clusters. - -### Modules - -The main modules included in the `aerospike` package are: - -* The {@link module:aerospike|aerospike module} contains the core classes - that make up the Client API, such as the {@link Client}, {@link Record}, - {@link Query} classes, etc. It provides module level functions to connect to - an Aerospike cluster. -* The {@link module:aerospike/policy|policy module} defines policies and policy - values that define the behavior of database operations. -* The {@link module:aerospike/filter|filter module} and {@link - module:aerospike/exp|exp module} define secondary index (SI) filters and - expressions that can be used to limit the scope of query - operations. -* The {@link module:aerospike/operations|operations}, {@link - module:aerospike/lists|lists} and {@link module:aerospike/maps|maps} modules - define the operations on scalar, list and map values that can be executed - with the {@link Client#operate} command. -* The {@link module:aerospike/info|info module} includes utility functions - for parsing the info data returned by the Aerospike cluster. - The info protocol provides access to configuration and statistics for the Aerospike server. - -### Classes - -The main classes included in the `aerospike` module are: - -* {@link Client} - The main interface of the Aerospike client. Commands such as put, get or query can be sent to an Aerospike - database cluster using the Client class. -* {@link Key} - Keys are used to uniquely identify a record in the Aerospike database. -* {@link Record} - Records consists of one or more record "bins" (name-value - pairs) and meta-data (time-to-live and generation); a - record is uniquely identified by it's key within a given namespace. 
-* {@link Query} - The Query class can be used to perform value-based searches - on secondary indexes. -* {@link Scan} - The Scan class scans the entirety of a namespace and performs - various read and write operations on records within. -* {@link RecordStream} - Queries and scans return records through a - RecordStream instance which acts as an EventEmitter. -* {@link Job} - The Job class is used to query the status of long running - background jobs, such as background scans or index creation. -* {@link Double} - Wrapper class for double precision floating point values. -* {@link GeoJSON} - A representation of GeoJSON values. -* {@link AerospikeError} - Error class representing a Aerospike server and/or client error. - -All modules and classes can also be accessed directly through the drop-down menu at the top of each page. - -## Supported Data Types - -Aerospike supports the following data types: - -| Aerospike data type | Mapping to Node.js data type | -|---------------------|------------------------------| -| Integer | Number or BigInt | -| Double | Number | -| String | String | -| Boolean | Boolean | -| Bytes | Buffer | -| List | Array | -| Map | Object | -| HyperLogLog | Buffer | - -**Note:** Support for the **Boolean** data type requires server 5.6+ -and Aerospike Node.js client version 4.0+. - -### Nested Data Structure - -Lists and Maps can contain any of the other supported data types and -can be nested, e.g. lists-within-lists, maps-within-maps, lists-within-maps, -etc., to an arbitrary depth. To perform operations on nested lists and maps, -you can provide a {@link CdtContext CDT Context} object to the list and map -operations. - -## Example - -The following is very simple example of how to write and read a record from Aerospike. - -```js -const Aerospike = require('aerospike') - -// INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
-const config = { - hosts: '192.168.33.10:3000', -} - -const key = new Aerospike.Key('test', 'demo', 'demo') - -Aerospike.connect(config) - .then(client => { - const bins = { - i: 123, - s: 'hello', - b: Buffer.from('world'), - d: new Aerospike.Double(3.1415), - g: Aerospike.GeoJSON.Point(103.913, 1.308), - l: [1, 'a', {x: 'y'}], - m: {foo: 4, bar: 7} - } - const meta = { ttl: 10000 } - const policy = new Aerospike.WritePolicy({ - exists: Aerospike.policy.exists.CREATE_OR_REPLACE, - // Timeouts disabled, latency dependent on server location. Configure as needed. - socketTimeout : 0, - totalTimeout : 0 - }) - - return client.put(key, bins, meta, policy) - .then(() => { - const ops = [ - Aerospike.operations.incr('i', 1), - Aerospike.operations.read('i'), - Aerospike.lists.append('l', 'z'), - Aerospike.maps.removeByKey('m', 'bar') - ] - - return client.operate(key, ops) - }) - .then(result => { - console.log(result.bins) // => { i: 124, l: 4, m: null } - - return client.get(key) - }) - .then(record => { - console.log(record.bins) // => { i: 124, - // s: 'hello', - // b: , - // d: 3.1415, - // g: '{"type":"Point","coordinates":[103.913,1.308]}', - // l: [ 1, 'a', { x: 'y' }, 'z' ], - // m: { foo: 4 } } - }) - .then(() => client.close()) - }) - .catch(error => { - console.error('Error: %s [%i]', error.message, error.code) - if (error.client) { - error.client.close() - } - }) -``` - -## Tutorials - -The following tutorials provide more in-depth examples for specific aspects of working with the Aerospike Node.js Client SDK: - -* {@tutorial getting_started} -* {@tutorial node_clusters} -* {@tutorial callbacks_promises_async_await} - -## Further Documentation - -For a detailed technical documentation of the Aerospike distributed NoSQL -database, including an architecture overview and in-depth feature guides, -please visit http://www.aerospike.com/docs. 
diff --git a/docs/tutorials/callbacks_promises_async_await.md b/docs/tutorials/callbacks_promises_async_await.md deleted file mode 100644 index 8575c1b2b..000000000 --- a/docs/tutorials/callbacks_promises_async_await.md +++ /dev/null @@ -1,181 +0,0 @@ -Starting with version 3.0, the Aerospike Node.js client supports both -Node.js-style callbacks as well as Promises for all asynchronous database -operations. If you are using Node.js v8 or later, that means you can also use -the client with async/await. This brief tutorial demonstrates how to use all -three methods of dealing with asynchronous client operations. - -In all three cases we will implement a trivial demo program, that writes a new -record to an Aerospike database, then reads that record back, and finally -deletes the record. Potential errors will need to be handled correctly in each -of these steps. We also need to take care to close the client connection at the -end of the program because if the connection is left open this will prevent the -Node.js event loop from closing down. - -## Callbacks - -First we will use traditional Node.js style callbacks to handle asynchronous -database operations. All client commands accept a callback function as the last -function parameter. This callback function will be called once the database -operation has completed. The exact method signature for the callback varies -from command to command. But all callback functions take an `error` value as -their first argument. The `error` value will either be `null`, if the operation -was successful, or else it will be an instance of the `AerospikeError` class. - -As a second parameter, some callbacks will receive a single result value. E.g. -the callback for the client's `get` command returns the `Record` object that it -read from the database. Other operations, such as the `truncate` command do not -return a result value. In any case, the result value will be `undefined` if the -operation failed, i.e. 
if the callback is called with an `AerospikeError`. - -Here is our simple demo that writes, reads and finally deletes a single record -from the database: - -```javascript -const Aerospike = require('aerospike') - -function abort (error, client) { - console.error('Error:', error) - if (client) client.close() - process.exit(1) -} - -Aerospike.connect(function (error, client) { - if (error) abort(error, client) - - let key = new Aerospike.Key('test', 'test', 'abcd') - let bins = { - name: 'Norma', - age: 31 - } - - client.put(key, bins, function (error) { - if (error) abort(error, client) - client.get(key, function (error, record) { - if (error) abort(error, client) - console.info('Record:', record) - client.remove(key, function (error) { - if (error) abort(error, client) - client.close() - }) - }) - }) -}) -``` - -Notice how we need to check for errors in every single callback function. If -the none of the database operations fails, we need to close the client -connection in the last callback that is being executed, i.e. after the `remove` -operation was successfully completed. In case of an error we also need to close -the connection before terminating the program. - -## Promises - -The Mozilla Developer Network (MDN) describes Promises as follows: "A Promise -is an object representing the eventual completion or failure of an asynchronous -operation. [...] Essentially, a promise is a returned object to which you -attach callbacks, instead of passing callbacks into a function." More -information about the usage of Promises can be found in [this excellent -guide](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises) -on the MDN web site. - -To use Promises with the Aerospike Node.js client, you simply omit the callback -parameter when calling any of the client's asynchronous database operations. If -no callback function is passed, the client will return a Promise instead. 
- -In the case of a successful completion of the database operation, the Promise -will resolve to the same result value passed to the Node.js-style callback. In -the case of a failure, the Promise resolves to an `AerospikeError` instance -instead. - -Let's see, how our simple demo looks like when using Promises instead of -Node.js-style callback functions: - -```javascript -const Aerospike = require('aerospike') - -Aerospike.connect() - .then(client => { - let key = new Aerospike.Key('test', 'test', 'abcd') - let bins = { - name: 'Norma', - age: 31 - } - - return client.put(key, bins) - .then(() => client.get(key)) - .then(record => console.info('Record:', record)) - .then(() => client.remove(key)) - .then(() => client.close()) - .catch(error => { - client.close() - throw error - }) - }) - .catch(error => { - console.error('Error:', error) - process.exit(1) - }) -``` - -You can see that the demo makes heavy use of "promise chaining" to execute two -or more asynchronous operations back to back, where each subsequent operation -starts when the previous operation succeeds, with the result from the previous -step. This simplifies error handling, as we only need to handle errors once at -the end of the chain. - -But note that we still need to take care to close the client connection -regardless of whether the operations succeed or fail. - -## async/await - -In our last version of the demo program, we are making use of the new `await` -operator and `async` functions introduced in Node.js v8. To [quote -MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) -again, "the purpose of async/await functions is to simplify the behavior of -using promises synchronously and to perform some behavior on a group of -Promises. Just as Promises are similar to structured callbacks, async/await is -similar to combining generators and promises." 
- -Using the new `await` operator, our Promises-based demo can be simplified -further: - -```javascript -const Aerospike = require('aerospike') - -;(async function () { - let client - try { - client = await Aerospike.connect() - let key = new Aerospike.Key('test', 'test', 'abcd') - let bins = { - name: 'Norma', - age: 31 - } - - await client.put(key, bins) - let record = await client.get(key) - console.info('Record:', record) - await client.remove(key) - } catch (error) { - console.error('Error:', error) - process.exit(1) - } finally { - if (client) client.close() - } -})() -``` - -The `await` expression causes async function execution to pause, to wait for -the Promise's resolution, and to resume the async function execution when the -value is resolved. It then returns the resolved value. If the value is not a -Promise, it's converted to a resolved Promise. - -If the Promise is rejected, the `await` expression throws the rejected value. - -Note that we have to wrap our code in an anonymous, `async` function to use -`await`. - -Because we can use regular `try...catch...finally` statements to handle -synchronous as well as asynchronous errors when using `await`, we can ensure -that the client connection gets close regardless of whether the database -operations succeeded or failed. diff --git a/docs/tutorials/getting_started.md b/docs/tutorials/getting_started.md deleted file mode 100644 index c7f55fda0..000000000 --- a/docs/tutorials/getting_started.md +++ /dev/null @@ -1,120 +0,0 @@ -### Installing - -Install the `aerospike` npm package and save it in your applications dependency list: - - npm install aerospike --save - -(To install the package temporarily and not add it to the dependencies list, omit the `--save` option.) - -The package contains a native C/C++ add-on which will be build automatically -during the installation. 
For more information, please refer to the -[prerequisites](https://github.com/aerospike/aerospike-client-nodejs#prerequisites) -and detailed [installation -instructions](https://github.com/aerospike/aerospike-client-nodejs#installation) -in the `aerospike-client-nodejs` Github repository. - -### Configuration - -First, you need to require the `aerospike` module in your application: - - const Aerospike = require('aerospike') - -Before you can connect to the Aerospike database, you need to configure the -client and specify the hostname of at least one of the Aerospike server nodes -in your cluster. The client will be able to retrieve the details of the entire -cluster from the first node it manages to connect to. But you can specify the -hostnames of more than one cluster node, in case one of them is not reachable. - -The specify the hostnames, you can either set the `AEROSPIKE_HOSTS` environment -variable, or you can set the hostnames in the client configuration inside your -code. - -The `AEROSPIKE_HOSTS` environment variable can take a comma-separate list of -hostnames with optional port numbers, e.g. - - export AEROSPIKE_HOSTS=192.168.1.10:3000,192.168.1.11:3100 - -The port number is optional and defaults to the default port of the Aerospike -server (3000). If no hostname is specified, the client will try to connect to -localhost (127.0.0.1:3000) by default. - -Alternatively, you can configure the client programatically by creating a config object: - - var config = { - hosts: "192.168.1.10:3000,192.168.1.11:3100" - } - -The `hosts` key takes the same comma-separated list of hostnames and ports as -the `AEROSPIKE_HOSTS` environment variable. - -The config object can also be used to configure other aspects of the client. -Please refer to the [Config]{@link Client~Config} type definition for more -details. 
- -### Connecting - -With the client configured, it's time to connect to the database cluster: - - Aerospike.connect(config, function (error, client) { - // client is ready to accept commands - }) - -The [Aerospike.connect()]{@link module:aerospike.connect} method takes the -config object as an optional parameter and connects to the Aerospike cluster. -It will establish separate connections to all the nodes in the cluster and will -maintain the a copy of the cluster configuration for as long as it is -connected. That allows it so send client commands to one or more server nodes -as appropriate. - -Once the client has established the connections, it will call the callback -method passed in the connect() function. If there was an error connecting, the -`error` parameter will contain an instance of the {@link AerospikeError} class. -Otherwise it will be `null` and the `client` parameter will contain an instance -of the {@link Client} class. - -### Sending Commands - -The client instance can be used to send various commands to the Aerospike -cluster for creating records, reading records, running queries, etc. Please -refer to the documentation of the {@link Client} class for details. Most client -commands work asynchronously and use callback functions to return the status of -the command and results (if any) back to the client. 
- -Example of writing, then reading a database record: - -``` -const Aerospike = require('aerospike') -const Key = Aerospike.Key - -function assertOk (error, message) { - if (error) { - console.error('ERROR - %s: %s [%s]\n%s', message, error.message, error.code, error.stack) - throw error - } -} - -Aerospike.connect(function (error, client) { - assertOk(error, 'Connecting to Aerospike cluster') - - var key = new Key('test', 'demo', 'test1') - client.put(key, {name: 'Bob', age: 49}, function (error) { - assertOk(error, 'Writing database record') - - client.get(key, function (error, record) { - assertOk(error, 'Reading database record') - - console.info(record) // => { name: 'Bob', age: 49 } - - client.close() - }) - }) -}) -``` - -### Closing the Connection - -As seen in the previous example, it is important to close the connection to the -Aerospike cluster once it is no longer required. The client's C/C++ add-on is -using Node.js's libuv event loop for executing the client commands -asynchronously, and keeping the connection open will prevent the event loop -from terminating. diff --git a/docs/tutorials/index.json b/docs/tutorials/index.json deleted file mode 100644 index 6b26219ec..000000000 --- a/docs/tutorials/index.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "getting_started": { - "title": "Getting Started - Connecting to an Aerospike database cluster" - }, - "node_clusters": { - "title": "Managing Aerospike connections in a Node cluster" - }, - "callbacks_promises_async_await": { - "title": "Handling asynchronous database operations using Callbacks, Promises or async/await" - } -} diff --git a/docs/tutorials/node_clusters.md b/docs/tutorials/node_clusters.md deleted file mode 100644 index edb69dceb..000000000 --- a/docs/tutorials/node_clusters.md +++ /dev/null @@ -1,224 +0,0 @@ -### The Node.js Cluster Module - -The `cluster` module, that's part of the core Node.js library, provides a -simple, yet powerful interface to launch a cluster of Node.js processes. 
This -allows a Node.js application to take advantage of multi-core systems to handle -more load than a single Node.js instance running in a single thread. - -The child processes can all share server ports and the cluster module can provide -simple load balancing to distribute incoming requests between worker processes. - -### Connecting to the Aerospike Cluster - -In a Node.js application using the cluster module, each worker process will -have to maintain it's own connection to the Aerospike database cluster. That -means the {@link Client#connect} method must be called in the worker, not in -the master process. - -### Using Shared Memory to More Efficiently Manage Cluster Status - -Each Aerospike client instance runs an extra cluster _tend_ thread that -periodically polls the server nodes for cluster status. This is required to, -among other things, maintain the data partition map, so that the client knows -how to send database commands to the cluster node that holds the master replica -of a given record. - -To reduce the overhead of the cluster tend process in a -multi-process/single-thread environment like a Node.js cluster, the client can -use shared memory to store cluster status, including nodes and data partition -maps, in a shared memory segment. Only one cluster tend owner process polls the -server nodes for cluster status and writes to this shared memory segment. All -other client processes read the cluster status from shared memory and do not -have to poll the server nodes. - -The usage of shared memory needs to be configured when a new client instance is -created, e.g. using {@link module:aerospike.connect}: - -``` -const config = { - sharedMemory: { - key: 0xa5000000 - } -} -const client = Aerospike.client(config) -``` - -See {@link Config#sharedMemory} for more details. 
- -### Closing the Aerospike Connection - -It is recommended that you close the connection to the Aerospike cluster using -the {@link Client#close} method, before you terminate the worker process. When -the Aerospike client is configured to use shared memory to maintain the cluster status, -one of the client instances will hold a lock on the shared memory region. If -this instance is killed without closing the connection, the lock will not be -released. Another instance may have to wait for the lock to expire (default: 30 -seconds) before it can take over the cluster tending process. - -### A Simple Example - -In this simple example, we will setup a clustered HTTP server that connects to -an Aerospike database to read and write database records to server the incoming -HTTP requests. Besides the `Aerospike` module we will use the `cluster` module -and the `http` module that ship with Node.js, as well as a few utility modules: - -``` -const Aerospike = require('aerospike') -const cluster = require('cluster') -const http = require('http') -const url = require('url') -const debug = require('util').debuglog('server') -``` - -We setup the Aerospike client to use shared memory: - -``` -const config = { - sharedMemory: { - key: 0xa5000000 - } -} -const client = Aerospike.client(config) -``` - -If this is the master process, we spawn off a number of worker processes. If we -are in a worker process, then start the server. Before the worker process -exists we should stop the server cleanly. - -``` -const noWorkers = 4 // pick this no. based on number of CPUs, size of RAM, etc. 
- -if (cluster.isMaster) { - // spawn new worker processes - for (var i = 0; i < noWorkers; i++) { - cluster.fork() - } -} else { - // in spawend worker process - var id = cluster.worker.id - debug('worker %s starting', id) - startServer() - process.on('SIGINT', () => { - debug('worker %s exiting', id) - stopServer() - process.exit() - }) -} -``` - -Next we setup our HTTP server and connect to the Aerospike cluster. For -incoming GET requests we read a record from the database and return it; a POST -request is used to write a new record to the database. The key is derived from -the request path. - -``` -function startServer () { - client.connect((err) => { if (err) throw err }) - http.createServer((req, res) => { - debug('incoming request on worker %s', cluster.worker.id) - var key = keyFromPath(req) - var responder = sendResponse.bind(null, res) - switch (req.method) { - case 'GET': - client.get(key, responder) - break - case 'POST': - var body = '' - req.on('data', (chunk) => { body += chunk }) - req.on('end', () => { - var record = JSON.parse(body) - client.put(key, record, responder) - }) - break - } - }).listen(8000) -} - -function stopServer () { - client.close() -} - -function keyFromPath (req) { - var path = url.parse(req.url)['pathname'] - var key = path.slice(1) // remove leading '/' - return new Aerospike.Key('test', 'demo', key) -} -``` - -Next we define a method `sendResponse` that send the response back to the -client. If there was an error, we send an HTTP error status code, e.g. "404 Not -Found" if the database record was not found. In case of a succesful request, we -format the response as JSON and send it to the client. 
- -``` -function sendResponse (res, error, body) { - if (error) { - switch (error.code) { - case Aerospike.status.ERR_RECORD_NOT_FOUND: - res.writeHead(404, error.message) - break - default: - res.writeHead(500, error.message) - } - } else if (body) { - res.writeHead(200, { 'Content-Type': 'application/json' }) - res.write(JSON.stringify(body)) - } - res.end() -} -``` - -And that's it! We can test our sample application and set `NODE_DEBUG=server` for some extra debugging output: - -``` -$ NODE_DEBUG=server node node_clusters.js -SERVER 56288: worker 1 starting -SERVER 56290: worker 3 starting -SERVER 56289: worker 2 starting -SERVER 56291: worker 4 starting -``` - -Let's do some test requests to read and write some records: - -``` -$ curl -i -H "Content-Type: application/json" http://localhost:8000/myTestKey -HTTP/1.1 404 Not Found -Date: Thu, 12 May 2016 08:21:31 GMT -Connection: keep-alive -Transfer-Encoding: chunked - -$ curl -i -H "Content-Type: application/json" --data-binary '{"x": 1234, "y": "abcd"}' http://localhost:8000/myTestKey -HTTP/1.1 200 OK -Content-Type: application/json -Date: Thu, 12 May 2016 08:21:49 GMT -Connection: keep-alive -Transfer-Encoding: chunked - -{"ns":"test","set":"demo","key":"myTestKey","digest":{"type":"Buffer","data":[149,186,200,254,138,5,9,52,115,70,1,194,131,184,51,95,232,241,179,74]}} - -$ curl -i -H "Content-Type: application/json" http://localhost:8000/myTestKey -HTTP/1.1 200 OK -Content-Type: application/json -Date: Thu, 12 May 2016 08:21:55 GMT -Connection: keep-alive -Transfer-Encoding: chunked - -{"x":1234,"y":"abcd"} -``` - -We can see the incoming requests in the server console: - -``` -SERVER 56288: incoming GET request for /myTestKey on worker 1 -SERVER 56290: incoming POST request for /myTestKey on worker 3 -SERVER 56289: incoming GET request for /myTestKey on worker 2 -``` - -When we end the server by pressing CTRL+C in the console, we can see that the worker processes are being shutdown cleanly: - -``` -SERVER 
56288: worker 1 exiting -SERVER 56290: worker 3 exiting -SERVER 56289: worker 2 exiting -SERVER 56291: worker 4 exiting -``` diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 000000000..3276c526e --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,10 @@ +import globals from 'globals' +import pluginJs from '@eslint/js' +import tseslint from 'typescript-eslint' + +export default [ + { files: ['**/*.{js,mjs,cjs,ts}'] }, + { languageOptions: { globals: globals.browser } }, + pluginJs.configs.recommended, + ...tseslint.configs.recommended +] diff --git a/k.js b/k.js new file mode 100644 index 000000000..52b8a5bb0 --- /dev/null +++ b/k.js @@ -0,0 +1,2 @@ +aerospike = require("./lib/aerospike") +console.log(aerospike.exp.operations) \ No newline at end of file diff --git a/lib/aerospike.js b/lib/aerospike.js index b5f0b230b..6c44fbc34 100644 --- a/lib/aerospike.js +++ b/lib/aerospike.js @@ -272,6 +272,20 @@ exports.Record = require('./record') */ exports.Bin = require('./bin') +/** + * Scan class returned by the {@link Client#scan} method. + * + * @summary {@link Scan} class + */ +exports.Scan = require('./scan') + +/** + * Query class returned by the {@link Client#query} method. + * + * @summary {@link Query} class + */ +exports.Query = require('./query') + // ======================================================================== // Enumerations // ======================================================================== @@ -402,7 +416,7 @@ exports.log = as.log * * @property {number} NAMESPACE_DEFAULT - Use the default TTL value for the * namespace of the record. - * @property {number} NEVER_EXIRE - Never expire the record. + * @property {number} NEVER_EXPIRE - Never expire the record. * @property {number} DONT_UPDATE - Update the record without changing the * record's TTL value. Requires server 3.10.1 or later. 
* @@ -461,6 +475,20 @@ exports.ttl = as.ttl */ exports.jobStatus = as.jobStatus +/** + * @summary IndexJob class + * + * @see {@link IndexJob} for more information. + */ +exports.IndexJob = require('./index_job') + +/** + * @summary Job class + * + * @see {@link Job} for more information. + */ +exports.Job = require('./job') + /** * @summary Enumeration of SI data types. * diff --git a/lib/batch_type.js b/lib/batch_type.js index bd5017045..f1b6d208b 100644 --- a/lib/batch_type.js +++ b/lib/batch_type.js @@ -30,25 +30,25 @@ const batchType = as.batchTypes // ======================================================================== module.exports = { /** - * An instance of class {@link Record} that is used in a batch for read operations. + * Indicates that a {@link Record} instance is used in a batch for read operations. * @const {number} */ BATCH_READ: batchType.BATCH_READ, /** - * An instance of class {@link Record} that is used in a batch for write operations. + * Indicates that a {@link Record} instance is used in a batch for write operations. * @const {number} */ BATCH_WRITE: batchType.BATCH_WRITE, /** - * An instance of class {@link Record} that is used in a batch for applying record. + * Indicates that a {@link Record} instance is used in a batch for applying record. * @const {number} */ BATCH_APPLY: batchType.BATCH_APPLY, /** - * An instance of class {@link Record} that is used in a batch for removal operations. + * Indicates that a {@link Record} instance is used in a batch for removal operations. * @const {number} */ BATCH_REMOVE: batchType.BATCH_REMOVE diff --git a/lib/bitwise.js b/lib/bitwise.js index 110dd5189..3e58d3ed0 100644 --- a/lib/bitwise.js +++ b/lib/bitwise.js @@ -157,7 +157,7 @@ exports.resize = function (bin, size, flags = 0) { * a value. * * @param {string} bin - The name of the bin. The bin must contain a byte value. - * @param {number} offset - Offset in bytes. + * @param {number} byteOffset - Offset in bytes. 
* @param {Buffer} value - Bytes to insert. * @returns {BitwiseOperation} Operation that can be passed to the {@link Client#operate} command. */ diff --git a/lib/client.js b/lib/client.js index cff231723..876c0c950 100644 --- a/lib/client.js +++ b/lib/client.js @@ -2285,7 +2285,17 @@ Client.prototype.exists = function exists (key, policy, callback) { policy = null } - const cmd = new Commands.Exists(this, [key, policy], callback) + const cmd = new Commands.Exists(this, null, [key, policy], callback) + return cmd.execute() +} + +Client.prototype.existsWithMetadata = function exists (key, policy, callback) { + if (typeof policy === 'function') { + callback = policy + policy = null + } + + const cmd = new Commands.Exists(this, key, [policy], callback) return cmd.execute() } diff --git a/lib/commands/batch_command.js b/lib/commands/batch_command.js index eb0185f27..6d0c4793c 100644 --- a/lib/commands/batch_command.js +++ b/lib/commands/batch_command.js @@ -20,9 +20,10 @@ const Command = require('./command') const Record = require('../record') class BatchResult { - constructor (status, record) { + constructor (status, record, inDoubt) { this.status = status this.record = record + this.inDoubt = inDoubt } } @@ -32,7 +33,7 @@ module.exports = asCommand => class BatchCommand extends Command(asCommand) { return results.map(result => { const record = new Record(result.key, result.bins, result.meta) - return new BatchResult(result.status, record) + return new BatchResult(result.status, record, result.inDoubt) }) } } diff --git a/lib/commands/exists_command.js b/lib/commands/exists_command.js index b9dd36653..08b739100 100644 --- a/lib/commands/exists_command.js +++ b/lib/commands/exists_command.js @@ -18,16 +18,35 @@ const status = require('../status') const Command = require('./command') +const Record = require('../record') module.exports = asCommand => class ExistsCommand extends Command(asCommand) { - convertResponse (error) { + constructor (client, key, args, callback) { + 
if (key) { + args = [key].concat(args) + } + super(client, args, callback) + this.key = key + } + + convertResponse (error, bins, metadata) { error = this.convertError(error) if (error && error.code === status.ERR_RECORD_NOT_FOUND) { + if (this.key) { + return [null, this.convertResult({ ttl: null, gen: null })] + } return [null, false] } else if (error) { return [error, null] } else { + if (this.key) { + return [null, this.convertResult(metadata)] + } return [null, true] } } + + convertResult (metadata) { + return new Record(this.key, null, metadata) + } } diff --git a/lib/exp_maps.js b/lib/exp_maps.js index db439f60e..34b7b66bf 100644 --- a/lib/exp_maps.js +++ b/lib/exp_maps.js @@ -46,7 +46,7 @@ function getMapType (type, returnType, isMulti) { throw new TypeError('either set the return type as auto or match with return object data type') } - if (type === exp.type.AUTO || type === expected) { + if ((type === exp.type.AUTO || type === expected)) { return expected } diff --git a/lib/policy.js b/lib/policy.js index 5414512c1..49543e7e4 100644 --- a/lib/policy.js +++ b/lib/policy.js @@ -17,27 +17,27 @@ 'use strict' const as = require('bindings')('aerospike.node') - -const BasePolicy = require('./policies/base_policy') +const AdminPolicy = require('./policies/admin_policy') const ApplyPolicy = require('./policies/apply_policy') -const OperatePolicy = require('./policies/operate_policy') -const QueryPolicy = require('./policies/query_policy') -const ReadPolicy = require('./policies/read_policy') -const RemovePolicy = require('./policies/remove_policy') -const ScanPolicy = require('./policies/scan_policy') -const WritePolicy = require('./policies/write_policy') -const BatchPolicy = require('./policies/batch_policy') +const BasePolicy = require('./policies/base_policy') const BatchApplyPolicy = require('./policies/batch_apply_policy') +const BatchPolicy = require('./policies/batch_policy') const BatchReadPolicy = require('./policies/batch_read_policy') const 
BatchRemovePolicy = require('./policies/batch_remove_policy') const BatchWritePolicy = require('./policies/batch_write_policy') - +const BitwisePolicy = require('./policies/bitwise_policy') const CommandQueuePolicy = require('./policies/command_queue_policy') const HLLPolicy = require('./policies/hll_policy') const InfoPolicy = require('./policies/info_policy') const ListPolicy = require('./policies/list_policy') const MapPolicy = require('./policies/map_policy') -const AdminPolicy = require('./policies/admin_policy') +const OperatePolicy = require('./policies/operate_policy') +const QueryPolicy = require('./policies/query_policy') +const ReadPolicy = require('./policies/read_policy') +const RemovePolicy = require('./policies/remove_policy') +const ScanPolicy = require('./policies/scan_policy') +const WritePolicy = require('./policies/write_policy') + /** * @module aerospike/policy * @@ -395,6 +395,12 @@ exports.BatchRemovePolicy = BatchRemovePolicy */ exports.BatchWritePolicy = BatchWritePolicy +/** + * A policy affecting the behavior of bitwise operations. + * + * @summary {@link BitwisePolicy} class + */ +exports.BitwisePolicy = BitwisePolicy /** * A policy affecting the use of the global command queue. * diff --git a/lib/status.js b/lib/status.js index d003cc3e5..5632a7e8b 100644 --- a/lib/status.js +++ b/lib/status.js @@ -67,6 +67,12 @@ exports.ERR_ASYNC_QUEUE_FULL = exports.AEROSPIKE_ERR_ASYNC_QUEUE_FULL = as.statu */ exports.ERR_CONNECTION = exports.AEROSPIKE_ERR_CONNECTION = as.status.AEROSPIKE_ERR_CONNECTION +/** + * TLS related error + * @const {number} + */ +exports.ERR_TLS_ERROR = exports.AEROSPIKE_ERR_TLS_ERROR = as.status.AEROSPIKE_ERR_TLS_ERROR + /** * Node invalid or could not be found. * @const {number} @@ -375,6 +381,12 @@ exports.FORBIDDEN_PASSWORD = exports.AEROSPIKE_FORBIDDEN_PASSWORD = as.status.AE */ exports.INVALID_CREDENTIAL = exports.AEROSPIKE_INVALID_CREDENTIAL = as.status.AEROSPIKE_INVALID_CREDENTIAL +/** + * Login session expired. 
+ * @const {number} + */ +exports.EXPIRED_SESSION = exports.AEROSPIKE_EXPIRED_SESSION = as.status.AEROSPIKE_EXPIRED_SESSION + /** * Role name is invalid. * @const {number} @@ -536,7 +548,7 @@ exports.ERR_LUA_FILE_NOT_FOUND = exports.AEROSPIKE_ERR_LUA_FILE_NOT_FOUND = as.s // ======================================================================== /** - * Prodeces a human-readable error message for the given status code. + * Produces a human-readable error message for the given status code. */ exports.getMessage = function (code) { /* istanbul ignore next */ diff --git a/package-lock.json b/package-lock.json index de872168c..48dac7f84 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ "win32" ], "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.11", + "ansi-colors": "^4.1.3", "bindings": "^1.5.0", "minimatch": "^3.1.2", "nan": "^2.19.0", @@ -27,11 +27,15 @@ "npm-run-all": "^4.1.5" }, "devDependencies": { - "@types/node": "^20.12.7", + "@eslint/js": "^9.12.0", + "@mapbox/node-pre-gyp": "^1.0.11", + "@types/node": "^22.7.4", "chai": "^4.4.1", "choma": "^1.2.1", "codecov": "^3.8.3", "deep-eql": "^4.1.3", + "eslint": "^8.57.1", + "globals": "^15.11.0", "husky": "^9.0.11", "mocha": "^10.4.0", "mocha-clean": "^1.0.0", @@ -40,6 +44,10 @@ "semver": "^7.6.0", "standard": "^17.1.0", "tmp": "^0.2.3", + "typedoc": "^0.26.7", + "typedoc-plugin-rename-defaults": "^0.7.1", + "typescript": "^5.6.2", + "typescript-eslint": "^8.8.1", "yargs": "^17.7.2" }, "engines": { @@ -51,6 +59,7 @@ "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" @@ -60,12 +69,13 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.6", - "resolved": 
"https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.6.tgz", - "integrity": "sha512-ZJhac6FkEd1yhG2AHOmfcXG4ceoLltoCVJjN5XsWN9BifBQr+cHJbWi0h68HZuSORq+3WtJ2z0hwF2NG1b5kcA==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/highlight": "^7.24.6", + "@babel/highlight": "^7.24.7", "picocolors": "^1.0.0" }, "engines": { @@ -73,30 +83,32 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.6.tgz", - "integrity": "sha512-aC2DGhBq5eEdyXWqrDInSqQjO0k8xtPRf5YylULqx8MCd6jBtzqfta/3ETMRpuKIc5hyswfO80ObyA1MvkCcUQ==", + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.4.tgz", + "integrity": "sha512-+LGRog6RAsCJrrrg/IO6LGmpphNe5DiK30dGjCoxxeGv49B10/3XYGxPsAwrDlMFcFEvdAUavDT8r9k/hSyQqQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.6.tgz", - "integrity": "sha512-qAHSfAdVyFmIvl0VHELib8xar7ONuSHrE2hLnsaWkYNTI68dmi1x8GYDhJjMI/e7XWal9QBlZkwbOnkcw7Z8gQ==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", + "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", "dev": true, + "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.24.6", - "@babel/generator": "^7.24.6", - "@babel/helper-compilation-targets": "^7.24.6", - "@babel/helper-module-transforms": "^7.24.6", - "@babel/helpers": "^7.24.6", - "@babel/parser": "^7.24.6", - "@babel/template": "^7.24.6", - "@babel/traverse": "^7.24.6", - "@babel/types": 
"^7.24.6", + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -115,24 +127,27 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@babel/core/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.6.tgz", - "integrity": "sha512-S7m4eNa6YAPJRHmKsLHIDJhNAGNKoWNiWefz1MBbpnt8g9lvMDl1hir4P9bo/57bQEmuwEhnRU/AMWsD0G/Fbg==", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.6.tgz", + "integrity": "sha512-VPC82gr1seXOpkjAAKoLhP50vx4vGNlF4msF64dSFq1P8RfB+QAuJWGHPXXPc8QyfVWwwB/TNNU4+ayZmHNbZw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": "^7.24.6", + "@babel/types": "^7.25.6", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" @@ -142,14 +157,15 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.6.tgz", - "integrity": 
"sha512-VZQ57UsDGlX/5fFA7GkVPplZhHsVc+vuErWgdOiysI9Ksnw0Pbbd6pnPiR/mmJyKHgyIW0c7KT32gmhiF+cirg==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", + "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.24.6", - "@babel/helper-validator-option": "^7.24.6", - "browserslist": "^4.22.2", + "@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -162,6 +178,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^3.0.2" } @@ -171,6 +188,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -179,65 +197,34 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.6.tgz", - "integrity": "sha512-Y50Cg3k0LKLMjxdPjIl40SdJgMB85iXn27Vk/qbHZCFx/o5XO3PSnpi675h1KEmmDb6OFArfd5SCQEQ5Q4H88g==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.24.6", - "resolved": 
"https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.6.tgz", - "integrity": "sha512-xpeLqeeRkbxhnYimfr2PC+iA0Q7ljX/d1eZ9/inYbmfG2jpl8Lu3DyXvpOAnrS5kxkfOWJjioIMQsaMBXFI05w==", "dev": true, - "dependencies": { - "@babel/template": "^7.24.6", - "@babel/types": "^7.24.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.6.tgz", - "integrity": "sha512-SF/EMrC3OD7dSta1bLJIlrsVxwtd0UpjRJqLno6125epQMJ/kyFmpTT4pbvPbdQHzCHg+biQ7Syo8lnDtbR+uA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.6" - }, - "engines": { - "node": ">=6.9.0" - } + "license": "ISC" }, "node_modules/@babel/helper-module-imports": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.6.tgz", - "integrity": "sha512-a26dmxFJBF62rRO9mmpgrfTLsAuyHk4e1hKTUkD/fcMfynt8gvEKwQPQDVxWhca8dHoDck+55DFt42zV0QMw5g==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", + "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": "^7.24.6" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.6.tgz", - "integrity": "sha512-Y/YMPm83mV2HJTbX1Qh2sjgjqcacvOlhbzdCCsSlblOKjSYmQqEbO6rUniWQyRo9ncyfjT8hnUjlG06RXDEmcA==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", + "integrity": 
"sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.6", - "@babel/helper-module-imports": "^7.24.6", - "@babel/helper-simple-access": "^7.24.6", - "@babel/helper-split-export-declaration": "^7.24.6", - "@babel/helper-validator-identifier": "^7.24.6" + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" }, "engines": { "node": ">=6.9.0" @@ -247,76 +234,71 @@ } }, "node_modules/@babel/helper-simple-access": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.6.tgz", - "integrity": "sha512-nZzcMMD4ZhmB35MOOzQuiGO5RzL6tJbsT37Zx8M5L/i9KSrukGXWTjLe1knIbb/RmxoJE9GON9soq0c0VEMM5g==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": "^7.24.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.6.tgz", - "integrity": "sha512-CvLSkwXGWnYlF9+J3iZUvwgAxKiYzK3BWuo+mLzD/MDGOZDj7Gq8+hqaOkMxmJwmlv0iu86uH5fdADd9Hxkymw==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.6" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.6.tgz", - "integrity": 
"sha512-WdJjwMEkmBicq5T9fm/cHND3+UlFa2Yj8ALLgmoSQAJZysYbBjw+azChSGPN4DSPLXOcooGRvDwZWMcF/mLO2Q==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.6.tgz", - "integrity": "sha512-4yA7s865JHaqUdRbnaxarZREuPTHrjpDT+pXoAZ1yhyo6uFnIEpS8VMu16siFOHDpZNKYv5BObhsB//ycbICyw==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.6.tgz", - "integrity": "sha512-Jktc8KkF3zIkePb48QO+IapbXlSapOW9S+ogZZkcO6bABgYAxtZcjZ/O005111YLf+j4M84uEgwYoidDkXbCkQ==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", + "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.6.tgz", - "integrity": "sha512-V2PI+NqnyFu1i0GyTd/O/cTpxzQCYioSkUIRmgo7gFEHKKCg5w46+r/A6WeUR1+P3TeQ49dspGPNd/E3n9AnnA==", + "version": "7.25.6", + "resolved": 
"https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.6.tgz", + "integrity": "sha512-Xg0tn4HcfTijTwfDwYlvVCl43V6h4KyVVX2aEm4qdO/PC6L2YvzLHFdmxhoeSA3eslcE6+ZVXHgWwopXYLNq4Q==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/template": "^7.24.6", - "@babel/types": "^7.24.6" + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.6.tgz", - "integrity": "sha512-2YnuOp4HAk2BsBrJJvYCbItHx0zWscI1C3zgWkz+wDyD9I7GIVrfnLyrR4Y1VR+7p+chAEcrgRQYZAGIKMV7vQ==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.24.6", + "@babel/helper-validator-identifier": "^7.24.7", "chalk": "^2.4.2", "js-tokens": "^4.0.0", "picocolors": "^1.0.0" @@ -326,10 +308,14 @@ } }, "node_modules/@babel/parser": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.6.tgz", - "integrity": "sha512-eNZXdfU35nJC2h24RznROuOpO94h6x8sg9ju0tT9biNtLZ2vuP8SduLqqV+/8+cebSLV9SJEAN5Z3zQbJG/M+Q==", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.6.tgz", + "integrity": "sha512-trGdfBdbD0l1ZPmcJ83eNxB9rbEax4ALFTF7fN386TMYbeCQbyme5cOEXQhbGXKebwGaB/J52w1mrklMcbgy6Q==", "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.25.6" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -338,33 +324,32 @@ } }, "node_modules/@babel/template": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.6.tgz", - "integrity": "sha512-3vgazJlLwNXi9jhrR1ef8qiB65L1RK90+lEQwv4OxveHnqC3BfmnHdgySwRLzf6akhlOYenT+b7AfWq+a//AHw==", + "version": 
"7.25.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", + "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.24.6", - "@babel/parser": "^7.24.6", - "@babel/types": "^7.24.6" + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.24.6", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.6.tgz", - "integrity": "sha512-OsNjaJwT9Zn8ozxcfoBc+RaHdj3gFmCmYoQLUII1o6ZrUwku0BMg80FoOTPx+Gi6XhcQxAYE4xyjPTo4SxEQqw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.24.6", - "@babel/generator": "^7.24.6", - "@babel/helper-environment-visitor": "^7.24.6", - "@babel/helper-function-name": "^7.24.6", - "@babel/helper-hoist-variables": "^7.24.6", - "@babel/helper-split-export-declaration": "^7.24.6", - "@babel/parser": "^7.24.6", - "@babel/types": "^7.24.6", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.6.tgz", + "integrity": "sha512-9Vrcx5ZW6UwK5tvqsj0nGpp/XzqthkT0dqIc9g1AdtygFToNtTF67XzYS//dm+SAK9cp3B9R4ZO/46p63SCjlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.6", + "@babel/parser": "^7.25.6", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.6", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -372,14 +357,25 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/@babel/types": { - "version": 
"7.24.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.6.tgz", - "integrity": "sha512-WaMsgi6Q8zMgMth93GvWPXkhAIEobfsIkLTacoVZoK1J0CevIPGYY2Vo5YvJGqyHqXM6P4ppOYGsIRU8MM9pFQ==", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.6.tgz", + "integrity": "sha512-/l42B1qxpG6RdfYf343Uw1vmDjeNhneUXtzhojE7pDgfpEypmRhI6j1kr17XCVv4Cgl9HdAiQY2x0GwKm7rWCw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.24.6", - "@babel/helper-validator-identifier": "^7.24.6", + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", "to-fast-properties": "^2.0.0" }, "engines": { @@ -391,6 +387,7 @@ "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^3.3.0" }, @@ -402,10 +399,11 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", - "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz", + "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==", "dev": true, + "license": "MIT", "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } @@ -415,6 +413,7 @@ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, + "license": "MIT", "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", @@ -437,13 +436,15 @@ "version": "2.0.1", "resolved": 
"https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true + "dev": true, + "license": "Python-2.0" }, "node_modules/@eslint/eslintrc/node_modules/globals": { "version": "13.24.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, + "license": "MIT", "dependencies": { "type-fest": "^0.20.2" }, @@ -459,6 +460,7 @@ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^2.0.1" }, @@ -471,6 +473,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -479,21 +482,23 @@ } }, "node_modules/@eslint/js": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", - "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "version": "9.12.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.12.0.tgz", + "integrity": "sha512-eohesHH8WFRUprDNyEREgqP6beG6htMeUYeCpkEgBCieCMme5r9zFWjzAJp//9S+Kub4rqE+jXe9Cp1a7IYIIA==", "dev": true, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.14", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", - "integrity": 
"sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", "dev": true, + "license": "Apache-2.0", "dependencies": { - "@humanwhocodes/object-schema": "^2.0.2", + "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" }, @@ -506,6 +511,7 @@ "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=12.22" }, @@ -518,12 +524,15 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", - "dev": true + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause" }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "license": "ISC", "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", @@ -537,9 +546,10 @@ } }, "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + 
"integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "license": "MIT", "engines": { "node": ">=12" }, @@ -551,6 +561,7 @@ "version": "6.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "license": "MIT", "engines": { "node": ">=12" }, @@ -561,12 +572,14 @@ "node_modules/@isaacs/cliui/node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "license": "MIT" }, "node_modules/@isaacs/cliui/node_modules/string-width": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "license": "MIT", "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -583,6 +596,7 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -597,6 +611,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "license": "MIT", "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -614,6 +629,7 @@ "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", "integrity": 
"sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", "dev": true, + "license": "ISC", "dependencies": { "camelcase": "^5.3.1", "find-up": "^4.1.0", @@ -630,6 +646,7 @@ "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -643,6 +660,7 @@ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^4.1.0" }, @@ -655,6 +673,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -670,6 +689,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.2.0" }, @@ -677,11 +697,22 @@ "node": ">=8" } }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": 
true, + "license": "MIT", "engines": { "node": ">=8" } @@ -691,6 +722,7 @@ "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", @@ -705,6 +737,7 @@ "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.0.0" } @@ -714,21 +747,24 @@ "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.25", "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" @@ -738,6 +774,7 @@ "version": "1.0.11", 
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", + "dev": true, "dependencies": { "detect-libc": "^2.0.0", "https-proxy-agent": "^5.0.0", @@ -758,6 +795,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ -771,6 +809,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } @@ -780,6 +819,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" @@ -792,6 +832,7 @@ "version": "2.2.2", "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz", "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "license": "ISC", "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", @@ -807,6 +848,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "license": "MIT", "dependencies": { "debug": "^4.3.4" }, @@ -815,9 +857,10 @@ } }, "node_modules/@npmcli/agent/node_modules/https-proxy-agent": { - "version": "7.0.4", - "resolved": 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", - "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", + "license": "MIT", "dependencies": { "agent-base": "^7.0.2", "debug": "4" @@ -830,6 +873,7 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz", "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", + "license": "ISC", "dependencies": { "semver": "^7.3.5" }, @@ -841,51 +885,346 @@ "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "license": "MIT", "optional": true, "engines": { "node": ">=14" } }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@shikijs/core": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.21.0.tgz", + "integrity": "sha512-zAPMJdiGuqXpZQ+pWNezQAk5xhzRXBNiECFPcJLtUdsFM3f//G95Z15EHTnHchYycU8kIIysqGgxp8OVSj1SPQ==", + "dev": true, + "dependencies": { + "@shikijs/engine-javascript": "1.21.0", + "@shikijs/engine-oniguruma": "1.21.0", + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2", + "@types/hast": "^3.0.4", + "hast-util-to-html": "^9.0.3" + } + }, + "node_modules/@shikijs/engine-javascript": { + "version": "1.21.0", + "resolved": 
"https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-1.21.0.tgz", + "integrity": "sha512-jxQHNtVP17edFW4/0vICqAVLDAxmyV31MQJL4U/Kg+heQALeKYVOWo0sMmEZ18FqBt+9UCdyqGKYE7bLRtk9mg==", + "dev": true, + "dependencies": { + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2", + "oniguruma-to-js": "0.4.3" + } + }, + "node_modules/@shikijs/engine-oniguruma": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.21.0.tgz", + "integrity": "sha512-AIZ76XocENCrtYzVU7S4GY/HL+tgHGbVU+qhiDyNw1qgCA5OSi4B4+HY4BtAoJSMGuD/L5hfTzoRVbzEm2WTvg==", + "dev": true, + "dependencies": { + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2" + } + }, + "node_modules/@shikijs/types": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.21.0.tgz", + "integrity": "sha512-tzndANDhi5DUndBtpojEq/42+dpUF2wS7wdCDQaFtIXm3Rd1QkrcVgSSRLOvEwexekihOXfbYJINW37g96tJRw==", + "dev": true, + "dependencies": { + "@shikijs/vscode-textmate": "^9.2.2", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vscode-textmate": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-9.2.2.tgz", + "integrity": "sha512-TMp15K+GGYrWlZM8+Lnj9EaHEFmOen0WJBrfa17hF7taDOYthuPPV0GWzfd/9iMij0akS/8Yw2ikquH7uVi/fg==", + "dev": true + }, "node_modules/@tootallnate/once": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 6" } }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": true, + "dependencies": { + 
"@types/unist": "*" + } + }, "node_modules/@types/json5": { "version": "0.0.29", "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } }, "node_modules/@types/node": { - "version": "20.12.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.13.tgz", - "integrity": "sha512-gBGeanV41c1L171rR7wjbMiEpEI/l5XFQdLLfhr/REwpgDy/4U8y89+i8kRiLzDyZdOkXh+cRaTetUnCYutoXA==", + "version": "22.7.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz", + "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.1.tgz", + "integrity": "sha512-xfvdgA8AP/vxHgtgU310+WBnLB4uJQ9XdyP17RebG26rLtDrQJV3ZYrcopX91GrHmMoH8bdSwMRh2a//TiJ1jQ==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.8.1", + "@typescript-eslint/type-utils": "8.8.1", + "@typescript-eslint/utils": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1", + "graphemer": "^1.4.0", + 
"ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.1.tgz", + "integrity": "sha512-hQUVn2Lij2NAxVFEdvIGxT9gP1tq2yM83m+by3whWFsWC+1y8pxxxHUFE1UqDu2VsGi2i6RLcv4QvouM84U+ow==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "8.8.1", + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/typescript-estree": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.1.tgz", + "integrity": "sha512-X4JdU+66Mazev/J0gfXlcC/dV6JI37h+93W9BRYXrSn0hrE64IoWgVkO9MSJgEzoWkxONgaQpICWg8vAN74wlA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.8.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.1.tgz", + "integrity": "sha512-qSVnpcbLP8CALORf0za+vjLYj1Wp8HSoiI8zYU5tHxRVj30702Z1Yw4cLwfNKhTPWp5+P+k1pjmD5Zd1nhxiZA==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "8.8.1", + "@typescript-eslint/utils": "8.8.1", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.1.tgz", + "integrity": "sha512-WCcTP4SDXzMd23N27u66zTKMuEevH4uzU8C9jf0RO4E04yVHgQgW+r+TeVTNnO1KIfrL8ebgVVYYMMO3+jC55Q==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.1.tgz", + "integrity": "sha512-A5d1R9p+X+1js4JogdNilDuuq+EHZdsH9MjTVxXOdVFfTJXunKJR/v+fNNyO4TnoOn5HqobzfRlc70NC6HTcdg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": 
"2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, "dependencies": { - "undici-types": "~5.26.4" + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.1.tgz", + "integrity": "sha512-/QkNJDbV0bdL7H7d0/y0qBbV2HTtf0TIyjSDTvvmQEzeVx8jEImEbLuOA4EsvE8gIgqMitns0ifb5uQhMj8d9w==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.8.1", + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/typescript-estree": "8.8.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.1.tgz", + "integrity": "sha512-0/TdC3aeRAsW7MDvYRwEc1Uwm0TIBfzjPFgg60UU2Haj5qsCs9cc3zNgY71edqE3LbWfF/WoZQd3lJoDXFQpag==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.1", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" } }, "node_modules/@ungap/structured-clone": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true, + "license": "ISC" }, "node_modules/acorn": { - "version": "8.11.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", - "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "dev": true, + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -898,6 +1237,7 @@ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, + "license": "MIT", "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } @@ -906,6 +1246,8 @@ "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "license": "MIT", "dependencies": { "debug": "4" }, @@ -917,6 +1259,7 @@ "version": "3.1.0", "resolved": 
"https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "license": "MIT", "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -930,6 +1273,7 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -942,10 +1286,10 @@ } }, "node_modules/ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", - "dev": true, + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "license": "MIT", "engines": { "node": ">=6" } @@ -954,6 +1298,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", "engines": { "node": ">=8" } @@ -962,6 +1307,7 @@ "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "license": "MIT", "dependencies": { "color-convert": "^1.9.0" }, @@ -974,6 +1320,7 @@ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "dev": true, + "license": "ISC", "dependencies": { "normalize-path": 
"^3.0.0", "picomatch": "^2.0.4" @@ -987,6 +1334,7 @@ "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", "dev": true, + "license": "MIT", "dependencies": { "default-require-extensions": "^3.0.0" }, @@ -997,19 +1345,24 @@ "node_modules/aproba": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", + "dev": true, + "license": "ISC" }, "node_modules/archy": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/are-we-there-yet": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", "dependencies": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" @@ -1023,6 +1376,7 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, + "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } @@ -1041,6 +1395,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "license": "MIT", 
"dependencies": { "call-bind": "^1.0.5", "is-array-buffer": "^3.0.4" @@ -1057,6 +1412,7 @@ "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -1077,6 +1433,7 @@ "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -1097,6 +1454,7 @@ "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -1117,6 +1475,7 @@ "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -1135,6 +1494,7 @@ "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -1148,35 +1508,28 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.toreversed": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz", - "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0" - } - }, "node_modules/array.prototype.tosorted": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.3.tgz", - "integrity": "sha512-/DdH4TiTmOKzyQbp/eadcCVexiCb36xJg7HshYOYJnNZFDj33GEv0P7GxsynpShhq4OLYJzbGcBDkLsDt7MnNg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.5", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.22.3", - "es-errors": "^1.1.0", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/arraybuffer.prototype.slice": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.1", "call-bind": "^1.0.5", @@ -1199,6 +1552,7 @@ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true, + "license": "MIT", "engines": { "node": "*" } @@ -1207,6 +1561,7 @@ "version": "1.0.7", "resolved": 
"https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", "dependencies": { "possible-typed-array-names": "^1.0.0" }, @@ -1220,13 +1575,15 @@ "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" }, "node_modules/binary-extensions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -1238,6 +1595,7 @@ "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "license": "MIT", "dependencies": { "file-uri-to-path": "1.0.0" } @@ -1246,6 +1604,7 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -1256,6 +1615,7 @@ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, + "license": "MIT", "dependencies": { "fill-range": "^7.1.1" }, @@ -1267,12 +1627,13 @@ "version": "1.3.1", "resolved": 
"https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/browserslist": { - "version": "4.23.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", - "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "dev": true, "funding": [ { @@ -1288,11 +1649,12 @@ "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001587", - "electron-to-chromium": "^1.4.668", - "node-releases": "^2.0.14", - "update-browserslist-db": "^1.0.13" + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -1306,14 +1668,16 @@ "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", "integrity": "sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^7.0.0" } }, "node_modules/cacache": { - "version": "18.0.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.3.tgz", - "integrity": "sha512-qXCd4rh6I07cnDqh8V48/94Tc/WSfj+o3Gn6NZ0aZovS255bUx8O13uKxRFd2eWG0xgsco7+YItQNPaa5E85hg==", + "version": "18.0.4", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz", + "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", + "license": "ISC", "dependencies": { "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", @@ -1336,27 +1700,16 @@ 
"version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } }, - "node_modules/cacache/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/cacache/node_modules/foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "license": "ISC", "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" @@ -1369,35 +1722,30 @@ } }, "node_modules/cacache/node_modules/glob": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", - "integrity": "sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "license": "ISC", "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": 
"dist/esm/bin.mjs" }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/cacache/node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, "node_modules/cacache/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -1408,37 +1756,11 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/cacache/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/cacache/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cacache/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, 
"node_modules/cacache/node_modules/signal-exit": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "license": "ISC", "engines": { "node": ">=14" }, @@ -1446,25 +1768,12 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/cacache/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/caching-transform": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", "dev": true, + "license": "MIT", "dependencies": { "hasha": "^5.0.0", "make-dir": "^3.0.0", @@ -1479,6 +1788,7 @@ "version": "1.0.7", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", @@ -1498,6 +1808,7 @@ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -1507,14 +1818,15 @@ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, 
"node_modules/caniuse-lite": { - "version": "1.0.30001625", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001625.tgz", - "integrity": "sha512-4KE9N2gcRH+HQhpeiRZXd+1niLB/XNLAhSy4z7fI8EzcbcPoAqjNInxVHTiTwWfTIV4w096XG8OtCOCQQKPv3w==", + "version": "1.0.30001664", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001664.tgz", + "integrity": "sha512-AmE7k4dXiNKQipgn7a2xg558IRqPN3jMQY/rOsbxDhrd0tyChwbITBfiwtnqz8bi2M5mIWbxAYBvk7W7QBUS2g==", "dev": true, "funding": [ { @@ -1529,13 +1841,25 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, "node_modules/chai": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.4.1.tgz", - "integrity": "sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", + "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", "dev": true, + "license": "MIT", "dependencies": { "assertion-error": "^1.1.0", "check-error": "^1.0.3", @@ -1543,7 +1867,7 @@ "get-func-name": "^2.0.2", "loupe": "^2.3.6", "pathval": "^1.1.1", - "type-detect": "^4.0.8" + "type-detect": "^4.1.0" }, "engines": { "node": ">=4" @@ -1553,6 +1877,7 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "license": "MIT", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": 
"^1.0.5", @@ -1562,11 +1887,32 @@ "node": ">=4" } }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/check-error": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", "dev": true, + "license": "MIT", "dependencies": { "get-func-name": "^2.0.2" }, @@ -1575,16 +1921,11 @@ } }, "node_modules/chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], + "license": "MIT", "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -1597,15 +1938,32 @@ "engines": { "node": ">= 8.10.0" }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, "optionalDependencies": { "fsevents": "~2.3.2" } }, + 
"node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/choma": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/choma/-/choma-1.2.1.tgz", "integrity": "sha512-4KwEouEHt6SfG8vYnN2gSJfq/cGmnY2gubnUgsgkRXzHoSRAgluX2YXQgDg6bTDWuOmUrTb/cfwMpNlvnnPZCg==", "dev": true, + "license": "MIT", "dependencies": { "chalk": "^2.3.2", "seedrandom": "^2.4.3" @@ -1618,6 +1976,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "license": "ISC", "engines": { "node": ">=10" } @@ -1626,6 +1985,7 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "license": "MIT", "engines": { "node": ">=6" } @@ -1635,6 +1995,7 @@ "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", @@ -1650,6 +2011,7 @@ "integrity": "sha512-Y8Hw+V3HgR7V71xWH2vQ9lyS358CbGCldWlJFR0JirqoGtOoas3R3/OclRTvgUYFK29mmJICDPauVKmpqbwhOA==", "deprecated": "https://about.codecov.io/blog/codecov-uploader-deprecation-plan/", "dev": true, + "license": "MIT", "dependencies": { "argv": "0.0.2", "ignore-walk": "3.0.4", @@ -1668,6 +2030,7 @@ "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", 
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "license": "MIT", "dependencies": { "color-name": "1.1.3" } @@ -1675,81 +2038,75 @@ "node_modules/color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "license": "MIT" }, "node_modules/color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "license": "ISC", "bin": { "color-support": "bin.js" } }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/commondir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" }, "node_modules/console-control-strings": { "version": "1.1.0", 
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" - }, - "node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true - }, - "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/cross-spawn/node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true, + "license": "ISC" }, - "node_modules/cross-spawn/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "bin": { - "semver": "bin/semver" - } + "node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true, + "license": "MIT" }, - 
"node_modules/cross-spawn/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "license": "MIT", "dependencies": { - "isexe": "^2.0.0" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" }, - "bin": { - "which": "bin/which" + "engines": { + "node": ">= 8" } }, "node_modules/data-view-buffer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -1766,6 +2123,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "es-errors": "^1.3.0", @@ -1782,6 +2140,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -1795,11 +2154,12 @@ } }, "node_modules/debug": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", - "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "version": "4.3.7", + 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -1815,15 +2175,17 @@ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/deep-eql": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", - "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", "dev": true, + "license": "MIT", "dependencies": { "type-detect": "^4.0.0" }, @@ -1835,13 +2197,15 @@ "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/default-require-extensions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz", "integrity": "sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==", "dev": true, + "license": "MIT", "dependencies": { "strip-bom": "^4.0.0" }, @@ -1856,6 +2220,7 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", 
"dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", @@ -1872,6 +2237,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "license": "MIT", "dependencies": { "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", @@ -1887,21 +2253,48 @@ "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } }, "node_modules/detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=8" } }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dev": true, + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/diff": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", - "integrity": 
"sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" } @@ -1911,6 +2304,7 @@ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, @@ -1921,32 +2315,49 @@ "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.4.787", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.787.tgz", - "integrity": "sha512-d0EFmtLPjctczO3LogReyM2pbBiiZbnsKnGF+cdZhsYzHm/A0GV7W94kqzLD8SN4O3f3iHlgLUChqghgyznvCQ==", - "dev": true + "version": "1.5.30", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.30.tgz", + "integrity": "sha512-sXI35EBN4lYxzc/pIGorlymYNzDBOqkSlVRe6MkgBsW/hW1tpC/HDJ2fjG7XnjakzfLEuvdmux0Mjs6jHq4UOA==", + "dev": true, + "license": "ISC" }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + 
"license": "MIT" }, "node_modules/encoding": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "license": "MIT", "optional": true, "dependencies": { "iconv-lite": "^0.6.2" } }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/env-paths": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "license": "MIT", "engines": { "node": ">=6" } @@ -1954,12 +2365,14 @@ "node_modules/err-code": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "license": "MIT" }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "license": "MIT", "dependencies": { "is-arrayish": "^0.2.1" } @@ -1968,6 +2381,7 @@ "version": "1.23.3", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.1", 
"arraybuffer.prototype.slice": "^1.0.3", @@ -2027,6 +2441,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "license": "MIT", "dependencies": { "get-intrinsic": "^1.2.4" }, @@ -2038,6 +2453,7 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", "engines": { "node": ">= 0.4" } @@ -2047,6 +2463,7 @@ "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz", "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -2071,6 +2488,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "license": "MIT", "dependencies": { "es-errors": "^1.3.0" }, @@ -2082,6 +2500,7 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "license": "MIT", "dependencies": { "get-intrinsic": "^1.2.4", "has-tostringtag": "^1.0.2", @@ -2096,6 +2515,7 @@ "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "dev": true, + "license": "MIT", "dependencies": { "hasown": "^2.0.0" } @@ -2104,6 +2524,7 @@ "version": "1.2.1", "resolved": 
"https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "license": "MIT", "dependencies": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -2120,13 +2541,15 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/escalade": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", - "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -2135,21 +2558,23 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "license": "MIT", "engines": { "node": ">=0.8.0" } }, "node_modules/eslint": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", - "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.57.0", - "@humanwhocodes/config-array": "^0.11.14", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -2213,6 +2638,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "engines": { "node": ">=12.0.0" }, @@ -2242,6 +2668,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "peerDependencies": { "eslint": "^8.8.0", "eslint-plugin-react": "^7.28.0" @@ -2252,6 +2679,7 @@ "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^3.2.7", "is-core-module": "^2.13.0", @@ -2263,15 +2691,17 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/eslint-module-utils": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", - "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.0.tgz", + "integrity": "sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^3.2.7" }, @@ -2289,6 +2719,7 @@ "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } @@ -2298,6 +2729,7 @@ "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", "dev": true, + "license": "MIT", "dependencies": { "eslint-utils": "^2.0.0", "regexpp": "^3.0.0" @@ -2317,6 +2749,7 @@ "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^1.1.0" }, @@ -2332,31 +2765,34 @@ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=4" } }, "node_modules/eslint-plugin-import": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", - "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.30.0.tgz", + "integrity": "sha512-/mHNE9jINJfiD2EKkg1BKyPyUk4zdnT54YgbOgfjSakWT5oyX/qQLVNTkehyfpcMxZXMy1zyonZ2v7hZTX43Yw==", "dev": true, + "license": "MIT", "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", "array.prototype.flat": "^1.3.2", "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": 
"^2.1.0", "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "eslint-module-utils": "^2.9.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", "semver": "^6.3.1", "tsconfig-paths": "^3.15.0" }, @@ -2372,6 +2808,7 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } @@ -2381,6 +2818,7 @@ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, @@ -2393,6 +2831,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -2402,6 +2841,7 @@ "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz", "integrity": "sha512-jDex9s7D/Qial8AGVIHq4W7NswpUD5DPDL2RH8Lzd9EloWUuvUkHfv4FRLMipH5q2UtyurorBkPeNi1wVWNh3Q==", "dev": true, + "license": "MIT", "dependencies": { "builtins": "^5.0.1", "eslint-plugin-es": "^4.1.0", @@ -2423,10 +2863,11 @@ } }, "node_modules/eslint-plugin-promise": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.2.0.tgz", - "integrity": "sha512-QmAqwizauvnKOlifxyDj2ObfULpHQawlg/zQdgEixur9vl0CvZGv/LCJV2rtj3210QCoeGBzVMfMXqGAOr/4fA==", + "version": "6.6.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", "dev": true, + "license": "ISC", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -2438,35 +2879,36 @@ } }, "node_modules/eslint-plugin-react": { - "version": "7.34.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.2.tgz", - "integrity": "sha512-2HCmrU+/JNigDN6tg55cRDKCQWicYAPB38JGSFDQt95jDm8rrvSUo7YPkOIm5l6ts1j1zCvysNcasvfTMQzUOw==", + "version": "7.37.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.0.tgz", + "integrity": "sha512-IHBePmfWH5lKhJnJ7WB1V+v/GolbB0rjS8XYVCSQCZKaQCAUhMoVoOEn1Ef8Z8Wf0a7l8KTJvuZg5/e4qrZ6nA==", "dev": true, + "license": "MIT", "dependencies": { "array-includes": "^3.1.8", "array.prototype.findlast": "^1.2.5", "array.prototype.flatmap": "^1.3.2", - "array.prototype.toreversed": "^1.1.2", - "array.prototype.tosorted": "^1.1.3", + "array.prototype.tosorted": "^1.1.4", "doctrine": "^2.1.0", "es-iterator-helpers": "^1.0.19", "estraverse": "^5.3.0", + "hasown": "^2.0.2", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", "object.entries": "^1.1.8", "object.fromentries": "^2.0.8", - "object.hasown": "^1.1.4", "object.values": "^1.2.0", "prop-types": "^15.8.1", "resolve": "^2.0.0-next.5", "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.11" + "string.prototype.matchall": "^4.0.11", + "string.prototype.repeat": "^1.0.0" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, "node_modules/eslint-plugin-react/node_modules/doctrine": { @@ -2474,6 +2916,7 @@ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", "integrity": 
"sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, @@ -2486,6 +2929,7 @@ "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", "dev": true, + "license": "MIT", "dependencies": { "is-core-module": "^2.13.0", "path-parse": "^1.0.7", @@ -2503,6 +2947,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -2512,6 +2957,7 @@ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" @@ -2528,6 +2974,7 @@ "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^2.0.0" }, @@ -2546,6 +2993,7 @@ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10" } @@ -2555,6 +3003,7 @@ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "^12.22.0 || ^14.17.0 || 
>=16.0.0" }, @@ -2562,11 +3011,22 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint/node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/eslint/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -2581,13 +3041,15 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true + "dev": true, + "license": "Python-2.0" }, "node_modules/eslint/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -2604,6 +3066,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -2615,27 +3078,15 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - 
"node_modules/eslint/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } + "license": "MIT" }, "node_modules/eslint/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -2643,23 +3094,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint/node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, "node_modules/eslint/node_modules/globals": { "version": "13.24.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, + "license": "MIT", "dependencies": { "type-fest": "^0.20.2" }, @@ -2675,21 +3115,17 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/eslint/node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": 
"sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, "node_modules/eslint/node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^2.0.1" }, @@ -2697,41 +3133,12 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/eslint/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/eslint/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/eslint/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -2744,6 +3151,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": 
"sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -2751,26 +3159,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/espree": { "version": "9.6.1", "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", @@ -2788,6 +3182,7 @@ "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "dev": true, + "license": "BSD-2-Clause", "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" @@ -2797,10 +3192,11 @@ } }, "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "estraverse": "^5.1.0" }, @@ -2813,6 +3209,7 @@ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": 
"sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "estraverse": "^5.2.0" }, @@ -2825,6 +3222,7 @@ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } @@ -2834,6 +3232,7 @@ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=0.10.0" } @@ -2841,31 +3240,64 @@ "node_modules/exponential-backoff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.1.tgz", - "integrity": "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==" + "integrity": "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==", + "license": "Apache-2.0" }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + 
"node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fast-url-parser": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", "integrity": "sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", "dev": true, + "license": "MIT", "dependencies": { "punycode": "^1.3.2" } @@ -2874,13 +3306,15 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fastq": { "version": "1.17.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", "dev": true, + "license": "ISC", "dependencies": { "reusify": "^1.0.4" } @@ -2890,6 +3324,7 @@ "resolved": 
"https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, + "license": "MIT", "dependencies": { "flat-cache": "^3.0.4" }, @@ -2900,13 +3335,15 @@ "node_modules/file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "license": "MIT" }, "node_modules/fill-range": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, + "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -2919,6 +3356,7 @@ "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "dev": true, + "license": "MIT", "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -2936,6 +3374,7 @@ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -2952,6 +3391,7 @@ "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", "dev": true, + "license": "BSD-3-Clause", "bin": { "flat": "cli.js" } @@ -2961,6 +3401,7 @@ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", "integrity": 
"sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", "dev": true, + "license": "MIT", "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", @@ -2974,12 +3415,14 @@ "version": "3.3.1", "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/for-each": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "license": "MIT", "dependencies": { "is-callable": "^1.1.3" } @@ -2989,6 +3432,7 @@ "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", "dev": true, + "license": "ISC", "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^3.0.2" @@ -2997,71 +3441,6 @@ "node": ">=8.0.0" } }, - "node_modules/foreground-child/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/foreground-child/node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, - "node_modules/foreground-child/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - 
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/fromentries": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", @@ -3080,12 +3459,14 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" }, "node_modules/fs-minipass": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz", "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", + "license": "ISC", "dependencies": { "minipass": "^7.0.3" }, @@ -3096,7 +3477,9 @@ "node_modules/fs.realpath": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" }, "node_modules/fsevents": { "version": "2.3.3", @@ -3104,6 +3487,7 @@ "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, + "license": "MIT", "optional": true, "os": [ "darwin" @@ -3116,6 +3500,7 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -3124,6 +3509,7 @@ "version": "1.1.6", "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -3141,6 +3527,7 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -3150,6 +3537,8 @@ "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", "dependencies": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.2", @@ -3170,6 +3559,7 @@ "resolved": 
"https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -3179,6 +3569,7 @@ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, + "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" } @@ -3188,6 +3579,7 @@ "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", "dev": true, + "license": "MIT", "engines": { "node": "*" } @@ -3196,6 +3588,7 @@ "version": "1.2.4", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2", @@ -3215,6 +3608,7 @@ "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=8.0.0" } @@ -3224,6 +3618,7 @@ "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz", "integrity": "sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -3235,6 +3630,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", + "license": "MIT", "dependencies": { 
"call-bind": "^1.0.5", "es-errors": "^1.3.0", @@ -3253,6 +3649,7 @@ "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -3268,15 +3665,16 @@ } }, "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, + "license": "ISC", "dependencies": { - "is-glob": "^4.0.1" + "is-glob": "^4.0.3" }, "engines": { - "node": ">= 6" + "node": ">=10.13.0" } }, "node_modules/glob/node_modules/brace-expansion": { @@ -3284,6 +3682,7 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -3293,6 +3692,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -3301,18 +3701,22 @@ } }, "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "version": "15.11.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.11.0.tgz", + 
"integrity": "sha512-yeyNSjdbyVaWurlwCpcA6XNBrHTMIeDdj0/hnvX/OLJ9ekOXYbLsLinH/MucQyGvNnXhidTdNhTtJaffL2sMfw==", "dev": true, "engines": { - "node": ">=4" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/globalthis": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "license": "MIT", "dependencies": { "define-properties": "^1.2.1", "gopd": "^1.0.1" @@ -3328,6 +3732,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "license": "MIT", "dependencies": { "get-intrinsic": "^1.1.3" }, @@ -3338,18 +3743,21 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" }, "node_modules/graphemer": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -3358,6 +3766,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": 
"sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "license": "MIT", "engines": { "node": ">=4" } @@ -3366,6 +3775,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0" }, @@ -3377,6 +3787,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -3388,6 +3799,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -3399,6 +3811,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" }, @@ -3412,13 +3825,16 @@ "node_modules/has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true, + "license": "ISC" }, "node_modules/hasha": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", "dev": true, 
+ "license": "MIT", "dependencies": { "is-stream": "^2.0.0", "type-fest": "^0.8.0" @@ -3434,6 +3850,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", "dependencies": { "function-bind": "^1.1.2" }, @@ -3441,11 +3858,48 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-to-html": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.3.tgz", + "integrity": "sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^3.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "zwitch": "^2.0.4" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true, + "license": "MIT", "bin": { "he": "bin/he" } @@ -3453,23 +3907,37 @@ "node_modules/hosted-git-info": { "version": "2.8.9", "resolved": 
"https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==" + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "license": "ISC" }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, "node_modules/http-cache-semantics": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==" + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "license": "BSD-2-Clause" }, "node_modules/http-proxy-agent": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "license": "MIT", "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" @@ -3482,6 +3950,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "license": "MIT", 
"dependencies": { "debug": "^4.3.4" }, @@ -3493,6 +3962,8 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "license": "MIT", "dependencies": { "agent-base": "6", "debug": "4" @@ -3502,12 +3973,13 @@ } }, "node_modules/husky": { - "version": "9.0.11", - "resolved": "https://registry.npmjs.org/husky/-/husky-9.0.11.tgz", - "integrity": "sha512-AB6lFlbwwyIqMdHYhwPe+kjOC3Oc5P3nThEoW/AaO2BX3vJDjWPFxYLxokUZOo6RNX20He3AaT8sESs9NJcmEw==", + "version": "9.1.6", + "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.6.tgz", + "integrity": "sha512-sqbjZKK7kf44hfdE94EoX8MZNk0n7HeW37O4YrVGCF4wzgQjp+akPAkfUK5LZ6KuR/6sqeAVuXHji+RzQgOn5A==", "dev": true, + "license": "MIT", "bin": { - "husky": "bin.mjs" + "husky": "bin.js" }, "engines": { "node": ">=18" @@ -3520,6 +3992,7 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", "optional": true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" @@ -3529,10 +4002,11 @@ } }, "node_modules/ignore": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", - "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, + "license": "MIT", "engines": { "node": ">= 4" } @@ -3542,6 +4016,7 @@ "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.4.tgz", "integrity": 
"sha512-PY6Ii8o1jMRA1z4F2hRkH/xN59ox43DavKvD3oDpfurRlOJyAHpifIwpbdv1n4jt4ov0jSpw3kQ4GhJnpBL6WQ==", "dev": true, + "license": "ISC", "dependencies": { "minimatch": "^3.0.4" } @@ -3551,6 +4026,7 @@ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dev": true, + "license": "MIT", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -3562,19 +4038,11 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/import-fresh/node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "license": "MIT", "engines": { "node": ">=0.8.19" } @@ -3583,6 +4051,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "license": "MIT", "engines": { "node": ">=8" } @@ -3592,6 +4061,8 @@ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -3600,12 +4071,15 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" }, "node_modules/internal-slot": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "hasown": "^2.0.0", @@ -3619,6 +4093,7 @@ "version": "9.0.5", "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "license": "MIT", "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" @@ -3630,12 +4105,14 @@ "node_modules/ip-address/node_modules/sprintf-js": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==" + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "license": "BSD-3-Clause" }, "node_modules/is-array-buffer": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "license": "MIT", "dependencies": { 
"call-bind": "^1.0.2", "get-intrinsic": "^1.2.1" @@ -3650,13 +4127,15 @@ "node_modules/is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==" + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "license": "MIT" }, "node_modules/is-async-function": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz", "integrity": "sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -3671,6 +4150,7 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "license": "MIT", "dependencies": { "has-bigints": "^1.0.1" }, @@ -3683,6 +4163,7 @@ "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dev": true, + "license": "MIT", "dependencies": { "binary-extensions": "^2.0.0" }, @@ -3694,6 +4175,7 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -3709,6 +4191,7 @@ "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", "engines": { "node": ">= 0.4" 
}, @@ -3717,11 +4200,15 @@ } }, "node_modules/is-core-module": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", - "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", + "license": "MIT", "dependencies": { - "hasown": "^2.0.0" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3731,6 +4218,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "license": "MIT", "dependencies": { "is-typed-array": "^1.1.13" }, @@ -3745,6 +4233,7 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -3760,6 +4249,7 @@ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -3769,6 +4259,7 @@ "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz", "integrity": "sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2" }, @@ -3780,6 +4271,7 @@ "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", "engines": { "node": ">=8" } @@ -3789,6 +4281,7 @@ "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -3804,6 +4297,7 @@ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, + "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" }, @@ -3814,13 +4308,15 @@ "node_modules/is-lambda": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==" + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "license": "MIT" }, "node_modules/is-map": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -3832,6 +4328,7 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -3844,6 +4341,7 @@ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.12.0" } @@ -3852,6 +4350,7 @@ "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -3867,6 +4366,7 @@ "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -3876,6 +4376,7 @@ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -3884,6 +4385,7 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -3900,6 +4402,7 @@ "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -3911,6 +4414,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7" }, @@ -3926,6 +4430,7 @@ "resolved": 
"https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -3937,6 +4442,7 @@ "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -3951,6 +4457,7 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "license": "MIT", "dependencies": { "has-symbols": "^1.0.2" }, @@ -3965,6 +4472,7 @@ "version": "1.1.13", "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "license": "MIT", "dependencies": { "which-typed-array": "^1.1.14" }, @@ -3979,13 +4487,15 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -3998,6 +4508,7 @@ "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", "dev": true, + "license": 
"MIT", "engines": { "node": ">= 0.4" }, @@ -4009,6 +4520,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.2" }, @@ -4021,6 +4533,7 @@ "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz", "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "get-intrinsic": "^1.2.4" @@ -4037,6 +4550,7 @@ "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -4044,21 +4558,21 @@ "node_modules/isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "license": "MIT" }, "node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "engines": { - "node": ">=16" - } + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" }, "node_modules/istanbul-lib-coverage": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", "integrity": 
"sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">=8" } @@ -4068,6 +4582,7 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "append-transform": "^2.0.0" }, @@ -4080,6 +4595,7 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "@babel/core": "^7.7.5", "@istanbuljs/schema": "^0.1.2", @@ -4095,6 +4611,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -4104,6 +4621,7 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz", "integrity": "sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==", "dev": true, + "license": "ISC", "dependencies": { "archy": "^1.0.0", "cross-spawn": "^7.0.3", @@ -4116,31 +4634,12 @@ "node": ">=8" } }, - "node_modules/istanbul-lib-processinfo/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/isexe": { - 
"version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, "node_modules/istanbul-lib-processinfo/node_modules/p-map": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -4148,56 +4647,12 @@ "node": ">=8" } }, - "node_modules/istanbul-lib-processinfo/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - 
"isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/istanbul-lib-report": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "istanbul-lib-coverage": "^3.0.0", "make-dir": "^4.0.0", @@ -4212,6 +4667,7 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -4221,6 +4677,7 @@ "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^7.5.3" }, @@ -4236,6 +4693,7 @@ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -4248,6 +4706,7 @@ "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", @@ -4262,6 +4721,7 @@ "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "html-escaper": "^2.0.0", 
"istanbul-lib-report": "^3.0.0" @@ -4275,6 +4735,7 @@ "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz", "integrity": "sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==", "dev": true, + "license": "MIT", "dependencies": { "define-properties": "^1.2.1", "get-intrinsic": "^1.2.1", @@ -4284,15 +4745,13 @@ } }, "node_modules/jackspeak": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.1.2.tgz", - "integrity": "sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/cliui": "^8.0.2" }, - "engines": { - "node": ">=14" - }, "funding": { "url": "https://github.com/sponsors/isaacs" }, @@ -4304,13 +4763,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/js-yaml": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -4322,13 +4783,15 @@ "node_modules/jsbn": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", - "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==" + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", + "license": 
"MIT" }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true, + "license": "MIT", "bin": { "jsesc": "bin/jsesc" }, @@ -4340,30 +4803,35 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/json-parse-better-errors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "license": "MIT" }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/json5": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "dev": true, + "license": "MIT", "bin": { "json5": "lib/cli.js" }, @@ -4376,6 +4844,7 @@ 
"resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", "dev": true, + "license": "MIT", "dependencies": { "array-includes": "^3.1.6", "array.prototype.flat": "^1.3.1", @@ -4391,6 +4860,7 @@ "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "dev": true, + "license": "MIT", "dependencies": { "json-buffer": "3.0.1" } @@ -4400,6 +4870,7 @@ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, + "license": "MIT", "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -4408,10 +4879,20 @@ "node": ">= 0.8.0" } }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dev": true, + "dependencies": { + "uc.micro": "^2.0.0" + } + }, "node_modules/load-json-file": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", + "license": "MIT", "dependencies": { "graceful-fs": "^4.1.2", "parse-json": "^4.0.0", @@ -4426,6 +4907,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "license": "MIT", "engines": { "node": ">=4" } @@ -4435,6 +4917,7 @@ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": 
"sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^5.0.0" }, @@ -4449,19 +4932,22 @@ "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", "dev": true, + "license": "MIT", "dependencies": { "chalk": "^4.1.0", "is-unicode-supported": "^0.1.0" @@ -4478,6 +4964,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -4493,6 +4980,7 @@ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -4509,6 +4997,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, 
@@ -4520,13 +5009,15 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/log-symbols/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -4536,6 +5027,7 @@ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -4548,6 +5040,7 @@ "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "dev": true, + "license": "MIT", "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, @@ -4560,22 +5053,29 @@ "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", "dev": true, + "license": "MIT", "dependencies": { "get-func-name": "^2.0.1" } }, "node_modules/lru-cache": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", - "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", - "engines": { - "node": "14 || >=16.14" - } + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "license": "ISC" + 
}, + "node_modules/lunr": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==", + "dev": true }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "license": "MIT", "dependencies": { "semver": "^6.0.0" }, @@ -4590,6 +5090,8 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -4598,6 +5100,7 @@ "version": "13.0.1", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "license": "ISC", "dependencies": { "@npmcli/agent": "^2.0.0", "cacache": "^18.0.0", @@ -4616,14 +5119,56 @@ "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/make-fetch-happen/node_modules/proc-log": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", - "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" 
+ }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "dev": true + }, "node_modules/memorystream": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", @@ -4632,10 +5177,122 @@ "node": ">= 0.10.0" } }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": 
"sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz", + "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz", + "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": 
"https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz", + "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -4648,6 +5305,7 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -4656,6 +5314,7 @@ "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "license": "ISC", "engines": { "node": ">=16 || 14 >=14.17" } @@ -4664,6 +5323,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz", "integrity": 
"sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==", + "license": "ISC", "dependencies": { "minipass": "^7.0.3" }, @@ -4675,6 +5335,7 @@ "version": "3.0.5", "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz", "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", + "license": "MIT", "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", @@ -4691,6 +5352,7 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "license": "ISC", "dependencies": { "minipass": "^3.0.0" }, @@ -4702,6 +5364,7 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -4713,6 +5376,7 @@ "version": "1.2.4", "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "license": "ISC", "dependencies": { "minipass": "^3.0.0" }, @@ -4724,6 +5388,7 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -4735,6 +5400,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "license": "ISC", "dependencies": { "minipass": "^3.0.0" }, @@ -4746,6 +5412,7 @@ "version": 
"3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -4757,6 +5424,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "license": "MIT", "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" @@ -4769,6 +5437,7 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -4780,6 +5449,7 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "license": "MIT", "bin": { "mkdirp": "bin/cmd.js" }, @@ -4788,31 +5458,32 @@ } }, "node_modules/mocha": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.4.0.tgz", - "integrity": "sha512-eqhGB8JKapEYcC4ytX/xrzKforgEc3j1pGlAXVy3eRwrtAy5/nIfT1SvgGzfN0XZZxeLq0aQWkOUAmqIJiv+bA==", - "dev": true, - "dependencies": { - "ansi-colors": "4.1.1", - "browser-stdout": "1.3.1", - "chokidar": "3.5.3", - "debug": "4.3.4", - "diff": "5.0.0", - "escape-string-regexp": "4.0.0", - "find-up": "5.0.0", - "glob": "8.1.0", - "he": "1.2.0", - "js-yaml": "4.1.0", - "log-symbols": "4.1.0", - "minimatch": "5.0.1", - "ms": "2.1.3", - "serialize-javascript": "6.0.0", - "strip-json-comments": "3.1.1", - "supports-color": "8.1.1", - "workerpool": "6.2.1", - "yargs": "16.2.0", - "yargs-parser": "20.2.4", - "yargs-unparser": "2.0.0" + "version": "10.7.3", + "resolved": 
"https://registry.npmjs.org/mocha/-/mocha-10.7.3.tgz", + "integrity": "sha512-uQWxAu44wwiACGqjbPYmjo7Lg8sFrS3dQe7PP2FQI+woptP4vZXSMcfMyFL/e1yFEeEpV4RtyTpZROOKmxis+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": "^4.0.0", + "find-up": "^5.0.0", + "glob": "^8.1.0", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9", + "yargs-unparser": "^2.0.0" }, "bin": { "_mocha": "bin/_mocha", @@ -4826,19 +5497,22 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/mocha-clean/-/mocha-clean-1.0.0.tgz", "integrity": "sha512-1GFhy+5nkz6lle/fpoTvgiXnObhpzy7VZYkg+zfmHf2Dewu99uCJ6ycVXa5UjFlhMBGE0CvdIot9yifb85g2gw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/mocha/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true + "dev": true, + "license": "Python-2.0" }, "node_modules/mocha/node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -4848,40 +5522,19 @@ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, + "license": "ISC", "dependencies": { - "string-width": 
"^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "node_modules/mocha/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" } }, - "node_modules/mocha/node_modules/debug/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, "node_modules/mocha/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -4894,6 +5547,7 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -4903,6 +5557,7 @@ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^2.0.1" }, @@ -4911,10 +5566,11 @@ } }, "node_modules/mocha/node_modules/minimatch": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", - "integrity": 
"sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -4922,17 +5578,12 @@ "node": ">=10" } }, - "node_modules/mocha/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true - }, "node_modules/mocha/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -4948,6 +5599,7 @@ "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, + "license": "MIT", "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", @@ -4962,25 +5614,29 @@ } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" }, "node_modules/nan": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", - "integrity": 
"sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==" + "version": "2.20.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.20.0.tgz", + "integrity": "sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==", + "license": "MIT" }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -4988,12 +5644,15 @@ "node_modules/nice-try": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "license": "MIT" }, "node_modules/node-fetch": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "license": "MIT", "dependencies": { "whatwg-url": "^5.0.0" }, @@ -5010,9 +5669,10 @@ } }, "node_modules/node-gyp": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.1.0.tgz", - "integrity": "sha512-B4J5M1cABxPc5PwfjhbV5hoy2DP9p8lFXASnEN6hugXOa61416tnTZ29x9sSwAd0o99XNIcpvDDy1swAExsVKA==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.2.0.tgz", + 
"integrity": "sha512-sp3FonBAaFe4aYTcFdZUn2NYkbP7xroPGYvQmP4Nl5PxamznItBnNCgjrVTKrEfQynInMsJvZrdmqUnysCJ8rw==", + "license": "MIT", "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", @@ -5020,9 +5680,9 @@ "graceful-fs": "^4.2.6", "make-fetch-happen": "^13.0.0", "nopt": "^7.0.0", - "proc-log": "^3.0.0", + "proc-log": "^4.1.0", "semver": "^7.3.5", - "tar": "^6.1.2", + "tar": "^6.2.1", "which": "^4.0.0" }, "bin": { @@ -5036,6 +5696,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz", "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", + "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } @@ -5044,41 +5705,16 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } }, - "node_modules/node-gyp/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/node-gyp/node_modules/cross-spawn/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/node-gyp/node_modules/foreground-child": { - "version": "3.1.1", - "resolved": 
"https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "license": "ISC", "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" @@ -5091,35 +5727,39 @@ } }, "node_modules/node-gyp/node_modules/glob": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", - "integrity": "sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "license": "ISC", "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/node-gyp/node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", + "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "license": "ISC", + "engines": { + "node": ">=16" + } }, "node_modules/node-gyp/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", 
- "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -5134,6 +5774,7 @@ "version": "7.2.1", "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz", "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==", + "license": "ISC", "dependencies": { "abbrev": "^2.0.0" }, @@ -5144,37 +5785,11 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/node-gyp/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/node-gyp/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/node-gyp/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/node-gyp/node_modules/signal-exit": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", 
+ "license": "ISC", "engines": { "node": ">=14" }, @@ -5182,11 +5797,27 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/node-gyp/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, "node_modules/node-preload": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", "dev": true, + "license": "MIT", "dependencies": { "process-on-spawn": "^1.0.0" }, @@ -5195,15 +5826,18 @@ } }, "node_modules/node-releases": { - "version": "2.0.14", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", - "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", - "dev": true + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", + "dev": true, + "license": "MIT" }, "node_modules/nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dev": true, + "license": "ISC", "dependencies": { "abbrev": "1" }, @@ -5218,6 +5852,7 @@ "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "license": "BSD-2-Clause", 
"dependencies": { "hosted-git-info": "^2.1.4", "resolve": "^1.10.0", @@ -5229,6 +5864,7 @@ "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "license": "ISC", "bin": { "semver": "bin/semver" } @@ -5238,6 +5874,7 @@ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -5246,6 +5883,7 @@ "version": "4.1.5", "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", + "license": "MIT", "dependencies": { "ansi-styles": "^3.2.1", "chalk": "^2.4.1", @@ -5266,11 +5904,80 @@ "node": ">= 4" } }, + "node_modules/npm-run-all/node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "license": "MIT", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/npm-run-all/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": 
"sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/npm-run-all/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-all/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-all/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, "node_modules/npmlog": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", "deprecated": "This package is no longer supported.", + "dev": true, + "license": "ISC", "dependencies": { "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", @@ -5283,6 +5990,7 @@ "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", "dev": true, + "license": "ISC", "dependencies": { "@istanbuljs/load-nyc-config": "^1.0.0", "@istanbuljs/schema": "^0.1.2", @@ 
-5324,6 +6032,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -5339,6 +6048,7 @@ "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", @@ -5350,6 +6060,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -5361,13 +6072,15 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/nyc/node_modules/find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -5382,6 +6095,7 @@ "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -5402,6 +6116,7 @@ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", "integrity": 
"sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^4.1.0" }, @@ -5414,6 +6129,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -5429,6 +6145,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.2.0" }, @@ -5441,6 +6158,7 @@ "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -5448,11 +6166,22 @@ "node": ">=8" } }, + "node_modules/nyc/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/nyc/node_modules/wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -5466,13 +6195,15 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", - "dev": true + "dev": true, 
+ "license": "ISC" }, "node_modules/nyc/node_modules/yargs": { "version": "15.4.1", "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", "dev": true, + "license": "MIT", "dependencies": { "cliui": "^6.0.0", "decamelize": "^1.2.0", @@ -5495,6 +6226,7 @@ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", "dev": true, + "license": "ISC", "dependencies": { "camelcase": "^5.0.0", "decamelize": "^1.2.0" @@ -5507,14 +6239,20 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -5523,6 +6261,7 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "license": "MIT", "engines": { "node": ">= 0.4" } @@ -5531,6 +6270,7 @@ "version": "4.1.5", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", "integrity": 
"sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.5", "define-properties": "^1.2.1", @@ -5549,6 +6289,7 @@ "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.8.tgz", "integrity": "sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -5563,6 +6304,7 @@ "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -5581,6 +6323,7 @@ "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -5590,28 +6333,12 @@ "node": ">= 0.4" } }, - "node_modules/object.hasown": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.4.tgz", - "integrity": "sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg==", - "dev": true, - "dependencies": { - "define-properties": "^1.2.1", - "es-abstract": "^1.23.2", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/object.values": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, + "license": "MIT", 
"dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -5628,15 +6355,30 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", "dependencies": { "wrappy": "1" } }, + "node_modules/oniguruma-to-js": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/oniguruma-to-js/-/oniguruma-to-js-0.4.3.tgz", + "integrity": "sha512-X0jWUcAlxORhOqqBREgPMgnshB7ZGYszBNspP+tS9hPD3l13CdaXcHbgImoHUHlrvGx/7AvFEkTRhAGYh+jzjQ==", + "dev": true, + "dependencies": { + "regex": "^4.3.2" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, + "license": "MIT", "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", @@ -5654,6 +6396,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, + "license": "MIT", "dependencies": { "yocto-queue": "^0.1.0" }, @@ -5669,6 +6412,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^3.0.2" }, @@ -5683,6 +6427,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -5698,6 +6443,7 @@ "resolved": 
"https://registry.npmjs.org/p-throttle/-/p-throttle-3.1.0.tgz", "integrity": "sha512-rLo81NXBihs3GJQhq89IXa0Egj/sbW1zW8/qnyadOwUhIUrZSUvyGdQ46ISRKELFBkVvmMJ4JUqWki4oAh30Qw==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -5707,6 +6453,7 @@ "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -5716,6 +6463,7 @@ "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", "dev": true, + "license": "ISC", "dependencies": { "graceful-fs": "^4.1.15", "hasha": "^5.0.0", @@ -5726,11 +6474,18 @@ "node": ">=8" } }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "license": "BlueOak-1.0.0" + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, + "license": "MIT", "dependencies": { "callsites": "^3.0.0" }, @@ -5742,6 +6497,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", "integrity": "sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==", + "license": "MIT", "dependencies": { "error-ex": "^1.3.1", "json-parse-better-errors": "^1.0.1" @@ -5755,6 +6511,7 @@ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -5763,27 +6520,32 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "license": "MIT" }, "node_modules/path-scurry": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "license": "BlueOak-1.0.0", "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" @@ -5799,6 +6561,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "license": "MIT", "dependencies": { "pify": 
"^3.0.0" }, @@ -5811,21 +6574,24 @@ "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", "dev": true, + "license": "MIT", "engines": { "node": "*" } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", - "dev": true + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", + "dev": true, + "license": "ISC" }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8.6" }, @@ -5837,6 +6603,7 @@ "version": "0.3.1", "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.1.tgz", "integrity": "sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==", + "license": "MIT", "bin": { "pidtree": "bin/pidtree.js" }, @@ -5848,6 +6615,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "license": "MIT", "engines": { "node": ">=4" } @@ -5857,6 +6625,7 @@ "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-3.1.0.tgz", "integrity": "sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==", "dev": true, + "license": "MIT", "dependencies": { "find-up": "^3.0.0", "load-json-file": "^5.2.0" @@ -5870,6 +6639,7 @@ "resolved": 
"https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^3.0.0" }, @@ -5882,6 +6652,7 @@ "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-5.3.0.tgz", "integrity": "sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==", "dev": true, + "license": "MIT", "dependencies": { "graceful-fs": "^4.1.15", "parse-json": "^4.0.0", @@ -5898,6 +6669,7 @@ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^3.0.0", "path-exists": "^3.0.0" @@ -5911,6 +6683,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -5926,6 +6699,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.0.0" }, @@ -5938,6 +6712,7 @@ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -5947,6 +6722,7 @@ "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -5956,6 +6732,7 @@ "resolved": 
"https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -5965,6 +6742,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=6" } @@ -5974,6 +6752,7 @@ "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dev": true, + "license": "MIT", "dependencies": { "find-up": "^4.0.0" }, @@ -5986,6 +6765,7 @@ "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -5999,6 +6779,7 @@ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^4.1.0" }, @@ -6011,6 +6792,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -6026,6 +6808,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.2.0" }, @@ -6037,6 +6820,7 @@ "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "license": "MIT", "engines": { "node": ">= 0.4" } @@ -6046,14 +6830,16 @@ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } }, "node_modules/proc-log": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-3.0.0.tgz", - "integrity": "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", + "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "license": "ISC", "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } @@ -6063,6 +6849,7 @@ "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", "dev": true, + "license": "MIT", "dependencies": { "fromentries": "^1.2.0" }, @@ -6074,6 +6861,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "license": "MIT", "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" @@ -6087,17 +6875,38 @@ "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "dev": true, + "license": "MIT", "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", 
"react-is": "^16.13.1" } }, + "node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "dev": true, "engines": { "node": ">=6" } @@ -6120,13 +6929,15 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "dev": true, + "license": "MIT", "dependencies": { "safe-buffer": "^5.1.0" } @@ -6135,12 +6946,14 @@ "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/read-pkg": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", + "license": "MIT", "dependencies": { 
"load-json-file": "^4.0.0", "normalize-package-data": "^2.3.2", @@ -6154,6 +6967,8 @@ "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "license": "MIT", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -6168,6 +6983,7 @@ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dev": true, + "license": "MIT", "dependencies": { "picomatch": "^2.2.1" }, @@ -6180,6 +6996,7 @@ "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -6196,10 +7013,17 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/regex": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/regex/-/regex-4.3.3.tgz", + "integrity": "sha512-r/AadFO7owAq1QJVeZ/nq9jNS1vyZt+6t1p/E59B56Rn2GCya+gr1KSyOzNL/er+r+B7phv5jG2xU2Nz1YkmJg==", + "dev": true + }, "node_modules/regexp.prototype.flags": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.6", "define-properties": "^1.2.1", @@ -6218,6 +7042,7 @@ "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -6230,6 
+7055,7 @@ "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", "integrity": "sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==", "dev": true, + "license": "ISC", "dependencies": { "es6-error": "^4.0.1" }, @@ -6242,6 +7068,7 @@ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -6250,12 +7077,14 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/resolve": { "version": "1.22.8", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "license": "MIT", "dependencies": { "is-core-module": "^2.13.0", "path-parse": "^1.0.7", @@ -6269,18 +7098,20 @@ } }, "node_modules/resolve-from": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=4" } }, "node_modules/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", "integrity": 
"sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "license": "MIT", "engines": { "node": ">= 4" } @@ -6290,6 +7121,7 @@ "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", "dev": true, + "license": "MIT", "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" @@ -6300,6 +7132,8 @@ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", "dependencies": { "glob": "^7.1.3" }, @@ -6315,6 +7149,8 @@ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -6349,6 +7185,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "queue-microtask": "^1.2.2" } @@ -6357,6 +7194,7 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "get-intrinsic": "^1.2.4", @@ -6374,6 +7212,7 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, "funding": [ { "type": "github", @@ -6387,12 +7226,14 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": 
"MIT" }, "node_modules/safe-regex-test": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -6409,18 +7250,21 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT", "optional": true }, "node_modules/seedrandom": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.4.tgz", "integrity": "sha512-9A+PDmgm+2du77B5i0Ip2cxOqqHjgNxnBgglxLcX78A2D6c2rTo61z4jnVABpF4cKeDMDG+cmXXvdnqse2VqMA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -6429,10 +7273,11 @@ } }, "node_modules/serialize-javascript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", - "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { 
"randombytes": "^2.1.0" } @@ -6440,12 +7285,15 @@ "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true, + "license": "ISC" }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "license": "MIT", "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", @@ -6462,6 +7310,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "license": "MIT", "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", @@ -6471,38 +7320,56 @@ "engines": { "node": ">= 0.4" } - }, - "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", "dependencies": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shell-quote": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz", "integrity": "sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/shiki": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.21.0.tgz", + "integrity": "sha512-apCH5BoWTrmHDPGgg3RF8+HAAbEL/CdbYr8rMw7eIrdhCkZHdVGat5mMNlRtd1erNG01VPMIKHNQ0Pj2HMAiog==", + "dev": true, + "dependencies": { + "@shikijs/core": "1.21.0", + "@shikijs/engine-javascript": "1.21.0", + "@shikijs/engine-oniguruma": "1.21.0", + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2", + "@types/hast": "^3.0.4" + } + }, "node_modules/side-channel": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "es-errors": "^1.3.0", @@ -6519,12 +7386,15 @@ "node_modules/signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": 
true, + "license": "ISC" }, "node_modules/smart-buffer": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "license": "MIT", "engines": { "node": ">= 6.0.0", "npm": ">= 3.0.0" @@ -6534,6 +7404,7 @@ "version": "2.8.3", "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", + "license": "MIT", "dependencies": { "ip-address": "^9.0.5", "smart-buffer": "^4.2.0" @@ -6544,13 +7415,14 @@ } }, "node_modules/socks-proxy-agent": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.3.tgz", - "integrity": "sha512-VNegTZKhuGq5vSD6XNKlbqWhyt/40CgoEw8XxD6dhnm8Jq9IEa3nIa4HwnM8XOqU0CdB0BwWVXusqiFXfHB3+A==", + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.4.tgz", + "integrity": "sha512-GNAq/eg8Udq2x0eNiFkr9gRg5bA7PXEWagQdeRX4cPSG+X/8V38v637gim9bjFptMk1QWsCTr0ttrJEiXbNnRw==", + "license": "MIT", "dependencies": { "agent-base": "^7.1.1", "debug": "^4.3.4", - "socks": "^2.7.1" + "socks": "^2.8.3" }, "engines": { "node": ">= 14" @@ -6560,6 +7432,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "license": "MIT", "dependencies": { "debug": "^4.3.4" }, @@ -6572,15 +7445,27 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" } }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/spawn-wrap": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", "dev": true, + "license": "ISC", "dependencies": { "foreground-child": "^2.0.0", "is-windows": "^1.0.2", @@ -6593,31 +7478,11 @@ "node": ">=8" } }, - "node_modules/spawn-wrap/node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, - "node_modules/spawn-wrap/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/spdx-correct": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "license": "Apache-2.0", "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" @@ -6626,32 +7491,37 @@ "node_modules/spdx-exceptions": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==" + 
"integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "license": "CC-BY-3.0" }, "node_modules/spdx-expression-parse": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "license": "MIT", "dependencies": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" } }, "node_modules/spdx-license-ids": { - "version": "3.0.18", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.18.tgz", - "integrity": "sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==" + "version": "3.0.20", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.20.tgz", + "integrity": "sha512-jg25NiDV/1fLtSgEgyvVyDunvaNHbuwF9lfNV17gSmPFAlYzdfNBlLtLzXTevwkPj7DhGbmN9VnmJIgLnhvaBw==", + "license": "CC0-1.0" }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true + "dev": true, + "license": "BSD-3-Clause" }, "node_modules/ssri": { "version": "10.0.6", "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz", "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", + "license": "ISC", "dependencies": { "minipass": "^7.0.3" }, @@ -6660,9 +7530,9 @@ } }, "node_modules/standard": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/standard/-/standard-17.1.0.tgz", - "integrity": "sha512-jaDqlNSzLtWYW4lvQmU0EnxWMUGQiwHasZl5ZEIwx3S/ijZDjZOzs1y1QqKwKs5vqnFpGtizo4NOYX2s0Voq/g==", + "version": "17.1.2", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.1.2.tgz", + "integrity": 
"sha512-WLm12WoXveKkvnPnPnaFUUHuOB2cUdAsJ4AiGHL2G0UNMrcRAWY2WriQaV8IQ3oRmYr0AWUbLNr94ekYFAHOrA==", "dev": true, "funding": [ { @@ -6678,6 +7548,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "eslint": "^8.41.0", "eslint-config-standard": "17.1.0", @@ -6685,8 +7556,8 @@ "eslint-plugin-import": "^2.27.5", "eslint-plugin-n": "^15.7.0", "eslint-plugin-promise": "^6.1.1", - "eslint-plugin-react": "^7.32.2", - "standard-engine": "^15.0.0", + "eslint-plugin-react": "^7.36.1", + "standard-engine": "^15.1.0", "version-guard": "^1.1.1" }, "bin": { @@ -6715,6 +7586,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "get-stdin": "^8.0.0", "minimist": "^1.2.6", @@ -6730,6 +7602,7 @@ "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", "dev": true, + "license": "MIT", "dependencies": { "stubs": "^3.0.0" } @@ -6738,6 +7611,8 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "license": "MIT", "dependencies": { "safe-buffer": "~5.2.0" } @@ -6746,6 +7621,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -6760,6 +7636,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", 
@@ -6774,6 +7651,7 @@ "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz", "integrity": "sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -6799,6 +7677,7 @@ "version": "3.1.6", "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.6.tgz", "integrity": "sha512-XZpspuSB7vJWhvJc9DLSlrXl1mcA2BdoY5jjnS135ydXqLoqhs96JjDtCkjJEQHvfqZIp9hBuBMgI589peyx9Q==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -6812,10 +7691,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, "node_modules/string.prototype.trim": { "version": "1.2.9", "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -6833,6 +7724,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -6846,6 +7738,7 @@ "version": "1.0.8", "resolved": 
"https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -6858,10 +7751,25 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "dev": true, + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" }, @@ -6874,6 +7782,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" }, @@ -6886,6 +7795,7 @@ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -6895,6 +7805,7 @@ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, + "license": "MIT", "engines": { "node": 
">=8" }, @@ -6906,12 +7817,14 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "license": "MIT", "dependencies": { "has-flag": "^3.0.0" }, @@ -6923,6 +7836,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -6934,6 +7848,7 @@ "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "license": "ISC", "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -6950,6 +7865,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "license": "ISC", "dependencies": { "minipass": "^3.0.0" }, @@ -6961,6 +7877,7 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -6972,6 +7889,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", "integrity": 
"sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "license": "ISC", "engines": { "node": ">=8" } @@ -6981,6 +7899,7 @@ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.1.1.tgz", "integrity": "sha512-iwY6rkW5DDGq8hE2YgNQlKbptYpY5Nn2xecjQiNjOXWbKzPGUfmeUBCSQbbr306d7Z7U2N0TPl+/SwYRfua1Dg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", @@ -6997,6 +7916,7 @@ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", "dev": true, + "license": "MIT", "dependencies": { "@tootallnate/once": "1", "agent-base": "6", @@ -7011,6 +7931,7 @@ "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, + "license": "ISC", "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", @@ -7026,6 +7947,7 @@ "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -7045,13 +7967,15 @@ "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/tmp": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", "dev": true, + "license": "MIT", "engines": { "node": ">=14.14" } @@ -7061,6 +7985,7 
@@ "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -7070,6 +7995,7 @@ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, + "license": "MIT", "dependencies": { "is-number": "^7.0.0" }, @@ -7080,13 +8006,38 @@ "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } }, "node_modules/tsconfig-paths": { "version": "3.15.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", "dev": true, + "license": "MIT", "dependencies": { "@types/json5": "^0.0.29", 
"json5": "^1.0.2", @@ -7099,6 +8050,7 @@ "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", "dev": true, + "license": "MIT", "dependencies": { "minimist": "^1.2.0" }, @@ -7111,6 +8063,7 @@ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -7120,6 +8073,7 @@ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, + "license": "MIT", "dependencies": { "prelude-ls": "^1.2.1" }, @@ -7128,10 +8082,11 @@ } }, "node_modules/type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", + "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -7141,6 +8096,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=8" } @@ -7149,6 +8105,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", 
"es-errors": "^1.3.0", @@ -7162,6 +8119,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", @@ -7180,6 +8138,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.7", @@ -7199,6 +8158,7 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", @@ -7219,14 +8179,128 @@ "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", "dev": true, + "license": "MIT", "dependencies": { "is-typedarray": "^1.0.0" } }, + "node_modules/typedoc": { + "version": "0.26.7", + "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.26.7.tgz", + "integrity": "sha512-gUeI/Wk99vjXXMi8kanwzyhmeFEGv1LTdTQsiyIsmSYsBebvFxhbcyAx7Zjo4cMbpLGxM4Uz3jVIjksu/I2v6Q==", + "dev": true, + "dependencies": { + "lunr": "^2.3.9", + "markdown-it": "^14.1.0", + "minimatch": "^9.0.5", + "shiki": "^1.16.2", + "yaml": "^2.5.1" + }, + "bin": { + "typedoc": "bin/typedoc" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "typescript": "4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x" + } + }, + 
"node_modules/typedoc-plugin-rename-defaults": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/typedoc-plugin-rename-defaults/-/typedoc-plugin-rename-defaults-0.7.1.tgz", + "integrity": "sha512-hgg4mAy5IumgUmPOnVVGmGywjTGtUCmRJ2jRbseqtXdlUuYKj652ODL9joUWFt5uvNu4Dr/pNILc/qsKGHJw+w==", + "dev": true, + "dependencies": { + "camelcase": "^8.0.0" + }, + "peerDependencies": { + "typedoc": ">=0.22.x <0.27.x" + } + }, + "node_modules/typedoc-plugin-rename-defaults/node_modules/camelcase": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-8.0.0.tgz", + "integrity": "sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedoc/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/typedoc/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/typescript": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", + "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + 
"node": ">=14.17" + } + }, + "node_modules/typescript-eslint": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.8.1.tgz", + "integrity": "sha512-R0dsXFt6t4SAFjUSKFjMh4pXDtq04SsFKCVGDP3ZOzNP7itF0jBcZYU4fMsZr4y7O7V7Nc751dDeESbe4PbQMQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.8.1", + "@typescript-eslint/parser": "8.8.1", + "@typescript-eslint/utils": "8.8.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "dev": true + }, "node_modules/unbox-primitive": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-bigints": "^1.0.2", @@ -7238,15 +8312,17 @@ } }, "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" }, "node_modules/unique-filename": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz", 
"integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", + "license": "ISC", "dependencies": { "unique-slug": "^4.0.0" }, @@ -7258,6 +8334,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz", "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", + "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4" }, @@ -7265,10 +8342,78 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/update-browserslist-db": { - "version": "1.0.16", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz", - "integrity": "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "funding": [ { @@ -7284,9 +8429,10 @@ "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "dependencies": { - "escalade": "^3.1.2", - "picocolors": "^1.0.1" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -7300,6 +8446,7 @@ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" } @@ -7309,6 
+8456,7 @@ "resolved": "https://registry.npmjs.org/urlgrey/-/urlgrey-1.0.0.tgz", "integrity": "sha512-hJfIzMPJmI9IlLkby8QrsCykQ+SXDeO2W5Q9QTW3QpqZVTx4a/K7p8/5q+/isD8vsbVaFgql/gvAoQCRQ2Cb5w==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "fast-url-parser": "^1.1.3" } @@ -7316,13 +8464,16 @@ "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" }, "node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "dev": true, + "license": "MIT", "bin": { "uuid": "dist/bin/uuid" } @@ -7331,52 +8482,88 @@ "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "license": "Apache-2.0", "dependencies": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" } }, "node_modules/version-guard": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.2.tgz", - "integrity": "sha512-D8d+YxCUpoqtCnQzDxm6SF7DLU3gr2535T4khAtMq4osBahsQnmSxuwXFdrbAdDGG8Uokzfis/jvyeFPdmlc7w==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.3.tgz", + "integrity": "sha512-JwPr6erhX53EWH/HCSzfy1tTFrtPXUe927wdM1jqBBeYp1OM+qPHjWbsvv6pIBduqdgxxS+ScfG7S28pzyr2DQ==", "dev": true, + "license": "0BSD", "engines": { "node": ">=0.10.48" } }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": 
"https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "node_modules/which": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", - "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", "dependencies": { - "isexe": "^3.1.1" + "isexe": "^2.0.0" }, "bin": { - "node-which": "bin/which.js" + "node-which": "bin/node-which" }, "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": ">= 8" } }, "node_modules/which-boxed-primitive": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "license": "MIT", "dependencies": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", @@ -7389,13 +8576,14 @@ } }, "node_modules/which-builtin-type": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz", - "integrity": "sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.4.tgz", + "integrity": "sha512-bppkmBSsHFmIMSl8BO9TbsyzsvGjVoppt8xUiGzwiu/bhDCGxnpOKCxgqj6GuyHE0mINMDecBFPlOm2hzY084w==", "dev": true, + "license": "MIT", "dependencies": { - "function.prototype.name": "^1.1.5", - "has-tostringtag": "^1.0.0", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", "is-async-function": "^2.0.0", "is-date-object": "^1.0.5", "is-finalizationregistry": "^1.0.2", @@ -7404,8 +8592,8 @@ "is-weakref": "^1.0.2", "isarray": "^2.0.5", "which-boxed-primitive": "^1.0.2", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.9" + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -7419,6 +8607,7 @@ "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", "dev": true, + 
"license": "MIT", "dependencies": { "is-map": "^2.0.3", "is-set": "^2.0.3", @@ -7436,12 +8625,14 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/which-typed-array": { "version": "1.1.15", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.7", @@ -7460,6 +8651,8 @@ "version": "1.1.5", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "license": "ISC", "dependencies": { "string-width": "^1.0.2 || 2 || 3 || 4" } @@ -7469,21 +8662,24 @@ "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/workerpool": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", - "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", - "dev": true + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", + "dev": true, + "license": "Apache-2.0" }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -7501,6 +8697,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -7517,6 +8714,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -7531,6 +8729,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -7541,13 +8740,15 @@ "node_modules/wrap-ansi-cjs/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" }, "node_modules/wrap-ansi/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -7563,6 +8764,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -7574,18 +8776,22 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" }, "node_modules/write-file-atomic": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", "dev": true, + "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4", "is-typedarray": "^1.0.0", @@ -7598,6 +8804,7 @@ "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -7607,6 +8814,7 @@ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } @@ -7614,13 +8822,27 @@ "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + 
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", + "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } }, "node_modules/yargs": { "version": "17.7.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dev": true, + "license": "MIT", "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -7635,10 +8857,11 @@ } }, "node_modules/yargs-parser": { - "version": "20.2.4", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", - "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } @@ -7648,6 +8871,7 @@ "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", "dev": true, + "license": "MIT", "dependencies": { "camelcase": "^6.0.0", "decamelize": "^4.0.0", @@ -7663,6 +8887,7 @@ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -7675,6 +8900,7 @@ "resolved": 
"https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -7687,6 +8913,7 @@ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "dev": true, + "license": "ISC", "engines": { "node": ">=12" } @@ -7696,12 +8923,23 @@ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } } } } diff --git a/package.json b/package.json index 68ee55c85..0919a3f2a 100644 --- a/package.json +++ b/package.json @@ -41,7 +41,8 @@ "preinstall": "npm install @mapbox/node-pre-gyp", "install": "npm run build", "build": "node-pre-gyp install --fallback-to-build", - "test": "mocha", + "test": "mocha test", + "ts-test": "rm -rf ts-test/dist; cd ts-test; tsc ; cp tests/udf.lua dist/udf.lua ; mocha dist", "test-dry-run": "mocha --dry-run", "test-noserver": "GLOBAL_CLIENT=false mocha -g '#noserver'", "lint": "standard", @@ -50,13 +51,14 @@ "codecov": "codecov", "prepush": "npm test", "apidocs": "jsdoc -c jsdoc.json", + "build-docs": "npx typedoc --plugin typedoc-plugin-rename-defaults --entryPointStrategy expand typings/index.d.ts", "cppcheck": "cppcheck --quiet --enable=warning,style -I 
src/include src/main/", "valgrind": "valgrind node ./node_modules/mocha/bin/_mocha -R dot -g '#slow' -i", "prepare": "husky install", "removeExtraBinaries": "node ./scripts/prebuiltBinding.js" }, "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.11", + "ansi-colors": "^4.1.3", "bindings": "^1.5.0", "minimatch": "^3.1.2", "nan": "^2.19.0", @@ -64,11 +66,15 @@ "npm-run-all": "^4.1.5" }, "devDependencies": { - "@types/node": "^20.12.7", + "@eslint/js": "^9.12.0", + "@mapbox/node-pre-gyp": "^1.0.11", + "@types/node": "^22.7.4", "chai": "^4.4.1", "choma": "^1.2.1", "codecov": "^3.8.3", "deep-eql": "^4.1.3", + "eslint": "^8.57.1", + "globals": "^15.11.0", "husky": "^9.0.11", "mocha": "^10.4.0", "mocha-clean": "^1.0.0", @@ -77,6 +83,10 @@ "semver": "^7.6.0", "standard": "^17.1.0", "tmp": "^0.2.3", + "typedoc": "^0.26.7", + "typedoc-plugin-rename-defaults": "^0.7.1", + "typescript": "^5.6.2", + "typescript-eslint": "^8.8.1", "yargs": "^17.7.2" }, "standard": { diff --git a/scripts/build-c-client.sh-cclient-output.log b/scripts/build-c-client.sh-cclient-output.log new file mode 100644 index 000000000..6176aa562 --- /dev/null +++ b/scripts/build-c-client.sh-cclient-output.log @@ -0,0 +1,371 @@ +make -e -C /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common libaerospike-common.a +make[1]: Entering directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common' +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_aerospike.o -c src/main/aerospike/as_aerospike.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o 
target/Linux-x86_64/obj/common/aerospike/as_arraylist.o -c src/main/aerospike/as_arraylist.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_arraylist_hooks.o -c src/main/aerospike/as_arraylist_hooks.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_arraylist_iterator.o -c src/main/aerospike/as_arraylist_iterator.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_arraylist_iterator_hooks.o -c src/main/aerospike/as_arraylist_iterator_hooks.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_boolean.o -c src/main/aerospike/as_boolean.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_buffer.o -c src/main/aerospike/as_buffer.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include 
-march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_buffer_pool.o -c src/main/aerospike/as_buffer_pool.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_bytes.o -c src/main/aerospike/as_bytes.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_double.o -c src/main/aerospike/as_double.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_geojson.o -c src/main/aerospike/as_geojson.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_integer.o -c src/main/aerospike/as_integer.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_iterator.o -c src/main/aerospike/as_iterator.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona 
-finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_list.o -c src/main/aerospike/as_list.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_log.o -c src/main/aerospike/as_log.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_map.o -c src/main/aerospike/as_map.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_module.o -c src/main/aerospike/as_module.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_msgpack.o -c src/main/aerospike/as_msgpack.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_msgpack_ext.o -c src/main/aerospike/as_msgpack_ext.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o 
target/Linux-x86_64/obj/common/aerospike/as_msgpack_serializer.o -c src/main/aerospike/as_msgpack_serializer.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_nil.o -c src/main/aerospike/as_nil.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_orderedmap.o -c src/main/aerospike/as_orderedmap.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_pair.o -c src/main/aerospike/as_pair.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_password.o -c src/main/aerospike/as_password.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_queue.o -c src/main/aerospike/as_queue.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o 
target/Linux-x86_64/obj/common/aerospike/as_queue_mt.o -c src/main/aerospike/as_queue_mt.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_random.o -c src/main/aerospike/as_random.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_rec.o -c src/main/aerospike/as_rec.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_result.o -c src/main/aerospike/as_result.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_serializer.o -c src/main/aerospike/as_serializer.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_stream.o -c src/main/aerospike/as_stream.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o 
target/Linux-x86_64/obj/common/aerospike/as_string.o -c src/main/aerospike/as_string.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_string_builder.o -c src/main/aerospike/as_string_builder.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_thread_pool.o -c src/main/aerospike/as_thread_pool.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_timer.o -c src/main/aerospike/as_timer.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_val.o -c src/main/aerospike/as_val.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/as_vector.o -c src/main/aerospike/as_vector.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o 
target/Linux-x86_64/obj/common/aerospike/crypt_blowfish.o -c src/main/aerospike/crypt_blowfish.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/aerospike/ssl_util.o -c src/main/aerospike/ssl_util.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_alloc.o -c src/main/citrusleaf/cf_alloc.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_b64.o -c src/main/citrusleaf/cf_b64.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_clock.o -c src/main/citrusleaf/cf_clock.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_crypto.o -c src/main/citrusleaf/cf_crypto.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o 
target/Linux-x86_64/obj/common/citrusleaf/cf_digest.o -c src/main/citrusleaf/cf_digest.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_ll.o -c src/main/citrusleaf/cf_ll.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_queue.o -c src/main/citrusleaf/cf_queue.c +cc -Isrc/include -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -march=nocona -finline-functions -rdynamic -o target/Linux-x86_64/obj/common/citrusleaf/cf_random.o -c src/main/citrusleaf/cf_random.c +ar rcs target/Linux-x86_64/lib/libaerospike-common.a target/Linux-x86_64/obj/common/aerospike/as_aerospike.o target/Linux-x86_64/obj/common/aerospike/as_arraylist.o target/Linux-x86_64/obj/common/aerospike/as_arraylist_hooks.o target/Linux-x86_64/obj/common/aerospike/as_arraylist_iterator.o target/Linux-x86_64/obj/common/aerospike/as_arraylist_iterator_hooks.o target/Linux-x86_64/obj/common/aerospike/as_boolean.o target/Linux-x86_64/obj/common/aerospike/as_buffer.o target/Linux-x86_64/obj/common/aerospike/as_buffer_pool.o target/Linux-x86_64/obj/common/aerospike/as_bytes.o target/Linux-x86_64/obj/common/aerospike/as_double.o target/Linux-x86_64/obj/common/aerospike/as_geojson.o target/Linux-x86_64/obj/common/aerospike/as_integer.o target/Linux-x86_64/obj/common/aerospike/as_iterator.o target/Linux-x86_64/obj/common/aerospike/as_list.o target/Linux-x86_64/obj/common/aerospike/as_log.o 
target/Linux-x86_64/obj/common/aerospike/as_map.o target/Linux-x86_64/obj/common/aerospike/as_module.o target/Linux-x86_64/obj/common/aerospike/as_msgpack.o target/Linux-x86_64/obj/common/aerospike/as_msgpack_ext.o target/Linux-x86_64/obj/common/aerospike/as_msgpack_serializer.o target/Linux-x86_64/obj/common/aerospike/as_nil.o target/Linux-x86_64/obj/common/aerospike/as_orderedmap.o target/Linux-x86_64/obj/common/aerospike/as_pair.o target/Linux-x86_64/obj/common/aerospike/as_password.o target/Linux-x86_64/obj/common/aerospike/as_queue.o target/Linux-x86_64/obj/common/aerospike/as_queue_mt.o target/Linux-x86_64/obj/common/aerospike/as_random.o target/Linux-x86_64/obj/common/aerospike/as_rec.o target/Linux-x86_64/obj/common/aerospike/as_result.o target/Linux-x86_64/obj/common/aerospike/as_serializer.o target/Linux-x86_64/obj/common/aerospike/as_stream.o target/Linux-x86_64/obj/common/aerospike/as_string.o target/Linux-x86_64/obj/common/aerospike/as_string_builder.o target/Linux-x86_64/obj/common/aerospike/as_thread_pool.o target/Linux-x86_64/obj/common/aerospike/as_timer.o target/Linux-x86_64/obj/common/aerospike/as_val.o target/Linux-x86_64/obj/common/aerospike/as_vector.o target/Linux-x86_64/obj/common/aerospike/crypt_blowfish.o target/Linux-x86_64/obj/common/aerospike/ssl_util.o target/Linux-x86_64/obj/common/citrusleaf/cf_alloc.o target/Linux-x86_64/obj/common/citrusleaf/cf_b64.o target/Linux-x86_64/obj/common/citrusleaf/cf_clock.o target/Linux-x86_64/obj/common/citrusleaf/cf_crypto.o target/Linux-x86_64/obj/common/citrusleaf/cf_digest.o target/Linux-x86_64/obj/common/citrusleaf/cf_ll.o target/Linux-x86_64/obj/common/citrusleaf/cf_queue.o target/Linux-x86_64/obj/common/citrusleaf/cf_random.o +make[1]: Leaving directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common' +make -e -C /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua COMMON=/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common 
LUAMOD=/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua EXT_CFLAGS=-DAS_MOD_LUA_CLIENT +make[1]: Entering directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua' +make -C /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua CFLAGS="-Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g" a +make[2]: Entering directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua' +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lapi.o lapi.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lcode.o lcode.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lctype.o lctype.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ldebug.o ldebug.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ldo.o ldo.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ldump.o ldump.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lfunc.o lfunc.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lgc.o lgc.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector 
-fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o llex.o llex.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lmem.o lmem.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lobject.o lobject.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lopcodes.o lopcodes.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lparser.o lparser.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lstate.o lstate.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lstring.o lstring.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ltable.o ltable.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ltm.o ltm.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lundump.o lundump.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lvm.o lvm.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC 
-fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lzio.o lzio.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ltests.o ltests.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lauxlib.o lauxlib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lbaselib.o lbaselib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ldblib.o ldblib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o liolib.o liolib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lmathlib.o lmathlib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o loslib.o loslib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o ltablib.o ltablib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lstrlib.o lstrlib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lutf8lib.o lutf8lib.c +gcc -Wall -O2 
-std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o loadlib.o loadlib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o lcorolib.o lcorolib.c +gcc -Wall -O2 -std=c99 -DLUA_USE_LINUX -fPIC -fno-stack-protector -fno-common -march=nocona -g -I/usr/local/opt/openssl/include:-I/usr/local/opt/zlib/include -c -o linit.o linit.c +ar rc liblua.a lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o ltm.o lundump.o lvm.o lzio.o ltests.o lauxlib.o lbaselib.o ldblib.o liolib.o lmathlib.o loslib.o ltablib.o lstrlib.o lutf8lib.o loadlib.o lcorolib.o linit.o +ranlib liblua.a +make[2]: Leaving directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua' +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua.o -c src/main/mod_lua.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_aerospike.o -c src/main/mod_lua_aerospike.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 
-fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_bytes.o -c src/main/mod_lua_bytes.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_geojson.o -c src/main/mod_lua_geojson.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_iterator.o -c src/main/mod_lua_iterator.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_list.o -c src/main/mod_lua_list.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_map.o -c src/main/mod_lua_map.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_record.o -c src/main/mod_lua_record.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_reg.o -c src/main/mod_lua_reg.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_stream.o -c src/main/mod_lua_stream.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT -finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_system.o -c src/main/mod_lua_system.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -DAS_MOD_LUA_CLIENT -DAS_MOD_LUA_CLIENT 
-finline-functions -rdynamic -o target/Linux-x86_64/obj/mod_lua_val.o -c src/main/mod_lua_val.c +make -e -C /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common prepare +make[2]: Entering directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common' +cp src/include/aerospike/as_map_iterator.h target/Linux-x86_64/include/aerospike/as_map_iterator.h +cp src/include/aerospike/as_double.h target/Linux-x86_64/include/aerospike/as_double.h +cp src/include/aerospike/as_buffer_pool.h target/Linux-x86_64/include/aerospike/as_buffer_pool.h +cp src/include/aerospike/as_integer.h target/Linux-x86_64/include/aerospike/as_integer.h +cp src/include/aerospike/as_types.h target/Linux-x86_64/include/aerospike/as_types.h +cp src/include/aerospike/as_module.h target/Linux-x86_64/include/aerospike/as_module.h +cp src/include/aerospike/as_log_macros.h target/Linux-x86_64/include/aerospike/as_log_macros.h +cp src/include/aerospike/as_arch.h target/Linux-x86_64/include/aerospike/as_arch.h +cp src/include/aerospike/as_queue.h target/Linux-x86_64/include/aerospike/as_queue.h +cp src/include/aerospike/as_hashmap_iterator.h target/Linux-x86_64/include/aerospike/as_hashmap_iterator.h +cp src/include/aerospike/as_orderedmap.h target/Linux-x86_64/include/aerospike/as_orderedmap.h +cp src/include/aerospike/as_sleep.h target/Linux-x86_64/include/aerospike/as_sleep.h +cp src/include/aerospike/as_dir.h target/Linux-x86_64/include/aerospike/as_dir.h +cp src/include/aerospike/as_nil.h target/Linux-x86_64/include/aerospike/as_nil.h +cp src/include/aerospike/as_std.h target/Linux-x86_64/include/aerospike/as_std.h +cp src/include/aerospike/as_queue_mt.h target/Linux-x86_64/include/aerospike/as_queue_mt.h +cp src/include/aerospike/as_vector.h target/Linux-x86_64/include/aerospike/as_vector.h +cp src/include/aerospike/as_thread_pool.h target/Linux-x86_64/include/aerospike/as_thread_pool.h +cp src/include/aerospike/as_result.h 
target/Linux-x86_64/include/aerospike/as_result.h +cp src/include/aerospike/as_monitor.h target/Linux-x86_64/include/aerospike/as_monitor.h +cp src/include/aerospike/as_atomic_gcc.h target/Linux-x86_64/include/aerospike/as_atomic_gcc.h +cp src/include/aerospike/as_stream.h target/Linux-x86_64/include/aerospike/as_stream.h +cp src/include/aerospike/as_pair.h target/Linux-x86_64/include/aerospike/as_pair.h +cp src/include/aerospike/as_bytes.h target/Linux-x86_64/include/aerospike/as_bytes.h +cp src/include/aerospike/as_timer.h target/Linux-x86_64/include/aerospike/as_timer.h +cp src/include/aerospike/as_atomic_win.h target/Linux-x86_64/include/aerospike/as_atomic_win.h +cp src/include/aerospike/as_random.h target/Linux-x86_64/include/aerospike/as_random.h +cp src/include/aerospike/as_buffer.h target/Linux-x86_64/include/aerospike/as_buffer.h +cp src/include/aerospike/ssl_util.h target/Linux-x86_64/include/aerospike/ssl_util.h +cp src/include/aerospike/as_string_builder.h target/Linux-x86_64/include/aerospike/as_string_builder.h +cp src/include/aerospike/as_stringmap.h target/Linux-x86_64/include/aerospike/as_stringmap.h +cp src/include/aerospike/as_list.h target/Linux-x86_64/include/aerospike/as_list.h +cp src/include/aerospike/as_rec.h target/Linux-x86_64/include/aerospike/as_rec.h +cp src/include/aerospike/as_aerospike.h target/Linux-x86_64/include/aerospike/as_aerospike.h +cp src/include/aerospike/as_password.h target/Linux-x86_64/include/aerospike/as_password.h +cp src/include/aerospike/as_map.h target/Linux-x86_64/include/aerospike/as_map.h +cp src/include/aerospike/as_thread.h target/Linux-x86_64/include/aerospike/as_thread.h +cp src/include/aerospike/as_val.h target/Linux-x86_64/include/aerospike/as_val.h +cp src/include/aerospike/as_msgpack_serializer.h target/Linux-x86_64/include/aerospike/as_msgpack_serializer.h +cp src/include/aerospike/as_hashmap.h target/Linux-x86_64/include/aerospike/as_hashmap.h +cp src/include/aerospike/as_list_iterator.h 
target/Linux-x86_64/include/aerospike/as_list_iterator.h +cp src/include/aerospike/as_util.h target/Linux-x86_64/include/aerospike/as_util.h +cp src/include/aerospike/as_boolean.h target/Linux-x86_64/include/aerospike/as_boolean.h +cp src/include/aerospike/as_msgpack.h target/Linux-x86_64/include/aerospike/as_msgpack.h +cp src/include/aerospike/as_arraylist_iterator.h target/Linux-x86_64/include/aerospike/as_arraylist_iterator.h +cp src/include/aerospike/as_arraylist.h target/Linux-x86_64/include/aerospike/as_arraylist.h +cp src/include/aerospike/as_udf_context.h target/Linux-x86_64/include/aerospike/as_udf_context.h +cp src/include/aerospike/as_atomic.h target/Linux-x86_64/include/aerospike/as_atomic.h +cp src/include/aerospike/as_msgpack_ext.h target/Linux-x86_64/include/aerospike/as_msgpack_ext.h +cp src/include/aerospike/as_geojson.h target/Linux-x86_64/include/aerospike/as_geojson.h +cp src/include/aerospike/as_string.h target/Linux-x86_64/include/aerospike/as_string.h +cp src/include/aerospike/as_log.h target/Linux-x86_64/include/aerospike/as_log.h +cp src/include/aerospike/as_serializer.h target/Linux-x86_64/include/aerospike/as_serializer.h +cp src/include/aerospike/as_iterator.h target/Linux-x86_64/include/aerospike/as_iterator.h +cp src/include/citrusleaf/cf_ll.h target/Linux-x86_64/include/citrusleaf/cf_ll.h +cp src/include/citrusleaf/alloc.h target/Linux-x86_64/include/citrusleaf/alloc.h +cp src/include/citrusleaf/cf_b64.h target/Linux-x86_64/include/citrusleaf/cf_b64.h +cp src/include/citrusleaf/cf_random.h target/Linux-x86_64/include/citrusleaf/cf_random.h +cp src/include/citrusleaf/cf_crypto.h target/Linux-x86_64/include/citrusleaf/cf_crypto.h +cp src/include/citrusleaf/cf_hash_math.h target/Linux-x86_64/include/citrusleaf/cf_hash_math.h +cp src/include/citrusleaf/cf_queue.h target/Linux-x86_64/include/citrusleaf/cf_queue.h +cp src/include/citrusleaf/cf_clock.h target/Linux-x86_64/include/citrusleaf/cf_clock.h +cp src/include/citrusleaf/cf_digest.h 
target/Linux-x86_64/include/citrusleaf/cf_digest.h +cp src/include/citrusleaf/cf_byte_order.h target/Linux-x86_64/include/citrusleaf/cf_byte_order.h +make[2]: Leaving directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common' +ar rcs target/Linux-x86_64/lib/libmod_lua.a target/Linux-x86_64/obj/mod_lua.o target/Linux-x86_64/obj/mod_lua_aerospike.o target/Linux-x86_64/obj/mod_lua_bytes.o target/Linux-x86_64/obj/mod_lua_geojson.o target/Linux-x86_64/obj/mod_lua_iterator.o target/Linux-x86_64/obj/mod_lua_list.o target/Linux-x86_64/obj/mod_lua_map.o target/Linux-x86_64/obj/mod_lua_record.o target/Linux-x86_64/obj/mod_lua_reg.o target/Linux-x86_64/obj/mod_lua_stream.o target/Linux-x86_64/obj/mod_lua_system.o target/Linux-x86_64/obj/mod_lua_val.o +cc -shared -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -o target/Linux-x86_64/lib/libmod_lua.so target/Linux-x86_64/obj/mod_lua.o target/Linux-x86_64/obj/mod_lua_aerospike.o target/Linux-x86_64/obj/mod_lua_bytes.o target/Linux-x86_64/obj/mod_lua_geojson.o target/Linux-x86_64/obj/mod_lua_iterator.o target/Linux-x86_64/obj/mod_lua_list.o target/Linux-x86_64/obj/mod_lua_map.o target/Linux-x86_64/obj/mod_lua_record.o target/Linux-x86_64/obj/mod_lua_reg.o target/Linux-x86_64/obj/mod_lua_stream.o target/Linux-x86_64/obj/mod_lua_system.o target/Linux-x86_64/obj/mod_lua_val.o +cp -p src/include/aerospike/mod_lua_aerospike.h target/Linux-x86_64/include/aerospike/mod_lua_aerospike.h +cp -p src/include/aerospike/mod_lua_bytes.h target/Linux-x86_64/include/aerospike/mod_lua_bytes.h +cp -p src/include/aerospike/mod_lua_config.h target/Linux-x86_64/include/aerospike/mod_lua_config.h +cp -p src/include/aerospike/mod_lua_geojson.h target/Linux-x86_64/include/aerospike/mod_lua_geojson.h +cp -p src/include/aerospike/mod_lua.h target/Linux-x86_64/include/aerospike/mod_lua.h +cp -p 
src/include/aerospike/mod_lua_iterator.h target/Linux-x86_64/include/aerospike/mod_lua_iterator.h +cp -p src/include/aerospike/mod_lua_list.h target/Linux-x86_64/include/aerospike/mod_lua_list.h +cp -p src/include/aerospike/mod_lua_map.h target/Linux-x86_64/include/aerospike/mod_lua_map.h +cp -p src/include/aerospike/mod_lua_record.h target/Linux-x86_64/include/aerospike/mod_lua_record.h +cp -p src/include/aerospike/mod_lua_reg.h target/Linux-x86_64/include/aerospike/mod_lua_reg.h +cp -p src/include/aerospike/mod_lua_stream.h target/Linux-x86_64/include/aerospike/mod_lua_stream.h +cp -p src/include/aerospike/mod_lua_val.h target/Linux-x86_64/include/aerospike/mod_lua_val.h +make[1]: Leaving directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua' +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_aerospike.h target/Linux-x86_64/include/aerospike/as_aerospike.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_arch.h target/Linux-x86_64/include/aerospike/as_arch.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_arraylist.h target/Linux-x86_64/include/aerospike/as_arraylist.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_arraylist_iterator.h target/Linux-x86_64/include/aerospike/as_arraylist_iterator.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_atomic_gcc.h target/Linux-x86_64/include/aerospike/as_atomic_gcc.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_atomic.h target/Linux-x86_64/include/aerospike/as_atomic.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_atomic_win.h target/Linux-x86_64/include/aerospike/as_atomic_win.h +cp 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_boolean.h target/Linux-x86_64/include/aerospike/as_boolean.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_buffer.h target/Linux-x86_64/include/aerospike/as_buffer.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_buffer_pool.h target/Linux-x86_64/include/aerospike/as_buffer_pool.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_bytes.h target/Linux-x86_64/include/aerospike/as_bytes.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_dir.h target/Linux-x86_64/include/aerospike/as_dir.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_double.h target/Linux-x86_64/include/aerospike/as_double.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_geojson.h target/Linux-x86_64/include/aerospike/as_geojson.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_hashmap.h target/Linux-x86_64/include/aerospike/as_hashmap.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_hashmap_iterator.h target/Linux-x86_64/include/aerospike/as_hashmap_iterator.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_integer.h target/Linux-x86_64/include/aerospike/as_integer.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_iterator.h target/Linux-x86_64/include/aerospike/as_iterator.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_list.h target/Linux-x86_64/include/aerospike/as_list.h +cp 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_list_iterator.h target/Linux-x86_64/include/aerospike/as_list_iterator.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_log.h target/Linux-x86_64/include/aerospike/as_log.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_log_macros.h target/Linux-x86_64/include/aerospike/as_log_macros.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_map.h target/Linux-x86_64/include/aerospike/as_map.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_map_iterator.h target/Linux-x86_64/include/aerospike/as_map_iterator.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_module.h target/Linux-x86_64/include/aerospike/as_module.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_monitor.h target/Linux-x86_64/include/aerospike/as_monitor.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_msgpack_ext.h target/Linux-x86_64/include/aerospike/as_msgpack_ext.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_msgpack.h target/Linux-x86_64/include/aerospike/as_msgpack.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_msgpack_serializer.h target/Linux-x86_64/include/aerospike/as_msgpack_serializer.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_nil.h target/Linux-x86_64/include/aerospike/as_nil.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_orderedmap.h target/Linux-x86_64/include/aerospike/as_orderedmap.h +cp 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_pair.h target/Linux-x86_64/include/aerospike/as_pair.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_password.h target/Linux-x86_64/include/aerospike/as_password.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_queue.h target/Linux-x86_64/include/aerospike/as_queue.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_queue_mt.h target/Linux-x86_64/include/aerospike/as_queue_mt.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_random.h target/Linux-x86_64/include/aerospike/as_random.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_rec.h target/Linux-x86_64/include/aerospike/as_rec.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_result.h target/Linux-x86_64/include/aerospike/as_result.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_serializer.h target/Linux-x86_64/include/aerospike/as_serializer.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_sleep.h target/Linux-x86_64/include/aerospike/as_sleep.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_std.h target/Linux-x86_64/include/aerospike/as_std.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_stream.h target/Linux-x86_64/include/aerospike/as_stream.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_string_builder.h target/Linux-x86_64/include/aerospike/as_string_builder.h +cp 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_string.h target/Linux-x86_64/include/aerospike/as_string.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_stringmap.h target/Linux-x86_64/include/aerospike/as_stringmap.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_thread.h target/Linux-x86_64/include/aerospike/as_thread.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_thread_pool.h target/Linux-x86_64/include/aerospike/as_thread_pool.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_timer.h target/Linux-x86_64/include/aerospike/as_timer.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_types.h target/Linux-x86_64/include/aerospike/as_types.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_udf_context.h target/Linux-x86_64/include/aerospike/as_udf_context.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_util.h target/Linux-x86_64/include/aerospike/as_util.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_val.h target/Linux-x86_64/include/aerospike/as_val.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/as_vector.h target/Linux-x86_64/include/aerospike/as_vector.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/aerospike/ssl_util.h target/Linux-x86_64/include/aerospike/ssl_util.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/citrusleaf/alloc.h target/Linux-x86_64/include/citrusleaf/alloc.h +cp 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/citrusleaf/cf_b64.h target/Linux-x86_64/include/citrusleaf/cf_b64.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/citrusleaf/cf_byte_order.h target/Linux-x86_64/include/citrusleaf/cf_byte_order.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/citrusleaf/cf_clock.h target/Linux-x86_64/include/citrusleaf/cf_clock.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/citrusleaf/cf_ll.h target/Linux-x86_64/include/citrusleaf/cf_ll.h +cp /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include/citrusleaf/cf_queue.h target/Linux-x86_64/include/citrusleaf/cf_queue.h +make -e -C /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common prepare +make[1]: Entering directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common' +make[1]: Nothing to be done for 'prepare'. 
+make[1]: Leaving directory '/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common' +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/_bin.o -c src/main/aerospike/_bin.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike.o -c src/main/aerospike/aerospike.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_batch.o -c src/main/aerospike/aerospike_batch.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_index.o -c src/main/aerospike/aerospike_index.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_info.o -c src/main/aerospike/aerospike_info.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_key.o -c src/main/aerospike/aerospike_key.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV 
-finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_query.o -c src/main/aerospike/aerospike_query.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_scan.o -c src/main/aerospike/aerospike_scan.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_stats.o -c src/main/aerospike/aerospike_stats.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/aerospike_udf.o -c src/main/aerospike/aerospike_udf.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_address.o -c src/main/aerospike/as_address.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_admin.o -c src/main/aerospike/as_admin.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_async.o -c src/main/aerospike/as_async.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE 
-I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_batch.o -c src/main/aerospike/as_batch.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_bit_operations.o -c src/main/aerospike/as_bit_operations.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_cdt_ctx.o -c src/main/aerospike/as_cdt_ctx.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_cdt_internal.o -c src/main/aerospike/as_cdt_internal.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_command.o -c src/main/aerospike/as_command.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_config.o -c src/main/aerospike/as_config.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_cluster.o -c src/main/aerospike/as_cluster.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common 
-fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_error.o -c src/main/aerospike/as_error.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_event.o -c src/main/aerospike/as_event.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_event_ev.o -c src/main/aerospike/as_event_ev.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_event_uv.o -c src/main/aerospike/as_event_uv.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_event_event.o -c src/main/aerospike/as_event_event.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_event_none.o -c src/main/aerospike/as_event_none.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_exp_operations.o -c src/main/aerospike/as_exp_operations.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 
-fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_exp.o -c src/main/aerospike/as_exp.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_hll_operations.o -c src/main/aerospike/as_hll_operations.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_host.o -c src/main/aerospike/as_host.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_info.o -c src/main/aerospike/as_info.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_job.o -c src/main/aerospike/as_job.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_key.o -c src/main/aerospike/as_key.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_latency.o -c src/main/aerospike/as_latency.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing 
-D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_list_operations.o -c src/main/aerospike/as_list_operations.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_lookup.o -c src/main/aerospike/as_lookup.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_map_operations.o -c src/main/aerospike/as_map_operations.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_metrics.o -c src/main/aerospike/as_metrics.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_metrics_writer.o -c src/main/aerospike/as_metrics_writer.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_node.o -c src/main/aerospike/as_node.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_operations.o -c src/main/aerospike/as_operations.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common 
-fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_partition.o -c src/main/aerospike/as_partition.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_partition_tracker.o -c src/main/aerospike/as_partition_tracker.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_peers.o -c src/main/aerospike/as_peers.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_pipe.o -c src/main/aerospike/as_pipe.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_policy.o -c src/main/aerospike/as_policy.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_proto.o -c src/main/aerospike/as_proto.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_query.o -c src/main/aerospike/as_query.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing 
-D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_query_validate.o -c src/main/aerospike/as_query_validate.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_record.o -c src/main/aerospike/as_record.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_record_hooks.o -c src/main/aerospike/as_record_hooks.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_record_iterator.o -c src/main/aerospike/as_record_iterator.c +cc -Isrc/include 
-I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_scan.o -c src/main/aerospike/as_scan.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_shm_cluster.o -c src/main/aerospike/as_shm_cluster.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_socket.o -c src/main/aerospike/as_socket.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common 
-fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_tls.o -c src/main/aerospike/as_tls.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/as_udf.o -c src/main/aerospike/as_udf.c +cc -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -std=gnu99 -g -Wall -fPIC -O3 -fno-common -fno-strict-aliasing -D_FILE_OFFSET_BITS=64 -D_REENTRANT -D_GNU_SOURCE -I/home/dpelini/Documents/type-overhaul/libuv-v1.45.0/include -DAS_USE_LIBUV -finline-functions -rdynamic -o target/Linux-x86_64/obj/aerospike/version.o -c src/main/aerospike/version.c +ar rcs target/Linux-x86_64/lib/libaerospike.a target/Linux-x86_64/obj/aerospike/_bin.o target/Linux-x86_64/obj/aerospike/aerospike.o target/Linux-x86_64/obj/aerospike/aerospike_batch.o target/Linux-x86_64/obj/aerospike/aerospike_index.o target/Linux-x86_64/obj/aerospike/aerospike_info.o target/Linux-x86_64/obj/aerospike/aerospike_key.o target/Linux-x86_64/obj/aerospike/aerospike_query.o target/Linux-x86_64/obj/aerospike/aerospike_scan.o target/Linux-x86_64/obj/aerospike/aerospike_stats.o target/Linux-x86_64/obj/aerospike/aerospike_udf.o target/Linux-x86_64/obj/aerospike/as_address.o 
target/Linux-x86_64/obj/aerospike/as_admin.o target/Linux-x86_64/obj/aerospike/as_async.o target/Linux-x86_64/obj/aerospike/as_batch.o target/Linux-x86_64/obj/aerospike/as_bit_operations.o target/Linux-x86_64/obj/aerospike/as_cdt_ctx.o target/Linux-x86_64/obj/aerospike/as_cdt_internal.o target/Linux-x86_64/obj/aerospike/as_command.o target/Linux-x86_64/obj/aerospike/as_config.o target/Linux-x86_64/obj/aerospike/as_cluster.o target/Linux-x86_64/obj/aerospike/as_error.o target/Linux-x86_64/obj/aerospike/as_event.o target/Linux-x86_64/obj/aerospike/as_event_ev.o target/Linux-x86_64/obj/aerospike/as_event_uv.o target/Linux-x86_64/obj/aerospike/as_event_event.o target/Linux-x86_64/obj/aerospike/as_event_none.o target/Linux-x86_64/obj/aerospike/as_exp_operations.o target/Linux-x86_64/obj/aerospike/as_exp.o target/Linux-x86_64/obj/aerospike/as_hll_operations.o target/Linux-x86_64/obj/aerospike/as_host.o target/Linux-x86_64/obj/aerospike/as_info.o target/Linux-x86_64/obj/aerospike/as_job.o target/Linux-x86_64/obj/aerospike/as_key.o target/Linux-x86_64/obj/aerospike/as_latency.o target/Linux-x86_64/obj/aerospike/as_list_operations.o target/Linux-x86_64/obj/aerospike/as_lookup.o target/Linux-x86_64/obj/aerospike/as_map_operations.o target/Linux-x86_64/obj/aerospike/as_metrics.o target/Linux-x86_64/obj/aerospike/as_metrics_writer.o target/Linux-x86_64/obj/aerospike/as_node.o target/Linux-x86_64/obj/aerospike/as_operations.o target/Linux-x86_64/obj/aerospike/as_partition.o target/Linux-x86_64/obj/aerospike/as_partition_tracker.o target/Linux-x86_64/obj/aerospike/as_peers.o target/Linux-x86_64/obj/aerospike/as_pipe.o target/Linux-x86_64/obj/aerospike/as_policy.o target/Linux-x86_64/obj/aerospike/as_proto.o target/Linux-x86_64/obj/aerospike/as_query.o target/Linux-x86_64/obj/aerospike/as_query_validate.o target/Linux-x86_64/obj/aerospike/as_record.o target/Linux-x86_64/obj/aerospike/as_record_hooks.o target/Linux-x86_64/obj/aerospike/as_record_iterator.o 
target/Linux-x86_64/obj/aerospike/as_scan.o target/Linux-x86_64/obj/aerospike/as_shm_cluster.o target/Linux-x86_64/obj/aerospike/as_socket.o target/Linux-x86_64/obj/aerospike/as_tls.o target/Linux-x86_64/obj/aerospike/as_udf.o target/Linux-x86_64/obj/aerospike/version.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/target/Linux-x86_64/obj/common/aerospike/*.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/target/Linux-x86_64/obj/common/citrusleaf/*.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/target/Linux-x86_64/obj/*.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lapi.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lauxlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lbaselib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lcode.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lcorolib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lctype.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldblib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldebug.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldo.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldump.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lfunc.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lgc.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/linit.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/liolib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/llex.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lmathlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lmem.o 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/loadlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lobject.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lopcodes.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/loslib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lparser.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lstate.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lstring.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lstrlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltable.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltablib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltests.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltm.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lundump.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lutf8lib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lvm.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lzio.o +cc -shared -Isrc/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/src/include -I/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua -o target/Linux-x86_64/lib/libaerospike.so target/Linux-x86_64/obj/aerospike/_bin.o target/Linux-x86_64/obj/aerospike/aerospike.o target/Linux-x86_64/obj/aerospike/aerospike_batch.o target/Linux-x86_64/obj/aerospike/aerospike_index.o target/Linux-x86_64/obj/aerospike/aerospike_info.o target/Linux-x86_64/obj/aerospike/aerospike_key.o target/Linux-x86_64/obj/aerospike/aerospike_query.o target/Linux-x86_64/obj/aerospike/aerospike_scan.o 
target/Linux-x86_64/obj/aerospike/aerospike_stats.o target/Linux-x86_64/obj/aerospike/aerospike_udf.o target/Linux-x86_64/obj/aerospike/as_address.o target/Linux-x86_64/obj/aerospike/as_admin.o target/Linux-x86_64/obj/aerospike/as_async.o target/Linux-x86_64/obj/aerospike/as_batch.o target/Linux-x86_64/obj/aerospike/as_bit_operations.o target/Linux-x86_64/obj/aerospike/as_cdt_ctx.o target/Linux-x86_64/obj/aerospike/as_cdt_internal.o target/Linux-x86_64/obj/aerospike/as_command.o target/Linux-x86_64/obj/aerospike/as_config.o target/Linux-x86_64/obj/aerospike/as_cluster.o target/Linux-x86_64/obj/aerospike/as_error.o target/Linux-x86_64/obj/aerospike/as_event.o target/Linux-x86_64/obj/aerospike/as_event_ev.o target/Linux-x86_64/obj/aerospike/as_event_uv.o target/Linux-x86_64/obj/aerospike/as_event_event.o target/Linux-x86_64/obj/aerospike/as_event_none.o target/Linux-x86_64/obj/aerospike/as_exp_operations.o target/Linux-x86_64/obj/aerospike/as_exp.o target/Linux-x86_64/obj/aerospike/as_hll_operations.o target/Linux-x86_64/obj/aerospike/as_host.o target/Linux-x86_64/obj/aerospike/as_info.o target/Linux-x86_64/obj/aerospike/as_job.o target/Linux-x86_64/obj/aerospike/as_key.o target/Linux-x86_64/obj/aerospike/as_latency.o target/Linux-x86_64/obj/aerospike/as_list_operations.o target/Linux-x86_64/obj/aerospike/as_lookup.o target/Linux-x86_64/obj/aerospike/as_map_operations.o target/Linux-x86_64/obj/aerospike/as_metrics.o target/Linux-x86_64/obj/aerospike/as_metrics_writer.o target/Linux-x86_64/obj/aerospike/as_node.o target/Linux-x86_64/obj/aerospike/as_operations.o target/Linux-x86_64/obj/aerospike/as_partition.o target/Linux-x86_64/obj/aerospike/as_partition_tracker.o target/Linux-x86_64/obj/aerospike/as_peers.o target/Linux-x86_64/obj/aerospike/as_pipe.o target/Linux-x86_64/obj/aerospike/as_policy.o target/Linux-x86_64/obj/aerospike/as_proto.o target/Linux-x86_64/obj/aerospike/as_query.o target/Linux-x86_64/obj/aerospike/as_query_validate.o 
target/Linux-x86_64/obj/aerospike/as_record.o target/Linux-x86_64/obj/aerospike/as_record_hooks.o target/Linux-x86_64/obj/aerospike/as_record_iterator.o target/Linux-x86_64/obj/aerospike/as_scan.o target/Linux-x86_64/obj/aerospike/as_shm_cluster.o target/Linux-x86_64/obj/aerospike/as_socket.o target/Linux-x86_64/obj/aerospike/as_tls.o target/Linux-x86_64/obj/aerospike/as_udf.o target/Linux-x86_64/obj/aerospike/version.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/target/Linux-x86_64/obj/common/aerospike/*.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/common/target/Linux-x86_64/obj/common/citrusleaf/*.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/mod-lua/target/Linux-x86_64/obj/*.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lapi.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lauxlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lbaselib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lcode.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lcorolib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lctype.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldblib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldebug.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldo.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ldump.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lfunc.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lgc.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/linit.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/liolib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/llex.o 
/home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lmathlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lmem.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/loadlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lobject.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lopcodes.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/loslib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lparser.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lstate.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lstring.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lstrlib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltable.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltablib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltests.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/ltm.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lundump.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lutf8lib.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lvm.o /home/dpelini/Documents/type-overhaul/aerospike-client-c/modules/lua/lzio.o +cp -p src/include/aerospike/aerospike_batch.h target/Linux-x86_64/include/aerospike/aerospike_batch.h +cp -p src/include/aerospike/aerospike.h target/Linux-x86_64/include/aerospike/aerospike.h +cp -p src/include/aerospike/aerospike_index.h target/Linux-x86_64/include/aerospike/aerospike_index.h +cp -p src/include/aerospike/aerospike_info.h target/Linux-x86_64/include/aerospike/aerospike_info.h +cp -p src/include/aerospike/aerospike_key.h target/Linux-x86_64/include/aerospike/aerospike_key.h +cp -p src/include/aerospike/aerospike_query.h 
target/Linux-x86_64/include/aerospike/aerospike_query.h +cp -p src/include/aerospike/aerospike_scan.h target/Linux-x86_64/include/aerospike/aerospike_scan.h +cp -p src/include/aerospike/aerospike_stats.h target/Linux-x86_64/include/aerospike/aerospike_stats.h +cp -p src/include/aerospike/aerospike_udf.h target/Linux-x86_64/include/aerospike/aerospike_udf.h +cp -p src/include/aerospike/as_address.h target/Linux-x86_64/include/aerospike/as_address.h +cp -p src/include/aerospike/as_admin.h target/Linux-x86_64/include/aerospike/as_admin.h +cp -p src/include/aerospike/as_async.h target/Linux-x86_64/include/aerospike/as_async.h +cp -p src/include/aerospike/as_async_proto.h target/Linux-x86_64/include/aerospike/as_async_proto.h +cp -p src/include/aerospike/as_batch.h target/Linux-x86_64/include/aerospike/as_batch.h +cp -p src/include/aerospike/as_bin.h target/Linux-x86_64/include/aerospike/as_bin.h +cp -p src/include/aerospike/as_bit_operations.h target/Linux-x86_64/include/aerospike/as_bit_operations.h +cp -p src/include/aerospike/as_cdt_ctx.h target/Linux-x86_64/include/aerospike/as_cdt_ctx.h +cp -p src/include/aerospike/as_cdt_internal.h target/Linux-x86_64/include/aerospike/as_cdt_internal.h +cp -p src/include/aerospike/as_cdt_order.h target/Linux-x86_64/include/aerospike/as_cdt_order.h +cp -p src/include/aerospike/as_cluster.h target/Linux-x86_64/include/aerospike/as_cluster.h +cp -p src/include/aerospike/as_command.h target/Linux-x86_64/include/aerospike/as_command.h +cp -p src/include/aerospike/as_config.h target/Linux-x86_64/include/aerospike/as_config.h +cp -p src/include/aerospike/as_conn_pool.h target/Linux-x86_64/include/aerospike/as_conn_pool.h +cp -p src/include/aerospike/as_cpu.h target/Linux-x86_64/include/aerospike/as_cpu.h +cp -p src/include/aerospike/as_error.h target/Linux-x86_64/include/aerospike/as_error.h +cp -p src/include/aerospike/as_event.h target/Linux-x86_64/include/aerospike/as_event.h +cp -p src/include/aerospike/as_event_internal.h 
target/Linux-x86_64/include/aerospike/as_event_internal.h +cp -p src/include/aerospike/as_exp.h target/Linux-x86_64/include/aerospike/as_exp.h +cp -p src/include/aerospike/as_exp_operations.h target/Linux-x86_64/include/aerospike/as_exp_operations.h +cp -p src/include/aerospike/as_hll_operations.h target/Linux-x86_64/include/aerospike/as_hll_operations.h +cp -p src/include/aerospike/as_host.h target/Linux-x86_64/include/aerospike/as_host.h +cp -p src/include/aerospike/as_info.h target/Linux-x86_64/include/aerospike/as_info.h +cp -p src/include/aerospike/as_job.h target/Linux-x86_64/include/aerospike/as_job.h +cp -p src/include/aerospike/as_key.h target/Linux-x86_64/include/aerospike/as_key.h +cp -p src/include/aerospike/as_latency.h target/Linux-x86_64/include/aerospike/as_latency.h +cp -p src/include/aerospike/as_listener.h target/Linux-x86_64/include/aerospike/as_listener.h +cp -p src/include/aerospike/as_list_operations.h target/Linux-x86_64/include/aerospike/as_list_operations.h +cp -p src/include/aerospike/as_lookup.h target/Linux-x86_64/include/aerospike/as_lookup.h +cp -p src/include/aerospike/as_map_operations.h target/Linux-x86_64/include/aerospike/as_map_operations.h +cp -p src/include/aerospike/as_metrics.h target/Linux-x86_64/include/aerospike/as_metrics.h +cp -p src/include/aerospike/as_metrics_writer.h target/Linux-x86_64/include/aerospike/as_metrics_writer.h +cp -p src/include/aerospike/as_node.h target/Linux-x86_64/include/aerospike/as_node.h +cp -p src/include/aerospike/as_operations.h target/Linux-x86_64/include/aerospike/as_operations.h +cp -p src/include/aerospike/as_partition_filter.h target/Linux-x86_64/include/aerospike/as_partition_filter.h +cp -p src/include/aerospike/as_partition.h target/Linux-x86_64/include/aerospike/as_partition.h +cp -p src/include/aerospike/as_partition_tracker.h target/Linux-x86_64/include/aerospike/as_partition_tracker.h +cp -p src/include/aerospike/as_peers.h target/Linux-x86_64/include/aerospike/as_peers.h +cp -p 
src/include/aerospike/as_pipe.h target/Linux-x86_64/include/aerospike/as_pipe.h +cp -p src/include/aerospike/as_policy.h target/Linux-x86_64/include/aerospike/as_policy.h +cp -p src/include/aerospike/as_poll.h target/Linux-x86_64/include/aerospike/as_poll.h +cp -p src/include/aerospike/as_proto.h target/Linux-x86_64/include/aerospike/as_proto.h +cp -p src/include/aerospike/as_query.h target/Linux-x86_64/include/aerospike/as_query.h +cp -p src/include/aerospike/as_query_validate.h target/Linux-x86_64/include/aerospike/as_query_validate.h +cp -p src/include/aerospike/as_record.h target/Linux-x86_64/include/aerospike/as_record.h +cp -p src/include/aerospike/as_record_iterator.h target/Linux-x86_64/include/aerospike/as_record_iterator.h +cp -p src/include/aerospike/as_scan.h target/Linux-x86_64/include/aerospike/as_scan.h +cp -p src/include/aerospike/as_shm_cluster.h target/Linux-x86_64/include/aerospike/as_shm_cluster.h +cp -p src/include/aerospike/as_socket.h target/Linux-x86_64/include/aerospike/as_socket.h +cp -p src/include/aerospike/as_status.h target/Linux-x86_64/include/aerospike/as_status.h +cp -p src/include/aerospike/as_tls.h target/Linux-x86_64/include/aerospike/as_tls.h +cp -p src/include/aerospike/as_udf.h target/Linux-x86_64/include/aerospike/as_udf.h +cp -p src/include/aerospike/version.h target/Linux-x86_64/include/aerospike/version.h diff --git a/src/main/enums/status.cc b/src/main/enums/status.cc index 6b19c2758..007e09850 100644 --- a/src/main/enums/status.cc +++ b/src/main/enums/status.cc @@ -98,6 +98,7 @@ Local status() set(obj, "AEROSPIKE_EXPIRED_PASSWORD", AEROSPIKE_EXPIRED_PASSWORD); set(obj, "AEROSPIKE_FORBIDDEN_PASSWORD", AEROSPIKE_FORBIDDEN_PASSWORD); set(obj, "AEROSPIKE_INVALID_CREDENTIAL", AEROSPIKE_INVALID_CREDENTIAL); + set(obj, "AEROSPIKE_EXPIRED_SESSION", AEROSPIKE_EXPIRED_SESSION); set(obj, "AEROSPIKE_INVALID_ROLE", AEROSPIKE_INVALID_ROLE); set(obj, "AEROSPIKE_ROLE_ALREADY_EXISTS", AEROSPIKE_ROLE_ALREADY_EXISTS); set(obj, 
"AEROSPIKE_INVALID_PRIVILEGE", AEROSPIKE_INVALID_PRIVILEGE); diff --git a/ts-test/package-lock.json b/ts-test/package-lock.json new file mode 100644 index 000000000..afae7e3cf --- /dev/null +++ b/ts-test/package-lock.json @@ -0,0 +1,1853 @@ +{ + "name": "ts-test", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "ts-test", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "aerospike": "file:.." + }, + "devDependencies": { + "@types/chai": "^4.3.19", + "@types/mocha": "^10.0.7", + "@types/semver": "^7.5.8", + "@types/tmp": "^0.2.6", + "@types/yargs": "^17.0.33", + "mocha": "^10.7.3", + "ts-node": "^10.9.2", + "typedoc": "^0.26.7", + "typescript": "^5.5.4" + } + }, + "..": { + "name": "aerospike", + "version": "5.12.1", + "cpu": [ + "x64", + "arm64" + ], + "hasInstallScript": true, + "license": "Apache-2.0", + "os": [ + "linux", + "darwin", + "win32" + ], + "dependencies": { + "ansi-colors": "^4.1.3", + "bindings": "^1.5.0", + "minimatch": "^3.1.2", + "nan": "^2.19.0", + "node-gyp": "^10.1.0", + "npm-run-all": "^4.1.5" + }, + "devDependencies": { + "@eslint/js": "^9.11.1", + "@mapbox/node-pre-gyp": "^1.0.11", + "@types/node": "^22.7.4", + "chai": "^4.4.1", + "choma": "^1.2.1", + "codecov": "^3.8.3", + "deep-eql": "^4.1.3", + "eslint": "^8.57.1", + "globals": "^15.9.0", + "husky": "^9.0.11", + "mocha": "^10.4.0", + "mocha-clean": "^1.0.0", + "nyc": "^15.1.0", + "p-throttle": "^3.1.0", + "semver": "^7.6.0", + "standard": "^17.1.0", + "tmp": "^0.2.3", + "typescript": "^5.6.2", + "typescript-eslint": "^8.7.0", + "yargs": "^17.7.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@shikijs/core": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.21.0.tgz", + "integrity": "sha512-zAPMJdiGuqXpZQ+pWNezQAk5xhzRXBNiECFPcJLtUdsFM3f//G95Z15EHTnHchYycU8kIIysqGgxp8OVSj1SPQ==", + "dev": true, + "dependencies": { + "@shikijs/engine-javascript": "1.21.0", + "@shikijs/engine-oniguruma": "1.21.0", + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2", + "@types/hast": "^3.0.4", + "hast-util-to-html": "^9.0.3" + } + }, + "node_modules/@shikijs/engine-javascript": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-1.21.0.tgz", + "integrity": "sha512-jxQHNtVP17edFW4/0vICqAVLDAxmyV31MQJL4U/Kg+heQALeKYVOWo0sMmEZ18FqBt+9UCdyqGKYE7bLRtk9mg==", + "dev": 
true, + "dependencies": { + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2", + "oniguruma-to-js": "0.4.3" + } + }, + "node_modules/@shikijs/engine-oniguruma": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.21.0.tgz", + "integrity": "sha512-AIZ76XocENCrtYzVU7S4GY/HL+tgHGbVU+qhiDyNw1qgCA5OSi4B4+HY4BtAoJSMGuD/L5hfTzoRVbzEm2WTvg==", + "dev": true, + "dependencies": { + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2" + } + }, + "node_modules/@shikijs/types": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.21.0.tgz", + "integrity": "sha512-tzndANDhi5DUndBtpojEq/42+dpUF2wS7wdCDQaFtIXm3Rd1QkrcVgSSRLOvEwexekihOXfbYJINW37g96tJRw==", + "dev": true, + "dependencies": { + "@shikijs/vscode-textmate": "^9.2.2", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vscode-textmate": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-9.2.2.tgz", + "integrity": "sha512-TMp15K+GGYrWlZM8+Lnj9EaHEFmOen0WJBrfa17hF7taDOYthuPPV0GWzfd/9iMij0akS/8Yw2ikquH7uVi/fg==", + "dev": true + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": 
"sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/chai": { + "version": "4.3.19", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.19.tgz", + "integrity": "sha512-2hHHvQBVE2FiSK4eN0Br6snX9MtolHaTo/batnLjlGRhoQzlCL61iVpxoqO7SfFyOw+P/pwv+0zNHzKoGWz9Cw==", + "dev": true + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/mocha": { + "version": "10.0.7", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.7.tgz", + "integrity": "sha512-GN8yJ1mNTcFcah/wKEFIJckJx9iJLoMSzWcfRRuxz/Jk+U6KQNnml+etbtxFK8lPjzOw3zp4Ha/kjSst9fsHYw==", + "dev": true + }, + "node_modules/@types/node": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.2.0.tgz", + "integrity": "sha512-bm6EG6/pCpkxDf/0gDNDdtDILMOHgaQBVOJGdwsqClnxA3xL6jtMv76rLBc006RVMWbmaf0xbmom4Z/5o2nRkQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "undici-types": "~6.13.0" + } + }, + "node_modules/@types/semver": { + "version": "7.5.8", + "resolved": 
"https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", + "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==", + "dev": true + }, + "node_modules/@types/tmp": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.6.tgz", + "integrity": "sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==", + "dev": true + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { 
+ "version": "8.3.3", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.3.tgz", + "integrity": "sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/aerospike": { + "resolved": "..", + "link": true + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": 
"https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true, + "license": "ISC" + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": 
"sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + 
"node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": 
{ + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dev": true, + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "license": "BSD-3-Clause", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/hast-util-to-html": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.3.tgz", + "integrity": "sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^3.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "zwitch": "^2.0.4" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "license": "MIT", + "bin": { + "he": "bin/he" + } + }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": 
"sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": 
"sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dev": true, + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + 
"node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lunr": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==", + "dev": true + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/mdast-util-to-hast": { + "version": 
"13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "dev": true + }, + "node_modules/micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz", + "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + 
"node_modules/micromark-util-sanitize-uri": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz", + "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz", + "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/mocha": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.7.3.tgz", + "integrity": "sha512-uQWxAu44wwiACGqjbPYmjo7Lg8sFrS3dQe7PP2FQI+woptP4vZXSMcfMyFL/e1yFEeEpV4RtyTpZROOKmxis+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": "^4.0.0", + "find-up": "^5.0.0", + "glob": "^8.1.0", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9", + "yargs-unparser": "^2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/oniguruma-to-js": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/oniguruma-to-js/-/oniguruma-to-js-0.4.3.tgz", + "integrity": 
"sha512-X0jWUcAlxORhOqqBREgPMgnshB7ZGYszBNspP+tS9hPD3l13CdaXcHbgImoHUHlrvGx/7AvFEkTRhAGYh+jzjQ==", + "dev": true, + "dependencies": { + "regex": "^4.3.2" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": 
"sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/regex": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/regex/-/regex-4.3.3.tgz", + "integrity": "sha512-r/AadFO7owAq1QJVeZ/nq9jNS1vyZt+6t1p/E59B56Rn2GCya+gr1KSyOzNL/er+r+B7phv5jG2xU2Nz1YkmJg==", + "dev": true + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/shiki": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.21.0.tgz", + "integrity": "sha512-apCH5BoWTrmHDPGgg3RF8+HAAbEL/CdbYr8rMw7eIrdhCkZHdVGat5mMNlRtd1erNG01VPMIKHNQ0Pj2HMAiog==", + "dev": true, + "dependencies": { + "@shikijs/core": "1.21.0", + "@shikijs/engine-javascript": "1.21.0", + "@shikijs/engine-oniguruma": "1.21.0", + "@shikijs/types": "1.21.0", + "@shikijs/vscode-textmate": "^9.2.2", + "@types/hast": "^3.0.4" + } + }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + 
"strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "dev": true, + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": 
"sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + 
"dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/typedoc": { + "version": "0.26.7", + "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.26.7.tgz", + "integrity": "sha512-gUeI/Wk99vjXXMi8kanwzyhmeFEGv1LTdTQsiyIsmSYsBebvFxhbcyAx7Zjo4cMbpLGxM4Uz3jVIjksu/I2v6Q==", + "dev": true, + "dependencies": { + "lunr": "^2.3.9", + "markdown-it": "^14.1.0", + "minimatch": "^9.0.5", + "shiki": "^1.16.2", + "yaml": "^2.5.1" + }, + "bin": { + "typedoc": "bin/typedoc" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "typescript": "4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x" + } + }, + "node_modules/typedoc/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "dev": true + }, + "node_modules/undici-types": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.13.0.tgz", + "integrity": 
"sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/workerpool": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/wrap-ansi": 
{ + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yaml": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", + "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "license": "MIT", + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + } + } +} diff --git a/ts-test/package.json b/ts-test/package.json new file mode 100644 index 000000000..f423e6e56 --- /dev/null +++ b/ts-test/package.json @@ -0,0 +1,29 @@ +{ + "name": "ts-test", + "version": "1.0.0", + "main": "index.js", + "directories": 
{ + "test": "tests" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "devDependencies": { + "@types/chai": "^4.3.19", + "@types/mocha": "^10.0.7", + "@types/semver": "^7.5.8", + "@types/tmp": "^0.2.6", + "@types/yargs": "^17.0.33", + "mocha": "^10.7.3", + "ts-node": "^10.9.2", + "typedoc": "^0.26.7", + "typescript": "^5.5.4" + }, + "dependencies": { + "aerospike": "file:.." + } +} diff --git a/ts-test/tests/README.md b/ts-test/tests/README.md new file mode 100644 index 000000000..d8230d4ab --- /dev/null +++ b/ts-test/tests/README.md @@ -0,0 +1,44 @@ +# Testing Aerospike Node.js Client + +## Installation + +We also recommend installing Aerospike dependencies by running the following +from the module's root directory: + + $ npm update + +This will install all required dependencies. + +## Usage + +To run the test cases: + + $ npm test + +The tests are written and run using [`mocha`](http://visionmedia.github.io/mocha). +You can choose to use `mocha` directly, but you must first install `mocha`: + + $ npm install -g mocha + +Note: some users may need to run this as sudo. + +Then to run the tests via mocha, you will want to run it from the modules' root +directory: + + $ mocha -R spec + +## Options + +You can modify the test with various options: + + --help Display this message. + --host, -h Aerospike database address. [default: "127.0.0.1"] + --port, -p Aerospike database port. [default: 3000] + --timeout, -t Timeout in milliseconds. [default: 10] + --log, -l Log level [0-5] [default: 2] + --namespace, -n Namespace for the keys. [default: "test"] + --set, -s Set for the keys. 
[default: "demo"] + +Options can be set via an environment variable `OPTIONS`: + + $ OPTIONS="--port 3010" npm test diff --git a/ts-test/tests/admin.ts b/ts-test/tests/admin.ts new file mode 100644 index 000000000..e913fecff --- /dev/null +++ b/ts-test/tests/admin.ts @@ -0,0 +1,517 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, context */ + +import Aerospike, { Client, AdminPolicy, admin, ConfigOptions } from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; + +function getRandomInt (max: number) { + return Math.floor(Math.random() * max) +} +function randomString (num: number) { + return getRandomInt(num) +} + +function wait (ms: number) { + return new Promise(resolve => setTimeout(resolve, ms)) +} + +context('admin commands', async function () { + if (helper.config.user !== 'admin') { + return + } + const client: Client = helper.client + const randomFactor: number = 1000000 + const waitMs: number = 100 + const username1: string = 'username' + randomString(getRandomInt(randomFactor)) + const username2: string = 'username' + randomString(getRandomInt(randomFactor)) + const username3: string = 'username' + randomString(getRandomInt(randomFactor)) + const username4: 
string = 'username' + randomString(getRandomInt(randomFactor)) + + const rolename1: string = 'rolename' + randomString(getRandomInt(randomFactor)) + const rolename2: string = 'rolename' + randomString(getRandomInt(randomFactor)) + const rolename3: string = 'rolename' + randomString(getRandomInt(randomFactor)) + + const policy: AdminPolicy = new Aerospike.AdminPolicy({ timeout: 1000 }) + + describe('Client#queryRole()', function () { + it('query role', async function () { + const result: admin.Role = await client.queryRole('user-admin', null) + expect(result).to.have.property('name', 'user-admin') + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges') + }) + + it('with policy', async function () { + const result: admin.Role = await client.queryRole('truncate', policy) + expect(result).to.have.property('name', 'truncate') + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges') + }) + }) + + describe('Client#queryRoles()', function () { + it('query roles', async function () { + const results = await client.queryRoles(null) + expect(results.length).to.be.above(0) + results.forEach((result: admin.Role) => { + expect(result).to.have.property('name') + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.is.an('array') + expect(result).to.have.property('privileges') + }) + }) + + it('with policy', async function () { + const results = await client.queryRoles(policy) + expect(results.length).to.be.above(0) + results.forEach((result: admin.Role) => { + expect(result).to.have.property('name') + expect(result).to.have.property('readQuota', 0) + 
expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.is.an('array') + expect(result).to.have.property('privileges') + }) + }) + }) + + describe('Client#createRole()', function () { + it('Creates role', async function () { + client.createRole(rolename1, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN)], null) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename1, null) + expect(result).to.have.property('name', rolename1) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN)]) + }) + + it('with admin policy', async function () { + client.createRole(rolename2, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ)], policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename2, null) + expect(result).to.have.property('name', rolename2) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ)]) + }) + + it('With multiple privilegeCodes', async function () { + await client.createRole(rolename3, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN), new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE_UDF), new Aerospike.admin.Privilege(Aerospike.privilegeCode.WRITE)], null) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename3, null) + expect(result).to.have.property('name', rolename3) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + 
expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN), new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE_UDF), new Aerospike.admin.Privilege(Aerospike.privilegeCode.WRITE)]) + }) + }) + + describe('Client#grantPrivileges()', function () { + it('grants privilege to role', async function () { + client.grantPrivileges(rolename1, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE)], null) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename1, null) + expect(result).to.have.property('name', rolename1) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN), new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE)]) + }) + + it('with admin policy', async function () { + client.grantPrivileges(rolename2, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)], policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename2, null) + expect(result).to.have.property('name', rolename2) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ), new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)]) + }) + it('with multiple privileges', async function () { + client.grantPrivileges(rolename3, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ), new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)], policy) + await wait(waitMs) 
+ const result: admin.Role = await client.queryRole(rolename3, null) + expect(result).to.have.property('name', rolename3) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.is.an('array') + expect(result.privileges).to.have.length(5) + for (let i = 0; i < 5; i++) { + expect(result.privileges[i]).to.have.property('code').that.is.a('number') + expect(result.privileges[i]).to.have.property('namespace').that.is.a('string') + expect(result.privileges[i]).to.have.property('set').that.is.a('string') + } + }) + }) + + describe('Client#revokePrivileges()', function () { + it('Revokes privilege from role', async function () { + client.revokePrivileges(rolename1, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN)]) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename1, null) + expect(result).to.have.property('name', rolename1) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE)]) + }) + + it('With admin policy', async function () { + client.revokePrivileges(rolename2, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ)], policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename2, null) + expect(result).to.have.property('name', rolename2) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)]) + }) + + it('With mutliple privileges', async 
function () { + client.revokePrivileges(rolename3, [new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ), new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)], policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename3, null) + expect(result).to.have.property('name', rolename3) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals([]) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN), new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE_UDF), new Aerospike.admin.Privilege(Aerospike.privilegeCode.WRITE)]) + }) + }) + + describe('Client#queryUser()', function () { + it('Queries user', async function () { + const result: admin.User = await client.queryUser('admin', null) + expect(result).to.have.property('name', 'admin') + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals(['user-admin']) + }) + + it('with policy', async function () { + const result: admin.User = await client.queryUser('admin', policy) + expect(result).to.have.property('name', 'admin') + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals(['user-admin']) + }) + }) + + describe('Client#queryUsers()', function () { + it('Queries users', async function () { + const results: admin.User[] = await client.queryUsers(null) + results.forEach((result: admin.User) => { + expect(result).to.have.property('name').that.is.a('string') + 
expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.is.an('array') + }) + }) + it('With policy', async function () { + const results: admin.User[] = await client.queryUsers(policy) + results.forEach((result: admin.User) => { + expect(result).to.have.property('name').that.is.a('string') + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.is.an('array') + }) + }) + }) + + describe('Client#createUser()', function () { + it('Creates user', async function () { + client.createUser(username1, 'password') + await wait(waitMs) + const result: admin.User = await client.queryUser(username1, null) + expect(result).to.have.property('name', username1) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([]) + }) + + it('With policy', async function () { + client.createUser(username2, 'password', null, policy) + await wait(waitMs) + const result: admin.User = await client.queryUser(username2, null) + expect(result).to.have.property('name', username2) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([]) + }) + + it('With role', async function () { + client.createUser(username3, 'password', [rolename1]) + await wait(waitMs) + const result = await client.queryUser(username3, null) + 
expect(result).to.have.property('name', username3) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([rolename1]) + }) + + it('With multiple roles', async function () { + client.createUser(username4, 'password', [rolename1, rolename2, rolename3]) + await wait(waitMs) + const result: admin.User = await client.queryUser(username4, null) + expect(result).to.have.property('name', username4) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('connsInUse', 0) + expect(result).to.have.property('roles').that.has.members([rolename1, rolename2, rolename3]) + }) + }) + + describe('Client#grantRoles()', function () { + it('grants role to user', async function () { + client.grantRoles(username1, [rolename1], null) + await wait(waitMs) + const result: admin.User = await client.queryUser(username1, null) + expect(result).to.have.property('name', username1) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([rolename1]) + }) + + it('With policy', async function () { + client.grantRoles(username2, [rolename2], policy) + await wait(waitMs) + const result: admin.User = await client.queryUser(username2, null) + expect(result).to.have.property('name', username2) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([rolename2]) + }) + 
+ it('With multiple roles', async function () { + client.grantRoles(username3, [rolename1, rolename2, rolename3], policy) + await wait(waitMs) + const result: admin.User = await client.queryUser(username3, null) + expect(result).to.have.property('name', username3) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.has.members([rolename1, rolename2, rolename3]) + }) + }) + + describe('Client#revokeRoles()', function () { + it('Revokes role from user', async function () { + client.revokeRoles(username1, [rolename1], null) + await wait(waitMs) + const result: admin.User = await client.queryUser(username1, null) + expect(result).to.have.property('name', username1) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([]) + }) + + it('With policy', async function () { + client.revokeRoles(username2, [rolename2], policy) + await wait(waitMs) + const result: admin.User = await client.queryUser(username2, null) + expect(result).to.have.property('name', username2) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([]) + }) + + it('With multiple roles', async function () { + client.revokeRoles(username3, [rolename1, rolename2, rolename3], policy) + await wait(waitMs) + const result: admin.User = await client.queryUser(username3, null) + expect(result).to.have.property('name', username3) + expect(result).to.have.property('readInfo').that.deep.equals([0, 0, 0, 0]) + 
expect(result).to.have.property('writeInfo').that.deep.equals([0, 0, 0, 0]) + expect(result.connsInUse).to.be.a('number') + expect(result).to.have.property('roles').that.deep.equals([]) + }) + }) + + describe('Client#setWhitelist()', function () { + it('Set whitelist', async function () { + client.setWhitelist(rolename1, ['192.168.0.0'], null) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename1, null) + expect(result).to.have.property('name', rolename1) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals(['192.168.0.0']) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE)]) + }) + + it('With policy', async function () { + client.setWhitelist(rolename2, ['192.168.0.0'], policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename2, null) + expect(result).to.have.property('name', rolename2) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals(['192.168.0.0']) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)]) + }) + + it('With multiple addresses', async function () { + client.setWhitelist(rolename3, ['192.168.0.0', '149.14.182.255'], policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename3, null) + expect(result).to.have.property('name', rolename3) + expect(result).to.have.property('readQuota', 0) + expect(result).to.have.property('writeQuota', 0) + expect(result).to.have.property('whitelist').that.deep.equals(['192.168.0.0', '149.14.182.255']) + expect(result).to.have.property('privileges').that.is.an('array') + expect(result.privileges).to.have.length(3) + for (let i = 0; i < 3; i++) { + 
expect(result.privileges[i]).to.have.property('code').that.is.a('number') + expect(result.privileges[i]).to.have.property('namespace').that.is.a('string') + expect(result.privileges[i]).to.have.property('set').that.is.a('string') + } + }) + }) + + describe('Client#setQuotas()', function () { + it('Sets quotas', async function () { + client.setQuotas(rolename1, 100, 150, null) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename1, null) + expect(result).to.have.property('name', rolename1) + expect(result).to.have.property('readQuota', 100) + expect(result).to.have.property('writeQuota', 150) + expect(result).to.have.property('whitelist').that.deep.equals(['192.168.0.0']) + expect(result).to.have.property('privileges').that.deep.equals([new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE)]) + }) + + it('With policy', async function () { + client.setQuotas(rolename2, 150, 250, policy) + await wait(waitMs) + const result: admin.Role = await client.queryRole(rolename2, null) + const privilege: admin.Privilege = new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE) + expect(result).to.have.property('name', rolename2) + expect(result).to.have.property('readQuota', 150) + expect(result).to.have.property('writeQuota', 250) + expect(result).to.have.property('whitelist').that.deep.equals(['192.168.0.0']) + expect(result).to.have.property('privileges').that.deep.equals([privilege]) + }) + }) + + describe('Client#dropRole()', function () { + it('Drops role', async function () { + client.dropRole(rolename1, null) + await wait(waitMs) + try { + await client.queryRole(rolename1, policy) + // Should fail, assert failure if error is not returned. 
+ expect(1).to.equal(2) + } catch (error) { + expect(error).to.exist.and.have.property('code', Aerospike.status.INVALID_ROLE) + } + }) + + it('With policy', async function () { + client.dropRole(rolename2, policy) + await wait(waitMs) + try { + await client.queryRole(rolename2, policy) + // Should fail, assert failure if error is not returned. + expect(1).to.equal(2) + } catch (error) { + expect(error).to.exist.and.have.property('code', Aerospike.status.INVALID_ROLE) + } + }) + }) + + describe('Client#changePassword()', function () { + it('Changes password for user', async function () { + client.changePassword(username1, 'password350', null) + await wait(waitMs + 30000) + const config: ConfigOptions = { + hosts: helper.config.hosts, + user: username1, + password: 'password350' + } + const dummyClient = await Aerospike.connect(config) + return dummyClient.close() + }) + + it('With policy', async function () { + client.changePassword(username2, 'password250', policy) + await wait(waitMs + 3000) + + const config: ConfigOptions = { + hosts: helper.config.hosts, + user: username2, + password: 'password250' + } + + const dummyClient = await Aerospike.connect(config) + return dummyClient.close() + }) + }) + + describe('Client#dropUser()', function () { + it('Drops user', async function () { + client.dropUser(username1, null) + await wait(waitMs) + try { + await client.queryUser(username1, policy) + // Should fail, assert failure if error is not returned. + expect(1).to.equal(2) + } catch (error) { + expect(error).to.exist.and.have.property('code', Aerospike.status.INVALID_USER) + } + }) + it('With policy', async function () { + client.dropUser(username2, policy) + await wait(waitMs) + try { + await client.queryUser(username2, policy) + // Should fail, assert failure if error is not returned. 
+ expect(1).to.equal(2) + } catch (error) { + expect(error).to.exist.and.have.property('code', Aerospike.status.INVALID_USER) + } + }) + }) + + client.dropRole(rolename3, null) + client.dropUser(username4, policy) + client.dropUser(username3, policy) +}) diff --git a/ts-test/tests/aerospike.ts b/ts-test/tests/aerospike.ts new file mode 100644 index 000000000..bec314690 --- /dev/null +++ b/ts-test/tests/aerospike.ts @@ -0,0 +1,57 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ + +import Aerospike, { Client } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; +let client: Client +describe('Aerospike', function () { + describe('Aerospike.client() #noserver', function () { + it('instantiates a new client instance', function (done) { + client = Aerospike.client(helper.config) + expect(client).to.be.instanceof(Aerospike.Client) + done() + }) + }) + + describe('Aerospike.connect()', function () { + it('instantiates a new client instance and connects to the cluster', function (done) { + Aerospike.connect(helper.config, (error?: Error, client?) 
=> { + if (error) throw error + expect(client).to.be.instanceof(Aerospike.Client) + client?.infoAny((err?: Error) => { + if (err) throw err + client.close(false) + done() + }) + }) + }) + + it('returns a Promise that resolves to a client', function () { + return Aerospike.connect(helper.config) + .then(client => { + expect(client).to.be.instanceof(Aerospike.Client) + return client + }) + .then(client => client.close(false)) + }) + }) +}) diff --git a/ts-test/tests/apply.ts b/ts-test/tests/apply.ts new file mode 100644 index 000000000..86558e52a --- /dev/null +++ b/ts-test/tests/apply.ts @@ -0,0 +1,96 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, before, after */ + +import Aerospike, { AerospikeRecord, AerospikeError as ASError, Client as Cli, Key, UDF, ApplyPolicyOptions } from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; + +const AerospikeError: typeof ASError = Aerospike.AerospikeError + +const keygen = helper.keygen + +describe('client.apply()', function () { + const client: Cli = helper.client + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/apply/' })() + + before(() => helper.udf.register('udf.lua') + .then(() => client.put(key, { foo: 'bar' }, { ttl: 1000 }))) + + after(() => helper.udf.remove('udf.lua') + .then(() => client.remove(key))) + + it('should invoke an UDF to without any args', function (done) { + const udfArgs: UDF= { module: 'udf', funcname: 'withoutArguments' } + client.apply(key, udfArgs, function (error?: Error, result?: AerospikeRecord) { + if (error) throw error + expect(result).to.equal(1) + done() + }) + }) + + it('should invoke an UDF with arguments', function (done) { + const udfArgs: UDF = { module: 'udf', funcname: 'withArguments', args: [42] } + client.apply(key, udfArgs, function (error?: Error, result?: AerospikeRecord) { + if (error) throw error + expect(result).to.equal(42) + done() + }) + }) + + it('should invoke an UDF with apply policy', function (done) { + const policy: ApplyPolicyOptions = new Aerospike.ApplyPolicy({ + totalTimeout: 1500 + }) + const udf: UDF = { + module: 'udf', + funcname: 'withArguments', + args: [[1, 2, 3]] + } + + client.apply(key, udf, policy, function (error?: Error, result?: AerospikeRecord) { + if (error) throw error + expect(result).to.eql([1, 2, 3]) + done() + }) + }) + + it('should return an error if the user-defined function does not exist', function (done) { + const udfArgs: UDF = { module: 'udf', funcname: 'not-such-function' } + 
client.apply(key, udfArgs, function (error?: Error, result?: AerospikeRecord) { + expect(error).to.be.instanceof(AerospikeError).with.property('code', Aerospike.status.ERR_UDF) + done() + }) + }) + /* + it('should return an error if the UDF arguments are invalid', function (done) { + const udfArgs = { module: 'udf', funcname: 'noop', args: 42 } // args should always be an array + client.apply(key, udfArgs, function (error?: Error, result?: AerospikeRecord) { + expect(error).to.be.instanceof(AerospikeError).with.property('code', Aerospike.status.ERR_PARAM) + done() + }) + }) + */ + it('should return a Promise that resolves to the return value of the UDF function', function () { + const udfArgs: UDF = { module: 'udf', funcname: 'withoutArguments' } + + return client.apply(key, udfArgs) + .then(result => expect(result).to.equal(1)) + }) +}) diff --git a/ts-test/tests/batch_apply.ts b/ts-test/tests/batch_apply.ts new file mode 100644 index 000000000..a30c3b37b --- /dev/null +++ b/ts-test/tests/batch_apply.ts @@ -0,0 +1,89 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Client, AerospikeRecord, BatchResult, Key as K, UDF, BatchPolicyOptions} from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; + +// const util = require('util') + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +const Key: typeof K = Aerospike.Key + +describe('client.batchApply()', function () { + const client = helper.client + + before(function () { + const nrecords: number = 10 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_apply/', random: false }), + recgen: recgen.record({ + i: valgen.integer(), + s: valgen.string(), + str2: valgen.string('hello'), + l: () => [1, 2, 3], + m: () => { return { a: 1, b: 2, c: 3 } } + }), + metagen: metagen.constant({ ttl: 1000 }) + } + helper.udf.register('udf.lua') + return putgen.put(nrecords, generators, {}) + }) + + context('with batch apply', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('apply udf on batch of records', function (done) { + const batchRecords: K[] = [ + new Key(helper.namespace, helper.set, 'test/batch_apply/1'), + new Key(helper.namespace, helper.set, 'test/batch_apply/2'), + new Key(helper.namespace, helper.set, 'test/batch_apply/3'), + new Key(helper.namespace, helper.set, 'test/batch_apply/4'), + new Key(helper.namespace, helper.set, 'test/batch_apply/5') + ] + const policy: BatchPolicyOptions = new Aerospike.BatchPolicy({ + totalTimeout: 1500 + }) + const udf: UDF = { + module: 'udf', + funcname: 'withArguments', + args: [[1, 2, 3]] + } + + client.batchApply(batchRecords, udf, policy, function (err?: Error, results?: BatchResult[]) { + expect(err).not.to.be.ok + 
expect(results?.length).to.equal(5) + results?.forEach(function (result: BatchResult) { + // console.log(util.inspect(result, true, 10, true)) + expect(result.status).to.equal(Aerospike.status.OK) + expect(result.record.bins.SUCCESS).to.eql([1, 2, 3]) + }) + done() + }) + }) + }) +}) diff --git a/ts-test/tests/batch_exists.ts b/ts-test/tests/batch_exists.ts new file mode 100644 index 000000000..8583a8308 --- /dev/null +++ b/ts-test/tests/batch_exists.ts @@ -0,0 +1,75 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { BatchResult, AerospikeRecord, Client, Key} from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +describe('client.batchExists()', function () { + const client: Client = helper.client + + it('should successfully find 10 records', function () { + const numberOfRecords = 10 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_exists/10/', random: false }), + recgen: recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() }), + metagen: metagen.constant({ ttl: 1000 }) + } + return putgen.put(numberOfRecords, generators) + .then((records: AerospikeRecord[]) => { + const keys = records.map((record: AerospikeRecord) => record.key) + return client.batchExists(keys) + }) + .then((results: BatchResult[]) => { + expect(results.length).to.equal(numberOfRecords) + results.forEach((result: BatchResult) => { + expect(result.status).to.equal(Aerospike.status.OK) + expect(result.record).to.be.instanceof(Aerospike.Record) + }) + }) + }) + + it('should fail finding 10 records', function (done) { + const numberOfRecords: number = 10 + const kgen: any = keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_exists/fail/', random: false }) + const keys: Key[] = keygen.range(kgen, numberOfRecords) + + client.batchExists(keys, function (err?: Error, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(numberOfRecords) + results?.forEach(function (result: BatchResult) { + expect(result.status).to.equal(Aerospike.status.ERR_RECORD_NOT_FOUND) + }) + done() + }) + }) + + it('returns an 
empty array when no keys are passed', () => { + return client.batchExists([]) + .then((results: BatchResult[]) => expect(results).to.eql([])) + }) +}) diff --git a/ts-test/tests/batch_get.ts b/ts-test/tests/batch_get.ts new file mode 100644 index 000000000..bd9fc7853 --- /dev/null +++ b/ts-test/tests/batch_get.ts @@ -0,0 +1,75 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { AerospikeRecord, KeyOptions, BatchResult, Client, Key } from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +describe('client.batchGet()', function () { + const client: Client = helper.client + + it('should successfully read 10 records', function () { + const numberOfRecords = 10 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_get/success', random: false }), + recgen: recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() }), + metagen: metagen.constant({ ttl: 1000 }) + } + return putgen.put(numberOfRecords, generators) + .then((records: AerospikeRecord[]) => { + const keys: KeyOptions[] = records.map((record: AerospikeRecord) => record.key) + return client.batchGet(keys) + .then((results: BatchResult[]) => { + expect(results.length).to.equal(numberOfRecords) + results.forEach((result?: BatchResult) => { + const putRecord: AerospikeRecord | undefined = records.find((record: AerospikeRecord) => record.key.key === result?.record.key.key) + expect(result?.status).to.equal(Aerospike.status.OK) + expect(result?.record.bins).to.eql(putRecord?.bins) + }) + }) + }) + }) + + it('should fail reading 10 records', function (done) { + const numberOfRecords = 10 + const kgen: any = keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_get/fail/', random: false }) + const keys: Key[] = keygen.range(kgen, numberOfRecords) + client.batchGet(keys, function (err?: Error, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(numberOfRecords) + results?.forEach(function 
 (result: BatchResult) { + expect(result.status).to.equal(Aerospike.status.ERR_RECORD_NOT_FOUND) + }) + done() + }) + }) + + it('returns an empty array when no keys are passed', () => { + return client.batchGet([]) + .then((results: BatchResult[]) => expect(results).to.eql([])) + }) +}) diff --git a/ts-test/tests/batch_read.ts b/ts-test/tests/batch_read.ts new file mode 100644 index 000000000..606182252 --- /dev/null +++ b/ts-test/tests/batch_read.ts @@ -0,0 +1,396 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Client, BatchReadRecord, BatchResult, AerospikeRecord, BatchReadPolicyOptions, BatchPolicyOptions, AerospikeError } from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +const Key = Aerospike.Key + +describe('client.batchRead()', function () { + const client: Client = helper.client + + before(function () { + const nrecords: number = 10 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_read/', random: false }), + recgen: recgen.record({ + i: valgen.integer(), + s: valgen.string(), + l: () => [1, 2, 3], + m: () => { return { a: 1, b: 2, c: 3 } } + }), + metagen: metagen.constant({ ttl: 1000 }) + } + return putgen.put(nrecords, generators) + }) + + it('returns the status whether each key was found or not', function (done) { + const batchRecords: BatchReadRecord[] = [ + { key: new Key(helper.namespace, helper.set, 'test/batch_read/1') }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/3') }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/5') }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/no_such_key') }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/not_either') } + ] + + client.batchRead(batchRecords, function (err?: Error, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(5) + const found: BatchResult[] | undefined = results?.filter( + (result?: BatchResult) => result?.status === Aerospike.status.OK) + expect(found?.length).to.equal(3) + const notFound: BatchResult[] | undefined = results?.filter( + 
(result?: BatchResult) => result?.status === Aerospike.status.ERR_RECORD_NOT_FOUND) + expect(notFound?.length).to.equal(2) + done() + }) + }) + + it('returns only meta data if no bins are selected', function (done) { + const batchRecords: BatchReadRecord[] = [ + { key: new Key(helper.namespace, helper.set, 'test/batch_read/1') }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/3') }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/5') } + ] + + client.batchRead(batchRecords, function (err?: Error, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(3) + results?.forEach(function (result) { + expect(result?.status).to.equal(Aerospike.status.OK) + expect(result?.record.bins).to.be.empty + }) + done() + }) + }) + + it('returns just the selected bins', function (done) { + const batchRecords: BatchReadRecord[] = [ + { key: new Key(helper.namespace, helper.set, 'test/batch_read/1'), bins: ['i'] }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/3'), bins: ['i'] }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/5'), bins: ['i'] } + ] + + client.batchRead(batchRecords, function (err?: Error, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(3) + results?.forEach(function (result: BatchResult) { + expect(result?.status).to.equal(Aerospike.status.OK) + expect(result?.record.bins).to.have.all.keys('i') + expect(result?.record.gen).to.be.ok + expect(result?.record.ttl).to.be.ok + }) + done() + }) + }) + + it('returns the entire record', function (done) { + const batchRecords: BatchReadRecord[] = [ + { key: new Key(helper.namespace, helper.set, 'test/batch_read/1'), readAllBins: true }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/3'), readAllBins: true }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/5'), readAllBins: true } + ] + + client.batchRead(batchRecords, function (err?: AerospikeError, 
 results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(3) + results?.forEach(function (result: BatchResult) { + expect(result?.status).to.equal(Aerospike.status.OK) + expect(result?.record.bins).to.have.keys('i', 's', 'l', 'm') + expect(result?.record.gen).to.be.ok + expect(result?.record.ttl).to.be.ok + }) + done() + }) + }) + + it('returns selected bins for each key', function (done) { + const batchRecords = [ + { key: new Key(helper.namespace, helper.set, 'test/batch_read/1'), readAllBins: true }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/3'), readAllBins: false, bins: ['i'] }, + { key: new Key(helper.namespace, helper.set, 'test/batch_read/5'), readAllBins: false } + ] + + client.batchRead(batchRecords, function (err?: AerospikeError, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(3) + results?.forEach(function (result: BatchResult) { + const record = result.record + switch (record.key.key) { + case 'test/batch_read/1': + expect(record.bins).to.have.all.keys('i', 's', 'l', 'm') + break + case 'test/batch_read/3': + expect(record.bins).to.have.all.keys('i') + break + case 'test/batch_read/5': + expect(record.bins).to.be.empty + break + default: + throw new Error('unexpected record key') + } + }) + done() + }) + }) + + context('with BatchPolicy', function () { + context('with deserialize: false', function () { + const policy: BatchPolicyOptions = new Aerospike.BatchPolicy({ + deserialize: false + }) + + it('returns list and map bins as byte buffers', function () { + const batch = [{ + key: new Key(helper.namespace, helper.set, 'test/batch_read/1'), + readAllBins: true + }] + + return client.batchRead(batch, policy) + .then(results => { + const bins = results[0].record.bins + expect(bins.i).to.be.a('number') + expect(bins.s).to.be.a('string') + expect(bins.l).to.be.instanceof(Buffer) + expect(bins.m).to.be.instanceof(Buffer) + }) + }) + }) + }) + 
context('readTouchTtlPercent policy', function () { + this.timeout(4000) + + context('BatchPolicy policy', function () { + helper.skipUnlessVersion('>= 7.1.0', this) + + it('100% touches record', async function () { + const policy: BatchReadPolicyOptions = new Aerospike.BatchReadPolicy({ + readTouchTtlPercent: 100 + }) + + await client.put(new Aerospike.Key('test', 'demo', 'batchTtl1'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batch = [{ + key: new Aerospike.Key('test', 'demo', 'batchTtl1'), + readAllBins: true + }] + + const batchResult = await client.batchRead(batch, policy) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record = await client.get(new Aerospike.Key('test', 'demo', 'batchTtl1')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchTtl1')) + }) + + it('71% touches record', async function () { + const policy = new Aerospike.BatchReadPolicy({ + readTouchTtlPercent: 71 + }) + + await client.put(new Aerospike.Key('test', 'demo', 'batchTtl2'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batch = [{ + key: new Aerospike.Key('test', 'demo', 'batchTtl2'), + readAllBins: true + }] + + const batchResult = await client.batchRead(batch, policy) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record = await client.get(new Aerospike.Key('test', 'demo', 'batchTtl2')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchTtl2')) + }) + + it('60% doesnt touch record', async function () { + const policy: BatchReadPolicyOptions = new Aerospike.BatchReadPolicy({ + readTouchTtlPercent: 60 + }) + + await client.put(new Aerospike.Key('test', 'demo', 
'batchTtl3'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batch = [{ + key: new Aerospike.Key('test', 'demo', 'batchTtl3'), + readAllBins: true + }] + + const batchResult = await client.batchRead(batch, policy) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record = await client.get(new Aerospike.Key('test', 'demo', 'batchTtl3')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchTtl3')) + }) + + it('0% doesnt touch record', async function () { + const policy: BatchReadPolicyOptions = new Aerospike.BatchReadPolicy({ + readTouchTtlPercent: 0 + }) + + await client.put(new Aerospike.Key('test', 'demo', 'batchTtl4'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batch = [{ + key: new Aerospike.Key('test', 'demo', 'batchTtl4'), + readAllBins: true + }] + + const batchResult = await client.batchRead(batch, policy) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record = await client.get(new Aerospike.Key('test', 'demo', 'batchTtl4')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchTtl4')) + }) + }) + + context('BatchReadPolicy policy', function () { + helper.skipUnlessVersion('>= 7.1.0', this) + it('100% touches record', async function () { + const batch = [{ + key: new Aerospike.Key('test', 'demo', 'batchReadTtl1'), + readAllBins: true, + policy: new Aerospike.BatchPolicy({ + readTouchTtlPercent: 100 + }) + }] + await client.put(new Aerospike.Key('test', 'demo', 'batchReadTtl1'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batchResult = await client.batchRead(batch) + 
expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record = await client.get(new Aerospike.Key('test', 'demo', 'batchReadTtl1')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchReadTtl1')) + }) + + it('71% touches record', async function () { + const batch = [{ + key: new Aerospike.Key('test', 'demo', 'batchReadTtl2'), + readAllBins: true, + policy: new Aerospike.BatchPolicy({ + readTouchTtlPercent: 71 + }) + }] + await client.put(new Aerospike.Key('test', 'demo', 'batchReadTtl2'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batchResult: BatchResult[] = await client.batchRead(batch) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record = await client.get(new Aerospike.Key('test', 'demo', 'batchReadTtl2')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchReadTtl2')) + }) + + it('60% doesnt touch record', async function () { + const batch: BatchReadRecord[] = [{ + key: new Aerospike.Key('test', 'demo', 'batchReadTtl3'), + readAllBins: true, + policy: new Aerospike.BatchPolicy({ + readTouchTtlPercent: 60 + }) + }] + await client.put(new Aerospike.Key('test', 'demo', 'batchReadTtl3'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batchResult: BatchResult[] = await client.batchRead(batch) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record: AerospikeRecord = await client.get(new Aerospike.Key('test', 'demo', 'batchReadTtl3')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchReadTtl3')) + }) + 
+ it('0% doesnt touch record', async function () { + const batch: BatchReadRecord[] = [{ + key: new Aerospike.Key('test', 'demo', 'batchReadTtl4'), + readAllBins: true, + policy: new Aerospike.BatchPolicy({ + readTouchTtlPercent: 0 + }) + }] + await client.put(new Aerospike.Key('test', 'demo', 'batchReadTtl4'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + const batchResult: BatchResult[] = await client.batchRead(batch) + expect(batchResult[0].record.bins).to.eql({ i: 2 }) + expect(batchResult[0].record.ttl).to.be.within(7, 8) + + const record: AerospikeRecord = await client.get(new Aerospike.Key('test', 'demo', 'batchReadTtl4')) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + await client.remove(new Aerospike.Key('test', 'demo', 'batchReadTtl4')) + }) + }) + }) + + it('returns a Promise that resolves to the batch results', function () { + const batchRecords: BatchReadRecord[] = [ + { key: new Key(helper.namespace, helper.set, 'test/batch_read/1'), readAllBins: true } + ] + + return client.batchRead(batchRecords) + .then((results?: BatchResult[]) => { + expect(results?.length).to.equal(1) + return results?.pop() + }) + .then((result?: BatchResult) => { + expect(result?.status).to.equal(Aerospike.status.OK) + expect(result?.record).to.be.instanceof(Aerospike.Record) + }) + }) +}) diff --git a/ts-test/tests/batch_remove.ts b/ts-test/tests/batch_remove.ts new file mode 100644 index 000000000..b1bb41c55 --- /dev/null +++ b/ts-test/tests/batch_remove.ts @@ -0,0 +1,104 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { AerospikeError, Client, Key as K, BatchResult} from 'aerospike'; +import * as helper from './test_helper'; +import { expect } from 'chai'; +// const util = require('util') + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +const Key = Aerospike.Key + +describe('client.batchRemove()', function () { + const client: Client = helper.client + + before(function () { + const nrecords: number = 10 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_remove/', random: false }), + recgen: recgen.record({ + i: valgen.integer(), + s: valgen.string(), + str2: valgen.string('hello'), + l: () => [1, 2, 3], + m: () => { return { a: 1, b: 2, c: 3 } } + }), + metagen: metagen.constant({ ttl: 1000 }) + } + return putgen.put(nrecords, generators) + }) + + context('with batch remove', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + it('removes batch of records', function (done) { + const batchRecords: K[] = [ + new Key(helper.namespace, helper.set, 'test/batch_remove/1'), + new Key(helper.namespace, helper.set, 'test/batch_remove/2'), + new Key(helper.namespace, helper.set, 'test/batch_remove/3'), + new Key(helper.namespace, helper.set, 'test/batch_remove/4'), + new Key(helper.namespace, 
helper.set, 'test/batch_remove/5') + ] + + client.batchRemove(batchRecords, function (err?: AerospikeError, results?: BatchResult[]) { + expect(err).not.to.be.ok + results = results || [] + expect(results.length).to.equal(5) + results.forEach(function (result) { + expect(result.status).to.equal(Aerospike.status.OK) + // expect(results.record.bins).to.be.empty() + // console.log(util.inspect(result, true, 10, true)) + }) + done() + }) + }) + + it('Will return records even if generation values is not correct', async function () { + const batchRecords: K[] = [ + new Key(helper.namespace, helper.set, 'test/batch_remove/6'), + new Key(helper.namespace, helper.set, 'test/batch_remove/7'), + new Key(helper.namespace, helper.set, 'test/batch_remove/8'), + new Key(helper.namespace, helper.set, 'test/batch_remove/9'), + new Key(helper.namespace, helper.set, 'test/batch_remove/0') + ] + try { + await client.batchRemove(batchRecords, null, new Aerospike.BatchRemovePolicy({ gen: Aerospike.policy.gen.EQ, generation: 10 })) + // Will fail if code makes it here + expect(1).to.eql(2) + } catch (error: any) { + // code will fail with undefined if expect(1).to.eql(2) executes + expect(error.code).to.eql(-16) + const results: BatchResult[] = await client.batchRemove(batchRecords) + expect(results.length).to.equal(5) + results.forEach(function (result) { + expect(result.status).to.equal(Aerospike.status.OK) + // expect(results.record.bins).to.be.empty() + // console.log(util.inspect(result, true, 10, true)) + }) + } + }) + }) +}) diff --git a/ts-test/tests/batch_select.ts b/ts-test/tests/batch_select.ts new file mode 100644 index 000000000..a1a7943f8 --- /dev/null +++ b/ts-test/tests/batch_select.ts @@ -0,0 +1,76 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Client, BatchResult, AerospikeRecord, BatchSelectRecord, Key } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +describe('client.batchSelect()', function () { + const client: Client = helper.client + + it('should successfully read bins from 10 records', function () { + const numberOfRecords: number = 10 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_get/success', random: false }), + recgen: recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() }), + metagen: metagen.constant({ ttl: 1000 }) + } + return putgen.put(numberOfRecords, generators) + .then((records: AerospikeRecord[]) => { + const keys = records.map((record: AerospikeRecord) => record.key) + return client.batchSelect(keys, ['i', 's']) + }) + .then((results: BatchResult[]) => { + expect(results.length).to.equal(numberOfRecords) + results.forEach((result: BatchResult) => { + expect(result.status).to.equal(Aerospike.status.OK) + expect(result.record.bins).to.have.all.keys('i', 's') + }) + }) + }) + + it('should fail reading bins from non-existent records', function (done) { + const numberOfRecords = 10 + const kgen: 
any = keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_get/fail', random: false }) + const keys: Key[] = keygen.range(kgen, numberOfRecords) + const bins: string[] = ['i', 's'] + client.batchSelect(keys, bins, function (err?: Error, results?: BatchSelectRecord[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(numberOfRecords) + results?.forEach(function (result: BatchSelectRecord) { + expect(result.status).to.equal(Aerospike.status.ERR_RECORD_NOT_FOUND) + }) + done() + }) + }) + + it('returns an empty array when no keys are passed', () => { + client.batchSelect([], ['i']) + .then((results: BatchSelectRecord[]) => expect(results).to.eql([])) + }) +}) diff --git a/ts-test/tests/batch_write.ts b/ts-test/tests/batch_write.ts new file mode 100644 index 000000000..e653b0200 --- /dev/null +++ b/ts-test/tests/batch_write.ts @@ -0,0 +1,575 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Client, ConfigOptions, AerospikeBins, AerospikeRecord, BatchWriteRecord, BatchResult, AerospikeError, KeyOptions} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +// const util = require('util') +const batchType = Aerospike.batchType +const status = Aerospike.status + +const op = Aerospike.operations +const GeoJSON = Aerospike.GeoJSON + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen +const valgen = helper.valgen + +const Key = Aerospike.Key + +const { + assertResultSatisfy, +} = require('./util/statefulAsyncTest') + +describe('client.batchWrite()', function () { + const client = helper.client + + before(function () { + const nrecords: number = 20 + const generators: any = { + keygen: keygen.string(helper.namespace, helper.set, { prefix: 'test/batch_write/', random: false }), + recgen: recgen.record({ + i: valgen.integer(), + s: valgen.string(), + str2: valgen.string('hello'), + l: () => [1, 2, 3], + m: () => { return { a: 1, b: 2, c: 3 } } + }), + metagen: metagen.constant({ ttl: 1000 }) + } + return putgen.put(nrecords, generators) + }) + + context('with batch write', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function (done) { + const batchRecords: BatchWriteRecord[] = [ + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/1'), + readAllBins: true + }, + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/2') + }, + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/3') + }, + { + type: batchType.BATCH_READ, + key: new 
Key(helper.namespace, helper.set, 'test/batch_write/no_such_key') + }, + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/not_either') + } + ] + + client.batchWrite(batchRecords, function (err?: AerospikeError, results?: BatchResult[]) { + const found: BatchResult[] = results?.filter( + (result: BatchResult) => result?.status === Aerospike.status.OK) || [] + const inDoubt: BatchResult[] = results?.filter( + (result: any) => result?.inDoubt === true) || [] + const notFound: BatchResult[] = results?.filter( + (result: BatchResult) => result?.status === Aerospike.status.ERR_RECORD_NOT_FOUND) || [] + expect(err).not.to.be.ok + expect(results?.length).to.equal(5) + expect(found.length).to.equal(3 - inDoubt.length) + expect(notFound.length).to.equal(2) + done() + }) + }) + + it('returns only meta data if no bins are selected', function (done) { + const batchWriteRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/4'), + ops: [ + op.write('string', 'def'), + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')), + op.append('str2', 'world')] + }, + { + type: batchType.BATCH_REMOVE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/5') + } + ] + + const batchReadRecords = [ + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/4'), + readAllBins: true + }, + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/5'), + readAllBins: true + }, + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/4') + } + ] + + client.batchWrite(batchWriteRecords, function (err?: AerospikeError, results?: BatchResult[]) { + expect(err).to.be.null + expect(results?.length).to.equal(2) + expect(results?.[1].record.bins).to.be.empty + client.batchWrite(batchReadRecords, function (err?: 
AerospikeError, results?: BatchResult[]) { + expect(err).not.to.be.ok + expect(results?.length).to.equal(3) + expect(results?.[0].record.bins).to.have.all.keys('i', 's', 'l', 'm', 'str2', 'geo', 'blob', 'string') + expect(results?.[1].status).to.equal(Aerospike.status.ERR_RECORD_NOT_FOUND) + expect(results?.[2].record.bins).to.be.empty + // results.forEach(function (result) { + // console.log(util.inspect(result, true, 10, true)) + // }) + done() + }) + }) + }) + }) + + context('with BatchPolicy', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns list and map bins as byte buffers', function () { + const batch = [{ + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/6'), + readAllBins: true + }] + const policy = new Aerospike.BatchPolicy({ + deserialize: false + }) + + return client.batchWrite(batch, policy) + .then(results => { + const bins = results[0].record.bins + expect(bins.i).to.be.a('number') + expect(bins.s).to.be.a('string') + expect(bins.l).to.be.instanceof(Buffer) + expect(bins.m).to.be.instanceof(Buffer) + }) + }) + + it('returns a Promise that resolves to the batch results', function () { + const batchRecords = [ + { + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/7'), + readAllBins: true + } + ] + + return client.batchWrite(batchRecords) + .then((results: BatchResult[]) => { + expect(results.length).to.equal(1) + return results.pop() + }) + .then((result?: BatchResult) => { + expect(result?.status).to.equal(status.OK) + expect(result?.record).to.be.instanceof(Aerospike.Record) + }) + }) + }) + + context('with exists.IGNORE returning callback', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function (done) { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/8'), + ops: [ + op.write('geo', new 
GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.IGNORE + }) + } + + ] + + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + if (error) throw error + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + expect(error).not.to.be.ok + expect(results?.[0].status).to.equal(status.OK) + done() + }) + }) + }) + }) + + context('with exists.IGNORE returning promise', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function () { + const batchRecords: BatchWriteRecord[] = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/9'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.IGNORE + }) + } + + ] + + return client.batchWrite(batchRecords) + .then(results => { + return client.batchWrite(batchRecords) + }) + .then(results => { + expect(results[0].status).to.equal(status.OK) + }) + }) + }) + + context('with exists.CREATE returning callback', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the correct status and error value', function (done) { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/10'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.CREATE + }) + } + + ] + + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + if (error) throw error + client.batchWrite(batchRecords, 
function (error?: AerospikeError, results?: BatchResult[]) { + expect(error).not.to.be.ok + expect(results?.[0].status).to.equal(status.ERR_RECORD_EXISTS) + done() + }) + }) + }) + + it('Returns correct status and error with async', async function () { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/11'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.CREATE + }) + } + + ] + + await client.batchWrite(batchRecords) + const results = await client.batchWrite(batchRecords) + + expect(results[0].status).to.equal(status.ERR_RECORD_EXISTS) + }) + }) + + context('with exists.CREATE returning promise', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function () { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/11'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.CREATE + }) + } + + ] + return client.batchWrite(batchRecords) + .then((results) => { + return client.batchWrite(batchRecords) + }) + .then((results) => { + expect(results[0].status).to.equal(status.ERR_RECORD_EXISTS) + }) + }) + }) + + context('with exists.UPDATE return callback', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function (done) { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/12'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) 
+ ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.UPDATE + }) + } + + ] + + client.remove(new Key(helper.namespace, helper.set, 'test/batch_write/12'), function (error?: AerospikeError, results?: KeyOptions) { + if (error) throw error + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + expect(error).not.to.be.ok + expect(results?.[0].status).to.equal(status.ERR_RECORD_NOT_FOUND) + done() + }) + }) + }) + }) + + context('with exists.UPDATE returning promise', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function () { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/13'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.UPDATE + }) + } + + ] + + return client.remove(new Key(helper.namespace, helper.set, 'test/batch_write/13')) + .then((results) => { + return client.batchWrite(batchRecords) + }) + .then((results) => { + expect(results[0].status).to.equal(status.ERR_RECORD_NOT_FOUND) + }) + }) + }) + + context('with exists.REPLACE return callback', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function (done) { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/14'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.REPLACE + }) + } + + ] + + client.remove(new Key(helper.namespace, helper.set, 'test/batch_write/14'), function (error?: AerospikeError, results?: KeyOptions) { + if 
(error) throw error + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + expect(error).not.to.be.ok + done() + }) + }) + }) + }) + + context('with exists.REPLACE returning promise', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function () { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/15'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.REPLACE + }) + } + + ] + + return client.remove(new Key(helper.namespace, helper.set, 'test/batch_write/15')) + .then((results?: KeyOptions) => { + return client.batchWrite(batchRecords) + }) + .then((results?: BatchResult[]) => { + expect(results?.[0].status).to.equal(status.ERR_RECORD_NOT_FOUND) + }) + }) + }) + + context('with exists.CREATE_OR_REPLACE return callback', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function (done) { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/16'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + } + + ] + + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + if (error) throw error + client.batchWrite(batchRecords, function (error?: AerospikeError, results?: BatchResult[]) { + expect(error).not.to.be.ok + expect(results?.[0].status).to.equal(status.OK) + done() + }) + }) + }) + }) + + context('with exists.CREATE_OR_REPLACE returning promise', function () { + 
helper.skipUnlessVersion('>= 6.0.0', this) + + it('returns the status whether each key was found or not', function () { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/17'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + } + + ] + + return client.batchWrite(batchRecords) + .then((results) => { + return client.batchWrite(batchRecords) + }) + .then((results) => { + expect(results[0].status).to.equal(status.OK) + }) + }) + }) + + context('with BatchParentWritePolicy', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + this.timeout(10000) + it('returns list and map bins as byte buffers', async function () { + const batch: BatchWriteRecord[] = [{ + type: batchType.BATCH_READ, + key: new Key(helper.namespace, helper.set, 'test/batch_write/18'), + readAllBins: true + }] + + const config: ConfigOptions = { + hosts: helper.config.hosts, + policies: { + batchParentWrite: new Aerospike.BatchPolicy({ socketTimeout: 0, totalTimeout: 0, deserialize: false }) + } + } + + const dummyClient = await Aerospike.connect(config) + const results: BatchResult[] = await dummyClient.batchWrite(batch) + const bins: AerospikeBins = results[0].record.bins + expect(bins.i).to.be.a('number') + expect(bins.s).to.be.a('string') + expect(bins.l).to.be.instanceof(Buffer) + expect(bins.m).to.be.instanceof(Buffer) + await dummyClient.close() + }) + }) + + context('with BatchWritePolicy ttl', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + + it('writes value with correct ttl', async function () { + const batch: BatchWriteRecord[] = [{ + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/19'), + ops: [ + op.write('example', 35), + op.write('blob', [4, 14, 28]) + ], + policy: new 
Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.REPLACE, + ttl: 1367 + }) + }] + await client.batchWrite(batch) + return client.get(new Key(helper.namespace, helper.set, 'test/batch_write/19')) + .then((result: AerospikeRecord) => { + const bins = result.bins + expect(bins.example).to.be.a('number') + expect(bins.blob).to.be.a('array') + expect(result.ttl).to.equal(1367) + }) + }) + }) +}) diff --git a/ts-test/tests/bigint.ts b/ts-test/tests/bigint.ts new file mode 100644 index 000000000..1f6047f3b --- /dev/null +++ b/ts-test/tests/bigint.ts @@ -0,0 +1,35 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + + +import * as helper from './test_helper'; + +import { expect } from 'chai'; + +describe('bigint', function () { + context('BigInt supported - Node.js 10 and later', function () { + describe('BigInt', function () { + it('is an alias for the built-in BigInt', function () { + expect(BigInt(42)).to.eq(global.BigInt(42)) + }) + }) + }) +}) diff --git a/ts-test/tests/bitwise.ts b/ts-test/tests/bitwise.ts new file mode 100644 index 000000000..289dcf35b --- /dev/null +++ b/ts-test/tests/bitwise.ts @@ -0,0 +1,466 @@ +// ***************************************************************************** +// Copyright 2019-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { Client } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const status_codes = Aerospike.status + +const bits = Aerospike.bitwise + +const { + assertError, + assertRecordEql, + assertResultEql, + cleanup, + createRecord, + expectError, + initState, + operate +} = require('./util/statefulAsyncTest') + +describe('client.operate() - Bitwise operations', function () { + helper.skipUnlessSupportsFeature(Aerospike.features.BLOB_BITS, this) + + describe('bitwise.resize', function () { + it('grows the bytes value', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02]) })) + .then(operate(bits.resize('bits', 4))) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x02, 0x00, 0x00]) })) + .then(cleanup()) + }) + + it('shrinks the bytes value', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(operate(bits.resize('bits', 2))) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x02]) })) + .then(cleanup()) + }) + + context('with resize flags', function () { + context('with resize from front flag', function () { + const resizeFlags = bits.resizeFlags.FROM_FRONT + + it('grows the value from the front', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02]) })) + .then(operate(bits.resize('bits', 4, resizeFlags))) + .then(assertRecordEql({ bits: Buffer.from([0x00, 0x00, 0x01, 0x02]) })) + .then(cleanup()) + }) + }) + + context('with grow only flag', function () { + const resizeFlags = bits.resizeFlags.GROW_ONLY + + it('returns an error when trying to shrink the value', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(expectError()) + .then(operate(bits.resize('bits', 2, 
resizeFlags))) + .then(assertError(status_codes.ERR_REQUEST_INVALID)) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(cleanup()) + }) + }) + + context('with shrink only flag', function () { + const resizeFlags = bits.resizeFlags.SHRINK_ONLY + + it('returns an error when trying to grow the value', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(expectError()) + .then(operate(bits.resize('bits', 4, resizeFlags))) + .then(assertError(status_codes.ERR_REQUEST_INVALID)) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(cleanup()) + }) + }) + }) + + context('with bitwise policy', function () { + context('with create-only write flag', function () { + const policy = { + writeFlags: bits.writeFlags.CREATE_ONLY + } + + it('creates a new byte value bin and initializes it with zeros', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(bits.resize('bits', 4).withPolicy(policy))) + .then(assertRecordEql({ bits: Buffer.from([0x00, 0x00, 0x00, 0x00]), foo: 'bar' })) + .then(cleanup()) + }) + + it('returns an error if the bin already exists', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(expectError()) + .then(operate(bits.resize('bits', 4).withPolicy(policy))) + .then(assertError(status_codes.ERR_BIN_EXISTS)) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy = { + writeFlags: bits.writeFlags.CREATE_ONLY | bits.writeFlags.NO_FAIL + } + + it('does not update the bin', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(operate(bits.resize('bits', 4).withPolicy(policy))) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x02, 0x03]) })) + .then(cleanup()) + }) + }) + }) + + context('with update-only write 
flag', function () { + const policy = { + writeFlags: bits.writeFlags.UPDATE_ONLY + } + + it('updates an existing byte value', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x00, 0x01, 0x02, 0x03, 0x04, 0x05]) })) + .then(operate(bits.resize('bits', 4).withPolicy(policy))) + .then(assertRecordEql({ bits: Buffer.from([0x00, 0x01, 0x02, 0x03]) })) + .then(cleanup()) + }) + + it('returns an error if the bin does not exists', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(expectError()) + .then(operate(bits.resize('bits', 4).withPolicy(policy))) + .then(assertError(status_codes.ERR_BIN_NOT_FOUND)) + .then(assertRecordEql({ foo: 'bar' })) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy = { + writeFlags: bits.writeFlags.UPDATE_ONLY | bits.writeFlags.NO_FAIL + } + + it('does not create the bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(bits.resize('bits', 4).withPolicy(policy))) + .then(assertRecordEql({ foo: 'bar' })) + .then(cleanup()) + }) + }) + }) + }) + }) + + describe('bitwise.insert', function () { + it('inserts value at the stated offset', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02]) })) + .then(operate(bits.insert('bits', 1, Buffer.from([0x03, 0x04])))) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x03, 0x04, 0x02]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.remove', function () { + it('removes number of bytes from the stated offset', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0x01, 0x02, 0x03, 0x04, 0x05]) })) + .then(operate(bits.remove('bits', 1, 3))) + .then(assertRecordEql({ bits: Buffer.from([0x01, 0x05]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.set', function () { + context('with value as Buffer', function () { + it('sets value on bitmap at offset for size', function () { + return initState() 
+ .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.set('bits', 13, 3, Buffer.from([0b11100000])))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000111, 0b00000011, 0b00000100, 0b00000101]) })) + .then(cleanup()) + }) + }) + + context('with value as Integer', function () { + it('sets value on bitmap at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.set('bits', 1, 8, 127))) + .then(assertRecordEql({ bits: Buffer.from([0b00111111, 0b11000010, 0b00000011, 0b0000100, 0b00000101]) })) + .then(cleanup()) + }) + }) + + it('throws a TypeError if passed an unsupported value type', function () { + expect(() => { bits.set('bin', 0, 0, 3.1416) }).to.throw(TypeError) + }) + + context('with bitwise policy', function () { + context('with no-fail flag', function () { + const policy = { + writeFlags: bits.writeFlags.UPDATE_ONLY | bits.writeFlags.NO_FAIL + } + + it('sets value on bitmap at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000000]) })) + .then(operate([ + bits.set('bits', 4, 8, Buffer.from([0b10101010])).withPolicy(policy) + ])) + .then(assertRecordEql({ + bits: Buffer.from([0b00000000]) + })) + .then(cleanup()) + }) + }) + + context('with partial flag', function () { + const policy = { + writeFlags: bits.writeFlags.UPDATE_ONLY | bits.writeFlags.NO_FAIL | bits.writeFlags.PARTIAL + } + + it('sets value on bitmap at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000000]) })) + .then(operate([ + bits.set('bits', 4, 8, Buffer.from([0b10101010])).withPolicy(policy) + ])) + .then(assertRecordEql({ + bits: Buffer.from([0b00001010]) + })) + .then(cleanup()) + }) + }) + }) + }) + + describe('bitwise.or', function () { + it('performs bitwise "or" on value 
and bitmap at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.or('bits', 17, 6, Buffer.from([0b10101000])))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b01010111, 0b00000100, 0b00000101]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.xor', function () { + it('performs bitwise "xor" on value and bitmap at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.xor('bits', 17, 6, Buffer.from([0b10101100])))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b01010101, 0b00000100, 0b00000101]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.and', function () { + it('performs bitwise "and" on value and bitmap at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.and('bits', 23, 9, Buffer.from([0b00111100, 0b10000000])))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000010, 0b00000000, 0b00000101]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.not', function () { + it('negates bitmap starting at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.not('bits', 25, 6))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b01111010, 0b00000101]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.lshift', function () { + it('shifts left bitmap start at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + 
.then(operate(bits.lshift('bits', 32, 8, 3))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00101000]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.rshift', function () { + it('shifts right bitmap start at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.rshift('bits', 0, 9, 1))) + .then(assertRecordEql({ bits: Buffer.from([0b00000000, 0b11000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.add', function () { + it('adds value to bitmap starting at bitOffset for bitSize', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.add('bits', 24, 16, 128, false))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b10000101]) })) + .then(cleanup()) + }) + + context('with overflow', function () { + context('on overflow fail', function () { + const FAIL = bits.overflow.FAIL + + it('returns an error if the addition overflows', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b11111111]) })) + .then(expectError()) + .then(operate(bits.add('bits', 0, 8, 1, false).onOverflow(FAIL))) + .then(assertError(status_codes.ERR_OP_NOT_APPLICABLE)) + .then(assertRecordEql({ bits: Buffer.from([0b11111111]) })) + .then(cleanup()) + }) + }) + + context('on overflow saturate', function () { + const SATURATE = bits.overflow.SATURATE + + it('sets max value if the addition overlows', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b11111100]) })) + .then(operate(bits.add('bits', 0, 8, 100, false).onOverflow(SATURATE))) + .then(assertRecordEql({ bits: Buffer.from([0b11111111]) })) + .then(cleanup()) + }) + }) + + context('on overflow wrap', function () 
{ + const WRAP = bits.overflow.WRAP + + it('wraps the value if the addition overflows', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b11111110]) })) + .then(operate(bits.add('bits', 0, 8, 2, false).onOverflow(WRAP))) + .then(assertRecordEql({ bits: Buffer.from([0b00000000]) })) + .then(cleanup()) + }) + }) + }) + }) + + describe('bitwise.subtract', function () { + it('subracts value from bitmap starting at bitOffset for bitSize', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.subtract('bits', 24, 16, 128, false))) + .then(assertRecordEql({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000011, 0b10000101]) })) + .then(cleanup()) + }) + + context('with overflow', function () { + context('on overflow fail', function () { + const FAIL = bits.overflow.FAIL + + it('returns an error if the subtraction underflows', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000100]) })) + .then(expectError()) + .then(operate(bits.subtract('bits', 0, 8, 10, false).onOverflow(FAIL))) + .then(assertError(status_codes.ERR_OP_NOT_APPLICABLE)) + .then(assertRecordEql({ bits: Buffer.from([0b00000100]) })) + .then(cleanup()) + }) + }) + + context('on overflow saturate', function () { + const SATURATE = bits.overflow.SATURATE + + it('sets min value if the subtraction underflows', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000100]) })) + .then(operate(bits.subtract('bits', 0, 8, 10, false).onOverflow(SATURATE))) + .then(assertRecordEql({ bits: Buffer.from([0b00000000]) })) + .then(cleanup()) + }) + }) + + context('on overflow wrap', function () { + const WRAP = bits.overflow.WRAP + + it('wraps the value if the subtraction underflows', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000100]) })) + .then(operate(bits.subtract('bits', 
0, 8, 10, false).onOverflow(WRAP))) + .then(assertRecordEql({ bits: Buffer.from([0b11111010]) })) + .then(cleanup()) + }) + }) + }) + }) + + describe('bitwise.get', function () { + it('returns bits from bitmap starting at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.get('bits', 9, 5))) + .then(assertResultEql({ bits: Buffer.from([0b10000000]) })) + .then(cleanup()) + }) + }) + + describe('bitwise.getInt', function () { + it('returns integer from bitmap starting at offset for size', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.getInt('bits', 8, 16, false))) + .then(assertResultEql({ bits: 16899 })) + .then(cleanup()) + }) + }) + + describe('bitwise.lscan', function () { + it('returns interger bit offset of the first specified value bit in bitmap', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.lscan('bits', 24, 8, true))) + .then(assertResultEql({ bits: 5 })) + .then(cleanup()) + }) + }) + + describe('bitwise.rscan', function () { + it('returns interger bit offset of the last specified value bit in bitmap', function () { + return initState() + .then(createRecord({ bits: Buffer.from([0b00000001, 0b01000010, 0b00000011, 0b00000100, 0b00000101]) })) + .then(operate(bits.rscan('bits', 32, 8, true))) + .then(assertResultEql({ bits: 7 })) + .then(cleanup()) + }) + }) +}) diff --git a/ts-test/tests/cdt_context.ts b/ts-test/tests/cdt_context.ts new file mode 100644 index 000000000..22e81acca --- /dev/null +++ b/ts-test/tests/cdt_context.ts @@ -0,0 +1,265 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ + +import Aerospike, { cdt, status as statusModule, lists as listsModule, maps as Maps} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const status: typeof statusModule = Aerospike.status +const lists: typeof listsModule = Aerospike.lists +const maps: typeof Maps = Aerospike.maps +const Context: typeof cdt.Context = Aerospike.cdt.Context + +const { + assertResultEql, + cleanup, + createRecord, + initState, + operate, + expectError, + assertError +} = require('./util/statefulAsyncTest') + +describe('Aerospike.cdt.Context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + describe('Context.addListIndex', function () { + it('sets the context to the nested list/map at the specified list index', function () { + const context: cdt.Context = new Context().addListIndex(1) + + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 5 })) + .then(cleanup) + }) + it('Throws an error when index is too large', function () { + expect(() => new Context().addListIndex(2147483648)).to.throw(Error) + }) + it('Throws an error when index is too small', function () { + expect(() => new 
Context().addListIndex(-2147483649)).to.throw(Error) + }) + }) + + describe('Context.addListIndexCreate', function () { + it(`creates list index at the specified index using order.UNORDERED with padding set to true and sets context + to the nested list/map at the newly created index `, function () { + const context: cdt.Context = new Context().addListIndexCreate(4, 0, true) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 4)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`creates list index at the specified index using order.ORDERED with padding set to true and sets context + to the nested list/map at the newly created index `, function () { + const context: cdt.Context = new Context().addListIndexCreate(4, 1, true) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 4)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`creates list index at the specified index using order.UNORDERED with padding set to false and sets context + to the nested list/map at the newly created index `, function () { + const context: cdt.Context = new Context().addListIndexCreate(4, 0, false) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 4)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`creates list index at the specified index using order.ORDERED with padding set to true and sets context + to the nested list/map at the newly created index. 
Padding necessary for completion `, function () { + const context: cdt.Context = new Context().addListIndexCreate(4, 1, false) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 4)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`creates list index at the specified index using order.UNORDERED with padding set to true and sets context + to the nested list/map at the newly created index. Padding necessary for completion `, function () { + const context: cdt.Context = new Context().addListIndexCreate(5, 0, true) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 5)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`creates list index at the specified index using order.ORDERED with padding set to true and sets context + to the nested list/map at the newly created index. Padding necessary for completion `, function () { + const context: cdt.Context = new Context().addListIndexCreate(5, 1, true) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(expectError()) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 5)])) + .then(assertError(status.ERR_OP_NOT_APPLICABLE)) + .then(cleanup) + }) + it(`creates list index at the specified index using order.UNORDERED with padding set to false and sets context + to the nested list/map at the newly created index. 
Padding necessary for completion `, function () { + const context: cdt.Context = new Context().addListIndexCreate(5, 0, false) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(expectError()) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 5)])) + .then(assertError(status.ERR_OP_NOT_APPLICABLE)) + .then(cleanup) + }) + it(`creates list index at the specified index using order.ORDERED with padding set to true and sets context + to the nested list/map at the newly created index. Padding necessary for completion `, function () { + const context: cdt.Context = new Context().addListIndexCreate(5, 1, false) + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(expectError()) + .then(operate([lists.set('nested', 0, 150).withContext(context), lists.get('nested', 5)])) + .then(assertError(status.ERR_OP_NOT_APPLICABLE)) + .then(cleanup) + }) + it('Throws an error when index is too large', function () { + expect(() => new Context().addListIndexCreate(2147483648)).to.throw(Error) + }) + it('Throws an error when index is too small', function () { + expect(() => new Context().addListIndexCreate(-2147483649)).to.throw(Error) + }) + }) + + describe('Context.addListRank', function () { + it('sets the context to the nested list/map at the specified list rank', function () { + const context: cdt.Context = new Context().addListRank(1) + + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 3 })) + .then(cleanup) + }) + it('Throws an error when rank is too large', function () { + expect(() => new Context().addListRank(2147483648)).to.throw(Error) + }) + it('Throws an error when rank is too small', function () { + expect(() => new Context().addListRank(-2147483649)).to.throw(Error) + }) + }) + + describe('Context.addListValue', 
function () { + it('sets the context to the specified list value', function () { + const context: cdt.Context = new Context().addListValue([5, 6]) + + return initState() + .then(createRecord({ nested: [[1, 2], [5, 6], [3, 4], [5, 6]] })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 5 })) + .then(cleanup) + }) + }) + + describe('Context.addMapIndex', function () { + it('sets the context to the nested list/map at the specified map index', function () { + const context: cdt.Context = new Context().addMapIndex(1) + + return initState() + .then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 3 })) + .then(cleanup) + }) + it('Throws an error when index is too large', function () { + expect(() => new Context().addMapIndex(2147483648)).to.throw(Error) + }) + it('Throws an error when index is too small', function () { + expect(() => new Context().addMapIndex(-2147483649)).to.throw(Error) + }) + }) + + describe('Context.addMapRank', function () { + it('sets the context to the nested list/map at the specified map index', function () { + const context: cdt.Context = new Context().addMapRank(1) + + return initState() + .then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 3 })) + .then(cleanup) + }) + it('Throws an error when rank is too large', function () { + expect(() => new Context().addMapRank(2147483648)).to.throw(Error) + }) + it('Throws an error when rank is too small', function () { + expect(() => new Context().addMapRank(-2147483649)).to.throw(Error) + }) + }) + + describe('Context.addMapKey', function () { + it('sets the context to the nested list/map with the specified map key', function () { + const context: cdt.Context = new Context().addMapKey('d') + + return initState() + 
.then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 5 })) + .then(cleanup) + }) + }) + + describe('Context.addMapKeyCreate', function () { + it(`Create a map key at the specified location using order.UNORDERED and sets the context + to the nested list/map at the newly created index.`, function () { + const context: cdt.Context = new Context().addMapKeyCreate('e', 0) + return initState() + .then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate([lists.set('nested', 0, 150).withContext(context), maps.getByKey('nested', 'e').andReturn(maps.returnType.VALUE)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`Create a map key at the specified location using order.KEY_ORDERED and sets the context + to the nested list/map at the newly created index. `, function () { + const context: cdt.Context = new Context().addMapKeyCreate('e', 1) + return initState() + .then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate([lists.set('nested', 0, 150).withContext(context), maps.getByKey('nested', 'e').andReturn(maps.returnType.VALUE)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + it(`Create a map key at the specified location using order.KEY_VALUE_ORDERED and sets the context + to the nested list/map at the newly created index.`, function () { + const context: cdt.Context = new Context().addMapKeyCreate('e', 3) + return initState() + .then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate([lists.set('nested', 0, 150).withContext(context), maps.getByKey('nested', 'e').andReturn(maps.returnType.VALUE)])) + .then(assertResultEql({ nested: [150] })) + .then(cleanup) + }) + }) + + describe('Context.addMapValue', function () { + it('sets the context to the nested list/map value', function () { + const 
context: cdt.Context = new Context().addMapValue([5, 6]) + + return initState() + .then(createRecord({ nested: { a: [1, 2], c: [5, 6], b: [3, 4], d: [5, 6] } })) + .then(operate(lists.get('nested', 0).withContext(context))) + .then(assertResultEql({ nested: 5 })) + .then(cleanup) + }) + }) +}) diff --git a/ts-test/tests/client.ts b/ts-test/tests/client.ts new file mode 100644 index 000000000..f3c68dd99 --- /dev/null +++ b/ts-test/tests/client.ts @@ -0,0 +1,328 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global context, expect, describe, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Client as Cli, ConfigOptions, cdt, AerospikeError, AerospikeRecord, Node, KeyOptions } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen: any = helper.keygen + +const Client: typeof Cli = Aerospike.Client +const Context: typeof cdt.Context = Aerospike.cdt.Context + +describe('Client', function () { + describe('#connect', function () { + it('return self', function () { + const client: Cli = new Client(helper.config) + return client.connect() + .then((client2: Cli) => { + expect(client2).to.equal(client) + client.close() + }) + }) + + it('should call the callback asynchronously', function (done) { + const client: Cli = new Client(helper.config) + let async = false + client.connect((error?: Error) => { + if (error) throw error + expect(async).to.be.true + client.close(false) + done() + }) + async = true + }) + + it('should return a Promise if callback without callback', function () { + const client: Cli = new Client(helper.config) + const promise: Promise = client.connect() + expect(promise).to.be.instanceof(Promise) + return promise.then(() => client.close(false)) + }) + }) + + describe('#close', function () { + it('should be a no-op if close is called after connection error #noserver', function (done) { + const client: Cli = new Client({ hosts: '127.0.0.1:0' }) + client.connect((error?: Error) => { + expect(error?.message).to.match(/Failed to connect/) + client.close(false) + done() + }) + }) + + it('should be possible to call close multiple times', function (done) { + const client: Cli = new Client(helper.config) + client.connect((error?: Error) => { + expect(error).to.be.null + client.close(false) + client.close(false) + done() + }) + }) + /* + it('should allow exit when all clients are closed', 
async function () { + const test: Function = async function (Aero: typeof Aerospike, config: ConfigOptions) { + Object.assign(config, { log: { level: Aerospike.log.OFF } }) + const client: Cli = await Aero.connect(config) + client.close() + + await new Promise((resolve, reject) => { + // beforeExit signals that the process would exit + process.on('beforeExit', resolve) + + setTimeout(() => { + reject('Process did not exit within 100ms') // eslint-disable-line + }, 100).unref() + }) + } + + await helper.runInNewProcess(test, helper.config) + }) + */ + }) + + describe('#isConnected', function () { + context('without tender health check', function () { + it('returns false if the client is not connected', function () { + const client: Cli = new Client(helper.config) + expect(client.isConnected(false)).to.be.false + }) + + it('returns true if the client is connected', function (done) { + const client: Cli = new Client(helper.config) + client.connect(function () { + expect(client.isConnected(false)).to.be.true + client.close(false) + done() + }) + }) + + it('returns false after the connection is closed', function (done) { + const client: Cli = new Client(helper.config) + client.connect(function () { + client.close(false) + expect(client.isConnected(false)).to.be.false + done() + }) + }) + }) + + context('with tender health check', function () { + it("calls the Aerospike C client library's isConnected() method", function (done) { + const client: any = new Client(helper.config) + const orig: any = (client as any).as_client.isConnected + client.connect(function () { + let tenderHealthCheck = false + client.as_client.isConnected = function () { tenderHealthCheck = true; return false } + expect(client.isConnected(true)).to.be.false + expect(tenderHealthCheck).to.be.true + client.as_client.isConnected = orig + client.close(false) + done() + }) + }) + }) + }) + + describe('Client#getNodes', function () { + const client: Cli = helper.client + + it('returns a list of cluster 
nodes', function () { + const nodes: Node[] = client.getNodes() + + expect(nodes).to.be.an('array') + expect(nodes.length).to.be.greaterThan(0) + nodes.forEach(function (node) { + expect(node.name).to.match(/^[0-9A-F]{15}$/) + expect(node.address).to.be.a('string') + }) + }) + }) + + describe('Client#contextToBase64', function () { + const client: Cli = helper.client + const context: cdt.Context = new Context().addMapKey('nested') + it('Serializes a CDT context', function () { + expect(typeof client.contextToBase64(context)).to.equal('string') + }) + /* + it('Throws an error if no context is given', function () { + expect(() => { client.contextToBase64() }).to.throw(Error) + }) + it('Throws an error if a non-object is given', function () { + expect(() => { client.contextToBase64('test') }).to.throw(Error) + }) + */ + }) + + describe('Client#contextFromBase64', function () { + const client: Cli = helper.client + const addListIndex: cdt.Context = new Context().addListIndex(5) + const addListIndexCreate: cdt.Context = new Context().addListIndexCreate(45, Aerospike.lists.order.ORDERED, true) + const addListRank: cdt.Context = new Context().addListRank(15) + const addListValueString: cdt.Context = new Context().addListValue('apple') + const addListValueInt: cdt.Context = new Context().addListValue(4500) + const addMapIndex: cdt.Context = new Context().addMapIndex(10) + const addMapRank: cdt.Context = new Context().addMapRank(11) + const addMapKey: cdt.Context = new Context().addMapKey('nested') + const addMapKeyCreate: cdt.Context = new Context().addMapKeyCreate('nested', Aerospike.maps.order.KEY_ORDERED) + const addMapValueString: cdt.Context = new Context().addMapValue('nested') + const addMapValueInt: cdt.Context = new Context().addMapValue(1000) + it('Deserializes a cdt context with addListIndex', function () { + expect(client.contextFromBase64(client.contextToBase64(addListIndex))).to.eql(addListIndex) + }) + it('Deserializes a cdt context with addListIndexCreate', 
function () { + expect(client.contextFromBase64(client.contextToBase64(addListIndexCreate))).to.eql(addListIndexCreate) + }) + it('Deserializes a cdt context with addListRank', function () { + expect(client.contextFromBase64(client.contextToBase64(addListRank))).to.eql(addListRank) + }) + it('Deserializes a cdt context with addListValueString', function () { + expect(client.contextFromBase64(client.contextToBase64(addListValueString))).to.eql(addListValueString) + }) + it('Deserializes a cdt context with addListValueInt', function () { + expect(client.contextFromBase64(client.contextToBase64(addListValueInt))).to.eql(addListValueInt) + }) + it('Deserializes a cdt context with addMapIndex', function () { + expect(client.contextFromBase64(client.contextToBase64(addMapIndex))).to.eql(addMapIndex) + }) + it('Deserializes a cdt context with addMapRank', function () { + expect(client.contextFromBase64(client.contextToBase64(addMapRank))).to.eql(addMapRank) + }) + it('Deserializes a cdt context with addMapKey', function () { + expect(client.contextFromBase64(client.contextToBase64(addMapKey))).to.eql(addMapKey) + }) + it('Deserializes a cdt context with addMapKeyCreate', function () { + expect(client.contextFromBase64(client.contextToBase64(addMapKeyCreate))).to.eql(addMapKeyCreate) + }) + it('Deserializes a cdt context with addMapValueString', function () { + expect(client.contextFromBase64(client.contextToBase64(addMapValueString))).to.eql(addMapValueString) + }) + it('Deserializes a cdt context with addMapValueInt', function () { + expect(client.contextFromBase64(client.contextToBase64(addMapValueInt))).to.eql(addMapValueInt) + }) + /* + it('Throws an error if no value is given', function () { + expect(() => { client.contextFromBase64() }).to.throw(Error) + }) + it('Throws an error if an non-string value is given', function () { + expect(() => { client.contextFromBase64(45) }).to.throw(Error) + }) + */ + }) + + context.skip('cluster name', function () { + it('should 
fail to connect to the cluster if the cluster name does not match', function (done) { + const config = Object.assign({}, helper.config) + config.clusterName = 'notAValidClusterName' + const client = new Client(config) + client.connect(function (err?: AerospikeError) { + expect(err?.code).to.eq(Aerospike.status.ERR_CLIENT) + client.close(false) + done() + }) + }) + }) + + describe('Events', function () { + it('client should emit nodeAdded events when connecting', function (done) { + const client = new Client(helper.config) + client.once('nodeAdded', (event: any) => { + client.close() + done() + }) + client.connect() + }) + + it('client should emit events on cluster state changes', function (done) { + const client = new Client(helper.config) + client.once('event', (event: any) => { + expect(event.name).to.equal('nodeAdded') + client.close() + done() + }) + client.connect() + }) + }) + + context('callbacks', function () { + // Execute a client command on a client instance that has been setup to + // trigger an error; check that the error callback occurs asynchronously, + // i.e. only after the command function has returned. + // The get command is used for the test but the same behavior should apply + // to all client commands. 
+ function assertErrorCbAsync (client?: Cli, errorCb?: Function, done?: any) { + const checkpoints: string[] = [] + const checkAssertions = function (checkpoint: string) { + checkpoints.push(checkpoint) + if (checkpoints.length !== 2) return + expect(checkpoints).to.eql(['after', 'callback']) + if (client?.isConnected()) client?.close(false) + done() + } + const key = keygen.string(helper.namespace, helper.set)() + client?.get(key, function (err?: AerospikeError, _record?: AerospikeRecord) { + errorCb?.(err) + checkAssertions('callback') + }) + checkAssertions('after') + } + + it('callback is asynchronous in case of an client error #noserver', function (done) { + // trying to send a command to a client that is not connected will trigger a client error + const client = Aerospike.client() + const errorCheck = function (err: Error) { + expect(err).to.be.instanceof(Error) + expect(err.message).to.equal('Not connected.') + } + assertErrorCbAsync(client, errorCheck, done) + }) + + it('callback is asynchronous in case of an I/O error', function (done) { + // maxConnsPerNode = 0 will trigger an error in the C client when trying to send a command + const config: ConfigOptions = Object.assign({ maxConnsPerNode: 0 }, helper.config) + Aerospike.connect(config, function (err?: AerospikeError, client?: Cli) { + if (err) throw err + const errorCheck = function (err: AerospikeError) { + expect(err).to.be.instanceof(Error) + expect(err.code).to.equal(Aerospike.status.ERR_NO_MORE_CONNECTIONS) + } + assertErrorCbAsync(client, errorCheck, done) + }) + }) + }) + + describe('#captureStackTraces', function () { + it('should capture stack traces that show the command being called', function (done) { + const client: Cli = helper.client + const key: KeyOptions = keygen.string(helper.namespace, helper.set)() + const orig: boolean = client.captureStackTraces + client.captureStackTraces = true + client.get(key, function (err?: AerospikeError) { + expect(err?.stack).to.match(/Client.get/) + 
client.captureStackTraces = orig + done() + }) + }) + }) +}) diff --git a/ts-test/tests/command.js b/ts-test/tests/command.js new file mode 100644 index 000000000..ef5832774 --- /dev/null +++ b/ts-test/tests/command.js @@ -0,0 +1,42 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +require('./test_helper') +const Command = require('../lib/commands/command') + +describe('Command', function () { + context('Extend Command', function () { + class TestCommand extends Command('testCmd') { + foo () { return 'bar' } + } + + it('creates subclasses with informative constructor names', function () { + const cmd = new TestCommand({}) + expect(cmd.constructor.name).to.equal('TestCommand') + }) + + it('keeps a reference to the client instance', function () { + const client = {} + const cmd = new TestCommand(client) + expect(cmd.client).to.equal(client) + }) + }) +}) diff --git a/ts-test/tests/command_queue.js b/ts-test/tests/command_queue.js new file mode 100644 index 000000000..d459a3ad4 --- /dev/null +++ b/ts-test/tests/command_queue.js @@ -0,0 +1,130 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +const helper = require('./test_helper') + +describe('Command Queue #slow', function () { + it('queues commands it cannot process immediately', async function () { + const test = async function (Aerospike, config) { + Object.assign(config, { log: { level: Aerospike.log.OFF } }) + Aerospike.setupGlobalCommandQueue({ maxCommandsInProcess: 5, maxCommandsInQueue: 5 }) + const client = await Aerospike.connect(config) + const cmds = Array.from({ length: 10 }, (_, i) => + client.put(new Aerospike.Key(helper.namespace, helper.set, i), { i }) + ) + const results = await Promise.all(cmds) + client.close() + return results.length + } + + const result = await helper.runInNewProcess(test, helper.config) + .then(() => expect(result).to.equal(10)) + .catch(error => console.error('Error:', error)) + }) + + it('rejects commands it cannot queue', async function () { + const test = async function (Aerospike, config) { + Object.assign(config, { log: { level: Aerospike.log.OFF } }) // disable logging for this test to suppress C client error messages + Aerospike.setupGlobalCommandQueue({ maxCommandsInProcess: 5, maxCommandsInQueue: 1 }) + const client = await Aerospike.connect(config) + const cmds = Array.from({ length: 10 }, (_, i) => + client.put(new Aerospike.Key(helper.namespace, 
helper.set, i), { i }) + ) + try { + await Promise.all(cmds) + client.close() + return 'All commands processed successfully' + } catch (error) { + client.close() + return error.message + } + } + + const result = await helper.runInNewProcess(test, helper.config) + .then(() => expect(result).to.match(/Async delay queue full/)) + .catch(error => console.error('Error:', error)) + }) + + it('throws an error when trying to configure command queue after client connect', async function () { + const test = async function (Aerospike, config) { + Object.assign(config, { log: { level: Aerospike.log.OFF } }) + const client = await Aerospike.connect(config) + try { + Aerospike.setupGlobalCommandQueue({ maxCommandsInProcess: 5, maxCommandsInQueue: 1 }) + client.close() + return 'Successfully setup command queue' + } catch (error) { + client.close() + return error.message + } + } + + const result = await helper.runInNewProcess(test, helper.config) + expect(result).to.match(/Command queue has already been initialized!/) + }) + it('does not deadlock on extra query with failOnClusterChange info commands #389', async function () { + const test = async function (Aerospike, config) { + Object.assign(config, { + log: { level: Aerospike.log.OFF }, + policies: { + query: new Aerospike.QueryPolicy({ totalTimeout: 10000, failOnClusterChange: true }) + } + }) + Aerospike.setupGlobalCommandQueue({ maxCommandsInProcess: 5, maxCommandsInQueue: 50 }) + const setName = 'testGlobalCommandQueueDeadlock389' + + const client = await Aerospike.connect(config) + try { + const job = await client.createIntegerIndex({ + ns: 'test', + set: setName, + bin: 'i', + index: `idx-${setName}` + }) + await job.wait(10) + } catch (error) { + // index already exists + if (error.code !== Aerospike.status.ERR_INDEX_FOUND) throw error + } + + const puts = Array.from({ length: 5 }, (_, i) => + client.put(new Aerospike.Key('test', setName, i), { i }) + ) + await Promise.all(puts) + + try { + let results = Array.from({ 
length: 5 }, (_, i) => { + const query = client.query('test', setName) + query.where(Aerospike.filter.equal('i', i)) + return query.results() + }) + results = await Promise.all(results) + return results.reduce((sum, records) => sum + records.length, 0) + } catch (error) { + // throws "Delay queue timeout" error on deadlock + return error.message + } + } + + const result = await helper.runInNewProcess(test, helper.config) + expect(result).to.eq(5) + }) +}) diff --git a/ts-test/tests/config.ts b/ts-test/tests/config.ts new file mode 100644 index 000000000..0615103c7 --- /dev/null +++ b/ts-test/tests/config.ts @@ -0,0 +1,191 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global beforeEach, afterEach, expect, describe, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { ConfigOptions, Config as Conf, ConfigPolicies} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const Config = Aerospike.Config + +describe('Config #noserver', function () { + let asHostsEnv: any; + beforeEach(function () { + asHostsEnv = process.env.AEROSPIKE_HOSTS + delete process.env.AEROSPIKE_HOSTS + }) + + afterEach(function () { + delete process.env.AEROSPIKE_HOSTS + if (asHostsEnv) { + process.env.AEROSPIKE_HOSTS = asHostsEnv + } + }) + + describe('new Config', function () { + it('copies config values from the passed Object', function () { + const settings: ConfigOptions = { + authMode: Aerospike.auth.EXTERNAL_INSECURE, + clusterName: 'testCluster', + connTimeoutMs: 1000, + hosts: [{ addr: 'localhost', port: 3000 }], + log: { level: 1, file: 2 }, + loginTimeoutMs: 2000, + maxConnsPerNode: 200, + maxSocketIdle: 30, + minConnsPerNode: 10, + maxErrorRate: 100, + errorRateWindow: 1, + modlua: { userPath: '/user/path' }, + password: 'sekret', + port: 3333, + policies: { + apply: new Aerospike.ApplyPolicy({ totalTimeout: 1000 }), + batch: new Aerospike.BatchPolicy({ totalTimeout: 1000 }), + info: new Aerospike.InfoPolicy({ timeout: 1000 }), + operate: new Aerospike.OperatePolicy({ totalTimeout: 1000 }), + query: new Aerospike.QueryPolicy({ totalTimeout: 1000 }), + read: new Aerospike.ReadPolicy({ totalTimeout: 1000 }), + remove: new Aerospike.RemovePolicy({ totalTimeout: 1000 }), + scan: new Aerospike.ScanPolicy({ totalTimeout: 1000 }), + write: new Aerospike.WritePolicy({ totalTimeout: 1000 }) + }, + rackAware: true, + rackId: 42, + sharedMemory: { key: 1234 }, + tenderInterval: 1000, + tls: { enable: true }, + user: 'admin' + } + + const config: Conf = new Config(settings) + 
expect(config).to.have.property('authMode') + expect(config).to.have.property('clusterName') + expect(config).to.have.property('connTimeoutMs') + expect(config).to.have.property('maxErrorRate') + expect(config).to.have.property('errorRateWindow') + expect(config).to.have.property('hosts') + expect(config).to.have.property('log') + expect(config).to.have.property('loginTimeoutMs') + expect(config).to.have.property('maxConnsPerNode') + expect(config).to.have.property('maxSocketIdle') + expect(config).to.have.property('minConnsPerNode') + expect(config).to.have.property('modlua') + expect(config).to.have.property('password') + expect(config).to.have.property('policies') + expect(config).to.have.property('port') + expect(config).to.have.property('rackAware') + expect(config).to.have.property('rackId') + expect(config).to.have.property('sharedMemory') + expect(config).to.have.property('tenderInterval') + expect(config).to.have.property('tls') + expect(config).to.have.property('user') + + const policies: ConfigPolicies = config.policies + expect(policies.apply).to.be.instanceof(Aerospike.ApplyPolicy) + expect(policies.batch).to.be.instanceof(Aerospike.BatchPolicy) + expect(policies.info).to.be.instanceof(Aerospike.InfoPolicy) + expect(policies.operate).to.be.instanceof(Aerospike.OperatePolicy) + expect(policies.query).to.be.instanceof(Aerospike.QueryPolicy) + expect(policies.read).to.be.instanceof(Aerospike.ReadPolicy) + expect(policies.remove).to.be.instanceof(Aerospike.RemovePolicy) + expect(policies.scan).to.be.instanceof(Aerospike.ScanPolicy) + expect(policies.write).to.be.instanceof(Aerospike.WritePolicy) + }) + + it('initializes default policies', function () { + const settings: ConfigOptions = { + policies: { + apply: { totalTimeout: 1000 }, + batch: { totalTimeout: 1000 }, + info: { timeout: 1000 }, + operate: { totalTimeout: 1000 }, + query: { totalTimeout: 1000 }, + read: { totalTimeout: 1000 }, + remove: { totalTimeout: 1000 }, + scan: { totalTimeout: 1000 }, 
+ write: { totalTimeout: 1000 } + } + } + const config: Conf = new Config(settings) + + expect(config.policies.apply).to.be.instanceof(Aerospike.ApplyPolicy) + expect(config.policies.batch).to.be.instanceof(Aerospike.BatchPolicy) + expect(config.policies.info).to.be.instanceof(Aerospike.InfoPolicy) + expect(config.policies.operate).to.be.instanceof(Aerospike.OperatePolicy) + expect(config.policies.query).to.be.instanceof(Aerospike.QueryPolicy) + expect(config.policies.read).to.be.instanceof(Aerospike.ReadPolicy) + expect(config.policies.remove).to.be.instanceof(Aerospike.RemovePolicy) + expect(config.policies.scan).to.be.instanceof(Aerospike.ScanPolicy) + expect(config.policies.write).to.be.instanceof(Aerospike.WritePolicy) + }) + /* + it('ignores invalid config properties', function () { + const obj = { + log: './debug.log', + policies: 1000, + connTimeoutMs: 1.5, + tenderInterval: '1000', + user: { name: 'admin' }, + password: 12345, + sharedMemory: true, + rackId: 'myRack' + } + const config = new Config(obj) + expect(config).to.not.have.property('log') + expect(config).to.not.have.property('connTimeoutMs') + expect(config).to.not.have.property('tenderInterval') + expect(config).to.not.have.property('user') + expect(config).to.not.have.property('password') + expect(config).to.not.have.property('sharedMemory') + expect(config).to.not.have.property('rackId') + expect(config.policies).to.be.empty + }) + + it('throws a TypeError if invalid policy values are passed', function () { + const settings = { + policies: { + timeout: 1000, + totalTimeout: 1000 + } + } + expect(() => new Config(settings)).to.throw(TypeError) + }) + */ + it('reads hosts from AEROSPIKE_HOSTS if not specified', function () { + process.env.AEROSPIKE_HOSTS = 'db1:3001' + const config: Conf = new Config() + expect(config.hosts).to.eql('db1:3001') + }) + + it('defaults to "localhost:3000"', function () { + const config: Conf = new Config() + expect(config.hosts).to.eql('localhost:3000') + }) + + 
it('defaults to the specified default port number', function () { + const config: Conf = new Config({ + port: 3333 + }) + expect(config.hosts).to.eql('localhost:3333') + }) + }) +}) diff --git a/ts-test/tests/double.ts b/ts-test/tests/double.ts new file mode 100644 index 000000000..752ce664c --- /dev/null +++ b/ts-test/tests/double.ts @@ -0,0 +1,55 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ + +import Aerospike, { Double as Doub } from 'aerospike'; + +import { expect } from 'chai'; + +const Double = Aerospike.Double + +describe('Aerospike.Double #noserver', function () { + describe('constructor', function () { + it('returns a new Double value', function () { + const subject: Doub = new Double(3.1415) + + expect(subject).to.be.instanceof(Double) + expect(subject.Double).to.equal(3.1415) + }) + /* + it('throws an error if not passed a number', function () { + const fn = () => new Double('four point nine') + expect(fn).to.throw(TypeError) + }) + + it('throws an error if called without `new`', function () { + const fn = () => Double(3.1415) + expect(fn).to.throw('Invalid use of Double constructor') + }) + */ + }) + + describe('#value()', function () { + const subject: Doub = new Double(3.1415) + + it('returns the double value', function () { + expect(subject.value()).to.equal(3.1415) + }) + }) +}) diff --git a/ts-test/tests/enterprise.ts b/ts-test/tests/enterprise.ts new file mode 100644 index 000000000..67f26aa17 --- /dev/null +++ b/ts-test/tests/enterprise.ts @@ -0,0 +1,69 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, it, context */ +/* eslint-disable no-unused-expressions */ +import Aerospike, { Client as Cli, WritePolicyOptions, RemovePolicyOptions, AerospikeRecord, AerospikeBins, Key } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen = helper.keygen.string(helper.namespace, helper.set, { prefix: 'test/enterprise/' }) +const recgen = helper.recgen +const valgen = helper.valgen + +context('Enterprise server features', function () { + helper.skipUnlessEnterprise(this) + + const client: Cli = helper.client + + context('compression', function () { + helper.skipUnlessVersion('>= 4.8.0', this) + + // Client/server requests/responses > 128 bytes should get compressed; but + // note that the server only applies compression if it saves at least 32 + // bytes, so payloads close to the 128 byte threshold might not get + // compressed. In any case, we can't really verify whether compression was + // applied at the protocol level since that's handled by the C client + // library. So best we can do is to ensure normal put/get requests still + // work as expected. 
+ it('should compress the request to the server', async function () { + const key: Key = keygen() + const record: AerospikeBins = recgen.record({ string: valgen.string({ length: { min: 1024 } }) })() + const policy: WritePolicyOptions = { compress: true } + + await client.put(key, record, {}, policy) + const result: AerospikeRecord = await client.get(key, policy) + await client.remove(key) + + expect(result.bins.string).to.equal(record.string) + }) + }) + + context('durable deletes', function () { + it('should apply the durable delete policy', async function () { + const key: Key = keygen() + const record: AerospikeBins = recgen.record({ string: valgen.string() })() + const policy: RemovePolicyOptions = { durableDelete: true } + + await client.put(key, record) + await client.remove(key, policy) + expect(await client.exists(key)).to.be.false + }) + }) +}) diff --git a/ts-test/tests/error.ts b/ts-test/tests/error.ts new file mode 100644 index 000000000..e5d102d5d --- /dev/null +++ b/ts-test/tests/error.ts @@ -0,0 +1,144 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { AerospikeError, status as statusModule } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const ASError: typeof AerospikeError = Aerospike.AerospikeError +const status: typeof statusModule = Aerospike.status + +require('./test_helper.js') + +describe('AerospikeError #noserver', function () { + describe('constructor', function () { + it('creates a new AerospikeError instance', function () { + expect(new ASError()).to.be.instanceof(ASError) + }) + + it('inherits from the Error class', function () { + expect(new ASError()).to.be.instanceof(Error) + }) + + it('initializes the error with default values', function () { + const subject: AerospikeError = new ASError() + expect(subject).to.have.property('message', '') + expect(subject).to.have.property('code', status.ERR_CLIENT) + expect(subject).to.have.property('command', null) + expect(subject).to.have.property('func', null) + expect(subject).to.have.property('file', null) + expect(subject).to.have.property('line', null) + expect(subject).to.have.property('inDoubt', false) + }) + + it('sets an error message', function () { + const subject = new ASError('Dooh!') + expect(subject).to.have.property('message', 'Dooh!') + }) + + it('keeps a reference to the command', function () { + const cmd = {} + const subject = new ASError('Dooh!', cmd) + expect(subject).to.have.property('command', cmd) + }) + + it('captures a stacktrace', function () { + const subject = new ASError('Dooh!') + expect(subject).to.have.property('stack') + .that.is.a('string') + .that.includes('AerospikeError: Dooh!') + }) + + it('copies the stacktrace of the command', function () { + const cmd: any = { name: 'AerospikeError', message: 'Dooh!' 
} + Error.captureStackTrace(cmd) + const subject = new ASError('Dooh!', cmd) + expect(subject).to.have.property('stack') + .that.is.a('string') + .that.equals(cmd.stack) + }) + }) + + describe('.fromASError', function () { + it('copies the info from a AerospikeClient error instance', function () { + const error: any = { + code: -11, + message: 'Dooh!', + func: 'connect', + file: 'lib/client.js', + line: 101, + inDoubt: true + } + const subject = (ASError as any).fromASError(error) + expect(subject).to.have.property('code', -11) + expect(subject).to.have.property('message', 'Dooh!') + expect(subject).to.have.property('func', 'connect') + expect(subject).to.have.property('file', 'lib/client.js') + expect(subject).to.have.property('line', 101) + expect(subject).to.have.property('inDoubt', true) + }) + + it('replaces error codes with descriptive messages', function () { + const error: any = { + code: status.ERR_RECORD_NOT_FOUND, + message: '127.0.0.1:3000 AEROSPIKE_ERR_RECORD_NOT_FOUND' + } + const subject = (ASError as any).fromASError(error) + expect(subject.message).to.equal('127.0.0.1:3000 Record does not exist in database. 
May be returned by read, or write with policy Aerospike.policy.exists.UPDATE') + }) + + it('returns an AerospikeError instance unmodified', function () { + const error: any = new AerospikeError('Dooh!') + expect((ASError as any).fromASError(error)).to.equal(error) + }) + + it('returns null if the status code is OK', function () { + const error: any = { code: status.OK } + expect((ASError as any).fromASError(error)).to.be.null + }) + + it('returns null if no error is passed', function () { + expect((ASError as any).fromASError(null)).to.be.null + }) + }) + + describe('#isServerError()', function () { + it('returns true if the error code indicates a server error', function () { + const error: any = { code: status.ERR_RECORD_NOT_FOUND } + const subject = (ASError as any).fromASError(error) + expect(subject.isServerError()).to.be.true + }) + + it('returns false if the error code indicates a client error', function () { + const error: any = { code: status.ERR_PARAM } + const subject = (ASError as any).fromASError(error) + expect(subject.isServerError()).to.be.false + }) + }) + + describe('#toString()', function () { + it('sets an informative error message', function () { + const subject: AerospikeError = new ASError('Dooh!') + expect(subject.toString()).to.eql('AerospikeError: Dooh!') + }) + }) +}) diff --git a/ts-test/tests/exists.ts b/ts-test/tests/exists.ts new file mode 100644 index 000000000..4606a6c5c --- /dev/null +++ b/ts-test/tests/exists.ts @@ -0,0 +1,163 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global context, expect, describe, it */ +/* eslint-disable no-unused-expressions */ +import Aerospike, { Key, AerospikeError, AerospikeRecord} from 'aerospike'; + +import * as helper from './test_helper'; + +import { expect } from 'chai'; + +const keygen = helper.keygen + +describe('client.exists()', function () { + const client = helper.client + + context('Promises', function () { + it('returns true if the record exists', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/1' })() + + return client.put(key, { str: 'abcde' }) + .then(() => client.exists(key)) + .then(result => expect(result).to.be.true) + .then(() => client.remove(key)) + }) + + it('returns false if the record does not exist', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/2' })() + + return client.exists(key) + .then(result => expect(result).to.be.false) + }) + }) + + context('Callbacks', function () { + it('returns true if the record exists', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/3' })() + + client.put(key, { str: 'abcde' }, (error?: AerospikeError) => { + if (error) throw error + client.exists(key, (error?: AerospikeError, result?: boolean) => { + if (error) throw error + expect(result).to.be.true + client.remove(key, (error?: AerospikeError) => { + if (error) throw error + done() + }) + }) + }) + }) + + 
it('returns false if the record does not exist', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/4' })() + + client.exists(key, (error: any, result: any) => { + if (error) throw error + expect(result).to.be.false + done() + }) + }) + }) +}) + + +describe('client.existsWithMetadata()', function () { + const client = helper.client + + context('Promises', function () { + it('returns an Aerospike Record with Metatdata if the record exists', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/5' })() + + return client.put(key, { str: 'abcde' }, {ttl: 50, gen: 7}) + .then(() => client.put(key, { str: 'abcd' }, {ttl: 50, gen: 7})) + .then(() => client.put(key, { str: 'abc' }, {ttl: 50, gen: 7})) + .then(() => client.put(key, { str: 'ab' }, {ttl: 50, gen: 7})) + .then(() => client.put(key, { str: 'a' }, {ttl: 50, gen: 7})) + .then(() => client.put(key, { str: 'abcde' }, {ttl: 50, gen: 7})) + .then(() => client.existsWithMetadata(key)) + .then(result => { + expect(result.key).to.eql(key) + expect(result.bins).to.be.null + expect(result.ttl).to.be.within(48, 50) + expect(result.gen).to.eql(6) + }) + .then(() => client.remove(key)) + }) + + it('returns an Aerospike Record with Metatdata if the record exists and no meta or ttl is set', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/6' })() + + return client.put(key, { str: 'abcde' }) + .then(() => client.existsWithMetadata(key)) + .then(result => { + expect(result.key).to.eql(key) + expect(result.bins).to.be.null + expect(result.ttl).to.eql(-1) + expect(result.gen).to.eql(1) + }) + .then(() => client.remove(key)) + }) + + it('returns false if the record does not exist', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/7' })() + + return client.existsWithMetadata(key) + .then((result: AerospikeRecord) => { + 
expect(result.key).to.equal(key) + expect(result.bins).to.be.null + expect(result.ttl).to.be.null + expect(result.gen).to.be.null + }) + }) + }) + + context('Callbacks', function () { + it('returns an Aerospike Record with Metatdata if the record exists', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/8' })() + + client.put(key, { str: 'abcde' }, {ttl: 100, gen: 14}, (error?: AerospikeError) => { + if (error) throw error + client.existsWithMetadata(key, (error?: AerospikeError, result?: AerospikeRecord) => { + if (error) throw error + expect(result?.key).to.equal(key) + expect(result?.bins).to.be.null + expect(result?.ttl).to.be.within(98, 100) + expect(result?.gen).to.eql(1) + client.remove(key, (error?: AerospikeError) => { + if (error) throw error + done() + }) + }) + }) + }) + + it('returns an Aerospike Record without Metatdata if the record does not exist', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exists/9' })() + + client.existsWithMetadata(key, (error?: AerospikeError, result?: AerospikeRecord) => { + if (error) throw error + expect(result?.key).to.equal(key) + expect(result?.bins).to.be.null + expect(result?.ttl).to.be.null + expect(result?.gen).to.be.null + done() + }) + }) + }) +}) \ No newline at end of file diff --git a/ts-test/tests/exp.ts b/ts-test/tests/exp.ts new file mode 100644 index 000000000..47e76a92c --- /dev/null +++ b/ts-test/tests/exp.ts @@ -0,0 +1,361 @@ +// ***************************************************************************** +// Copyright 2021-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { AerospikeError, Client as Cli, exp as expr, operations, maps as Maps, GeoJSON as GJ, Key, AerospikeBins, cdt, AerospikeRecord, RecordMetadata, AerospikeExp} from 'aerospike'; + +import { expect } from 'chai'; + +const exp: typeof expr = Aerospike.exp +const op: typeof operations = Aerospike.operations +const maps: typeof Maps = Aerospike.maps + +const GeoJSON: typeof GJ = Aerospike.GeoJSON + +const FILTERED_OUT: number = Aerospike.status.FILTERED_OUT + +import * as helper from './test_helper'; + +const keygen = helper.keygen +const tempBin = 'ExpVar' + +describe('Aerospike.exp', function () { + helper.skipUnlessVersion('>= 5.0.0', this) + + const client: Cli = helper.client + + const orderMap = (key: Key, binName: string, order: Maps.order, ctx?: cdt.Context): Promise => { + const policy = new Aerospike.MapPolicy({ order }) + const setMapPolicy = Aerospike.maps.setPolicy(binName, policy) + if (ctx) setMapPolicy.withContext(ctx) + return client.operate(key, [setMapPolicy]) + } + + const orderByKey = (key: Key, binName: string, ctx?: cdt.Context): Promise => orderMap(key, binName, Aerospike.maps.order.KEY_ORDERED , ctx) + + async function createRecord (bins: AerospikeBins, meta: RecordMetadata | null = null) { + const key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exp' })() + await client.put(key, bins, meta) + return key + } + + async function testNoMatch (key: Key, 
filterExpression: AerospikeExp) { + const rejectPolicy = { filterExpression } + let operationSuccessful = false + try { + await client.remove(key, rejectPolicy) + operationSuccessful = true + } catch (error: any) { + expect(error.code).to.eq(FILTERED_OUT, `Received unexpected error code with message "${error.message}"`) + } + if (operationSuccessful) { + expect.fail('Test no-match: Operation should have not have been executed due to failed expression match') + } + } + + async function testMatch (key: Key, filterExpression: AerospikeExp) { + const passPolicy = { filterExpression } + await client.remove(key, passPolicy) + } + + it('builds up a filter expression value', function () { + const filter = exp.eq(exp.binInt('intVal'), exp.int(42)) + expect(filter).to.be.an('array') + }) + + describe('comparison expressions', function () { + describe('eq on int bin', function () { + it('evaluates to true if an integer bin equals the given value', async function () { + const key = await createRecord({ intVal: 42 }) + + await testNoMatch(key, exp.eq(exp.binInt('intVal'), exp.int(37))) + await testMatch(key, exp.eq(exp.binInt('intVal'), exp.int(42))) + }) + }) + + describe('eq on bool bin', function () { + it('evaluates to true if an integer bin equals the given value', async function () { + const key = await createRecord({ boolVal: true }) + + await testNoMatch(key, exp.eq(exp.binBool('boolVal'), exp.bool(false))) + await testMatch(key, exp.eq(exp.binBool('boolVal'), exp.bool(true))) + }) + }) + + describe('eq on map bin', function () { + helper.skipUnlessVersion('>= 6.3.0', this) + it('evaluates to true if a map bin matches a value', async function () { + const key = await createRecord({ map: { c: 1, b: 2, a: 3 } }) + await orderByKey(key, 'map') + await testNoMatch(key, exp.eq(exp.map({ d: 4, e: 5 }), exp.binMap('map'))) + await testMatch(key, exp.eq(exp.map({ c: 1, b: 2, a: 3 }), exp.binMap('map'))) + }) + + it('evaluates to true if a map bin matches a map bin', async 
function () { + const key = await createRecord({ map: { c: 1, b: 2, a: 3 }, map2: { c: 1, b: 2, a: 3 }, map3: { c: 1, b: 2 } }) + await orderByKey(key, 'map') + await testNoMatch(key, exp.eq(exp.binMap('map'), exp.binMap('map3'))) + await testMatch(key, exp.eq(exp.binMap('map'), exp.binMap('map2'))) + }) + }) + + describe('eq on list bin', function () { + helper.skipUnlessVersion('>= 6.3.0', this) + it('evaluates to true if a list bin matches a value', async function () { + const key = await createRecord({ list: [4, 2, 0] }) + await orderByKey(key, 'map') + await testNoMatch(key, exp.eq(exp.list([0, 2, 4]), exp.binList('list'))) + await testMatch(key, exp.eq(exp.list([4, 2, 0]), exp.binList('list'))) + }) + + it('evaluates to true if a list bin matches a list bin', async function () { + const key = await createRecord({ list: [4, 2, 0], list2: [4, 2, 0], list3: [4, 2] }) + await orderByKey(key, 'map') + await testNoMatch(key, exp.eq(exp.binList('list'), exp.binList('list3'))) + await testMatch(key, exp.eq(exp.binList('list'), exp.binList('list2'))) + }) + }) + + describe('eq on blob bin', function () { + it('evaluates to true if a blob bin matches a value', async function () { + const key = await createRecord({ blob: Buffer.from([1, 2, 3]) }) + + await testNoMatch(key, exp.eq(exp.binBlob('blob'), exp.bytes(Buffer.from([4, 5, 6])))) + await testMatch(key, exp.eq(exp.binBlob('blob'), exp.bytes(Buffer.from([1, 2, 3])))) + }) + }) + + describe('ne on int bin', function () { + it('evaluates to true if an integer bin does not equal the given value', async function () { + const key = await createRecord({ intVal: 42 }) + + await testNoMatch(key, exp.ne(exp.binInt('intVal'), exp.int(42))) + await testMatch(key, exp.ne(exp.binInt('intVal'), exp.int(37))) + }) + }) + + describe('gt on float bin', function () { + it('evaluates to true if a float bin value is greater than the given value', async function () { + const key = await createRecord({ pi: Math.PI }) + + await 
testNoMatch(key, exp.gt(exp.binFloat('pi'), exp.float(4.5678))) + await testMatch(key, exp.gt(exp.binFloat('pi'), exp.float(1.2345))) + }) + }) + + describe('regex - regular expression comparisons', function () { + it('matches a string value with a regular expression', async function () { + const key = await createRecord({ title: 'Star Wars' }) + + await testNoMatch(key, exp.cmpRegex(0, 'Treck$', exp.binStr('title'))) + await testMatch(key, exp.cmpRegex(0, '^Star', exp.binStr('title'))) + }) + + it('matches a string value with a regular expression - case insensitive', async function () { + const key = await createRecord({ title: 'Star Wars' }) + + await testNoMatch(key, exp.cmpRegex(Aerospike.regex.ICASE, 'trEcK$', exp.binStr('title'))) + await testMatch(key, exp.cmpRegex(Aerospike.regex.ICASE, '^sTaR', exp.binStr('title'))) + }) + }) + + describe('geo - geospatial comparisons', function () { + it('matches if the point is contained within the region', async function () { + const key = await createRecord({ location: new GeoJSON.Point(103.913, 1.308) }) + + const circle1: GJ = new GeoJSON.Circle(9.78, 53.55, 50000) + const circle2: GJ = new GeoJSON.Circle(103.875, 1.297, 10000) + await testNoMatch(key, exp.cmpGeo(exp.binGeo('location'), exp.geo(circle1))) + await testMatch(key, exp.cmpGeo(exp.binGeo('location'), exp.geo(circle2))) + }) + + it('matches if the region contains the point', async function () { + const key = await createRecord({ location: new GeoJSON.Point(103.913, 1.308) }) + + const circle1: GJ = new GeoJSON.Circle(9.78, 53.55, 50000) + const circle2: GJ = new GeoJSON.Circle(103.875, 1.297, 10000) + await testNoMatch(key, exp.cmpGeo(exp.geo(circle1), exp.binGeo('location'))) + await testMatch(key, exp.cmpGeo(exp.geo(circle2), exp.binGeo('location'))) + }) + }) + }) + + describe('binExists', function () { + it('evaluates to true if the bin with the given name exists', async function () { + const key = await createRecord({ foo: 'bar' }) + + await 
testNoMatch(key, exp.binExists('fox')) + await testMatch(key, exp.binExists('foo')) + }) + }) + + describe('ttl', function () { + helper.skipUnlessSupportsTtl(this) + + it('evaluates to true if the record ttl matches expectations', async function () { + const key = await createRecord({ foo: 'bar' }, { ttl: 1000 }) + + await testNoMatch(key, exp.eq(exp.ttl(), exp.int(0))) + await testMatch(key, exp.gt(exp.ttl(), exp.int(0))) + }) + }) + + describe('voidTime', function () { + helper.skipUnlessSupportsTtl(this) + + it('evaluates to true if the record void time matches expectations', async function () { + const key = await createRecord({ foo: 'bar' }, { ttl: 1000 }) + + const now = Date.now() * 1000000 // nanoseconds + await testNoMatch(key, exp.lt(exp.voidTime(), exp.int(now))) + await testMatch(key, exp.gt(exp.voidTime(), exp.int(now))) + }) + }) + + describe('not', function () { + it('evaluates to true if the expression evaluates to false', async function () { + const key = await createRecord({ a: 1, b: 2, c: 3 }) + + await testNoMatch(key, exp.not(exp.binExists('a'))) + await testMatch(key, exp.not(exp.binExists('d'))) + }) + }) + + describe('and', function () { + it('evaluates to true if all expressions evaluate to true', async function () { + const key = await createRecord({ a: 1, b: 2, c: 3 }) + + await testNoMatch(key, exp.and(exp.binExists('a'), exp.binExists('d'))) + await testMatch(key, exp.and(exp.binExists('a'), exp.binExists('b'))) + }) + }) + + describe('or', function () { + it('evaluates to true if any expression evaluates to true', async function () { + const key = await createRecord({ a: 1, b: 2, c: 3 }) + + await testNoMatch(key, exp.or(exp.binExists('d'), exp.binExists('e'))) + await testMatch(key, exp.or(exp.binExists('a'), exp.binExists('d'))) + }) + }) + + describe('nil', function () { + it('evaluates to true if any expression evaluates to true', async function () { + const key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } 
}) + + await testNoMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.str('green'), exp.nil(), maps.returnType.COUNT), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.str('green'), exp.nil(), maps.returnType.COUNT), exp.int(1))) + }) + }) + + describe('inf', function () { + it('evaluates to true if any expression evaluates to true', async function () { + const key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.inf(), exp.str('green'), maps.returnType.COUNT), exp.int(1))) + await testMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.inf(), exp.str('green'), maps.returnType.COUNT), exp.int(2))) + }) + }) + + describe('recordSize', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + + it('evaluates to true if any expression evaluates to true', async function () { + const key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.recordSize(), exp.int(1))) + await testMatch(key, exp.gt(exp.recordSize(), exp.int(64))) + }) + + it('evaluates to true if any expression evaluates to true', async function () { + const key = await createRecord({ tags: { a: '123456789', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.recordSize(), exp.int(1))) + await testMatch(key, exp.gt(exp.recordSize(), exp.int(64))) + }) + }) + + describe('wildcard', function () { + it('evaluates to true if any expression evaluates to true', async function () { + const key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.inf(), exp.wildcard(), maps.returnType.COUNT), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.inf(), exp.wildcard(), maps.returnType.COUNT), exp.int(3))) + }) + }) + + 
describe('expWriteFlags', function () { + it('write flags have correct value', async function () { + expect(exp.expWriteFlags).to.have.property('DEFAULT', 0) + expect(exp.expWriteFlags).to.have.property('CREATE_ONLY', 1) + expect(exp.expWriteFlags).to.have.property('UPDATE_ONLY', 2) + expect(exp.expWriteFlags).to.have.property('ALLOW_DELETE', 4) + expect(exp.expWriteFlags).to.have.property('POLICY_NO_FAIL', 8) + expect(exp.expWriteFlags).to.have.property('EVAL_NO_FAIL', 16) + }) + }) + describe('expReadFlags', function () { + it('read flags have correct value', async function () { + expect(exp.expReadFlags).to.have.property('DEFAULT', 0) + expect(exp.expReadFlags).to.have.property('EVAL_NO_FAIL', 16) + }) + }) + + describe('arithmetic expressions', function () { + describe('int bin add expression', function () { + it('evaluates exp_read op to true if temp bin equals the sum of bin and given value', async function () { + const key = await createRecord({ intVal: 2 }) + const ops = [ + exp.operations.read(tempBin, + exp.add(exp.binInt('intVal'), exp.binInt('intVal')), + exp.expWriteFlags.DEFAULT), + op.read('intVal') + ] + const result = await client.operate(key, ops, {}) + // console.log(result) + expect(result.bins.intVal).to.eql(2) + expect(result.bins.ExpVar).to.eql(4) + }) + it('evaluates exp_write op to true if bin equals the sum of bin and given value', async function () { + const key = await createRecord({ intVal: 2 }) + const ops = [ + exp.operations.write('intVal', + exp.add(exp.binInt('intVal'), exp.binInt('intVal')), + exp.expWriteFlags.DEFAULT), + op.read('intVal') + ] + const result = await client.operate(key, ops, {}) + // console.log(result) + expect(result.bins.intVal).to.eql(4) + }) + it('evaluates exp_read op to true if temp bin equals the sum of bin and given value', async function () { + const key = await createRecord({ intVal: 2 }) + const ops = [ + exp.operations.read(tempBin, + exp.add(exp.binInt('intVal'), exp.binInt('intVal')), + 
exp.expWriteFlags.DEFAULT), + op.read('intVal') + ] + const result = await client.operate(key, ops, {}) + // console.log(result) + expect(result.bins.intVal).to.eql(2) + expect(result.bins.ExpVar).to.eql(4) + }) + }) + }) +}) diff --git a/ts-test/tests/exp_bit.ts b/ts-test/tests/exp_bit.ts new file mode 100644 index 000000000..3c7fd0074 --- /dev/null +++ b/ts-test/tests/exp_bit.ts @@ -0,0 +1,67 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { AerospikeBins, RecordMetadata, Key, AerospikeExp, AerospikeRecord, operations, exp as expModule} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const exp: typeof expModule = Aerospike.exp +const op: typeof operations = Aerospike.operations + +const keygen = helper.keygen +const tempBin = 'ExpVar' + +describe('Aerospike.exp_operations', function () { + helper.skipUnlessVersion('>= 5.0.0', this) + + const client = helper.client + + async function createRecord (bins: AerospikeBins, meta: RecordMetadata | null = null) { + const key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exp' })() + await client.put(key, bins, meta) + return key + } + + it('builds up a filter expression value', function () { + const filter: AerospikeExp = exp.eq(exp.binInt('intVal'), exp.int(42)) + expect(filter).to.be.an('array') + }) + + describe('bit expressions', function () { + describe('bit bin get expression', function () { + it('evaluates exp_read op to true if temp bin equals to bin bits', async function () { + // const key = await createRecord({ blob: Buffer.from([0b00000001, 0b01000010, 0b01010111, 0b00000100, 0b00000101]) }) + const key: Key = await createRecord({ blob: Buffer.from([0, 1, 2, 3]) }) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.bit.count(exp.binBlob('blob'), exp.uint(32), exp.int(0)), // b0,b1,b10,b11 (4bits set) + // exp.bit.insert(exp.binBlob('blob'), exp.bytes(Buffer.from([1]), 1), exp.int(1)), + 0), + op.read('blob') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + // console.log(result) + expect(result.bins.ExpVar).to.eql(4) + }) + }) + }) +}) diff --git a/ts-test/tests/exp_hll.ts b/ts-test/tests/exp_hll.ts new file mode 100644 index 000000000..51dc4b27b --- /dev/null +++ 
b/ts-test/tests/exp_hll.ts @@ -0,0 +1,78 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { Key, AerospikeExp, Client as Cli, RecordMetadata, operations, exp as expModule, hll as hllModule, AerospikeRecord, AerospikeBins} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const exp: typeof expModule = Aerospike.exp +const op: typeof operations = Aerospike.operations +const hll: typeof hllModule = Aerospike.hll + +const keygen: any = helper.keygen +const tempBin: string = 'ExpVar' + +describe('Aerospike.exp_operations', function () { + helper.skipUnlessVersion('>= 5.0.0', this) + + const client: Cli = helper.client + + async function createRecord (bins: AerospikeBins, meta: RecordMetadata | null = null) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exp' })() + await client.put(key, bins, meta) + return key + } + + it('builds up a filter expression value', function () { + const filter: AerospikeExp = exp.eq(exp.binInt('intVal'), exp.int(42)) + expect(filter).to.be.an('array') + }) + + describe('hll expressions', function () { + describe('hll bin getCount expression', function () { + it('evaluates 
exp_read op to true if temp bin equals to unique items in hll', async function () { + const key: Key = await createRecord({ + hllCats: Buffer.from([0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, + 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), + list: ['tiger'] + }) + const ops: operations.Operation[] = [ + hll.add('hllCats2', ['jaguar', 'tiger', 'tiger', 'leopard', 'lion', 'jaguar'], 8), + exp.operations.read(tempBin, + exp.hll.getCount(exp.binHll('hllCats2')), + 0), + op.read('hllCats2') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + // console.log(result) + expect(result.bins.ExpVar).to.eql(4) + }) + }) + }) +}) diff --git a/ts-test/tests/exp_list.ts b/ts-test/tests/exp_list.ts new file mode 100644 index 000000000..fcddc9510 --- /dev/null +++ b/ts-test/tests/exp_list.ts @@ -0,0 +1,872 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { exp as expModule, operations, lists as listsModule, Client as Cli, Key, RemovePolicyOptions, AerospikeExp, AerospikeRecord, cdt} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const exp: typeof expModule = Aerospike.exp +const op: typeof operations = Aerospike.operations +const lists: typeof listsModule = Aerospike.lists +const Context: typeof cdt.Context = Aerospike.cdt.Context + +const keygen: any = helper.keygen + +const tempBin: string = 'ExpVar' +const FILTERED_OUT: number = Aerospike.status.FILTERED_OUT + +describe('Aerospike.exp_operations', function () { + helper.skipUnlessVersion('>= 5.0.0', this) + + const client: Cli = helper.client + + async function createRecord (bins: any, meta = null) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exp' })() + await client.put(key, bins, meta) + return key + } + + async function testNoMatch (key: any, filterExpression: any) { + const rejectPolicy: RemovePolicyOptions = { filterExpression } + let operationSuccessful: boolean = false + try { + await client.remove(key, rejectPolicy) + operationSuccessful = true + } catch (error: any) { + expect(error.code).to.eq(FILTERED_OUT, `Received unexpected error code with message "${error.message}"`) + } + if (operationSuccessful) { + expect.fail('Test no-match: Operation should have not have been executed due to failed expression match') + } + } + + async function testMatch (key: any, filterExpression: any) { + const passPolicy: RemovePolicyOptions = { filterExpression } + await client.remove(key, passPolicy) + } + + it('builds up a filter expression value', function () { + const filter: AerospikeExp = 
exp.eq(exp.binInt('intVal'), exp.int(42)) + expect(filter).to.be.an('array') + }) + describe('list expressions', function () { + describe('list size', function () { + it('matches the size of a list value', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + await testNoMatch(key, exp.eq(exp.lists.size(exp.binList('tags')), exp.int(5))) + await testMatch(key, exp.eq(exp.lists.size(exp.binList('tags')), exp.int(3))) + }) + }) + + describe('list size with context', function () { + it('matches the size of a list value within a nested context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + await testNoMatch(key, exp.eq(exp.lists.size(exp.binList('tags'), context), exp.int(5))) + await testMatch(key, exp.eq(exp.lists.size(exp.binList('tags'), context), exp.int(4))) + }) + }) + + describe('clear', function () { + it('removes all items in a map', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.clear( + exp.binList('tags')), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: [] }) + }) + + it('selects item identified by index inside nested map', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.clear( + exp.binList('tags'), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', []] }) + }) + }) + + describe('removeByValue', function () { + 
it('removes list item by value', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByValue( + exp.binList('tags'), + exp.str('green')), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'yellow'] }) + }) + + it('removes list item by value in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByValue( + exp.binList('tags'), + exp.str('white'), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['orange', 'pink', 'black']] }) + }) + }) + + describe('removeByValueList', function () { + it('removes list item by value list', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByValueList( + exp.binList('tags'), + exp.list(['green', 'yellow'])), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue'] }) + }) + + it('removes list item by value list in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByValueList( + exp.binList('tags'), + exp.list(['orange', 'white']), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord 
= await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['pink', 'black']] }) + }) + }) + + describe('removeByValueRange', function () { + it('removes list item by value range', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByValueRange( + exp.binList('tags'), + exp.str('green'), + exp.str('blue')), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['green', 'yellow'] }) + }) + + it('removes list item by value range in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByValueRange( + exp.binList('tags'), + exp.str('pink'), + exp.str('black'), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['pink', 'white']] }) + }) + }) + + describe('removeByRelRankRangeToEnd', function () { + it('removes list item by value relative rank range to end', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRelRankRangeToEnd( + exp.binList('tags'), + exp.int(1), + exp.str('blue')), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue'] }) + }) + + it('removes list item by value relative rank range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + 
const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRelRankRangeToEnd( + exp.binList('tags'), + exp.int(1), + exp.str('orange'), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['orange', 'black']] }) + }) + }) + + describe('removeByRelRankRange', function () { + it('removes list item by value relative rank range', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRelRankRange( + exp.binList('tags'), + exp.int(1), + exp.int(-1), + exp.str('green')), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['green', 'yellow'] }) + }) + + it('removes list item by value relative rank range in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRelRankRange( + exp.binList('tags'), + exp.int(1), + exp.int(-1), + exp.str('pink'), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['pink', 'white', 'black']] }) + }) + }) + + describe('removeByIndex', function () { + it('removes a list item by index', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByIndex( + exp.binList('tags'), + exp.int(1)), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await 
client.operate(key, ops, {}) + expect(result.bins).to.eql({ tags: ['blue', 'yellow'] }) + }) + + it('removes a list item by index in a cdt context in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByIndex( + exp.binList('tags'), + exp.int(1), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + expect(result.bins).to.eql({ tags: ['blue', 'green', ['orange', 'white', 'black']] }) + }) + }) + + describe('removeByIndexRangeToEnd', function () { + it('removes a list item by index range to end', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByIndexRangeToEnd( + exp.binList('tags'), + exp.int(1)), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue'] }) + }) + + it('removes a list item by index range to end in a cdt context in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByIndexRangeToEnd( + exp.binList('tags'), + exp.int(1), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['orange']] }) + }) + }) + + describe('removeByIndexRange', function () { + it('removes a list item by index range', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow'] }) + + 
const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByIndexRange( + exp.binList('tags'), + exp.int(2), + exp.int(0)), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['yellow'] }) + }) + + it('removes a list item by index range in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByIndexRange( + exp.binList('tags'), + exp.int(2), + exp.int(0), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['white', 'black']] }) + }) + }) + + describe('removeByRank', function () { + it('removes a list item by rank', async function () { + const key: Key = await createRecord({ tags: ['yellow', 'green', 'blue'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRank( + exp.binList('tags'), + exp.int(2)), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['green', 'blue'] }) + }) + + it('removes a list item by rank in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRank( + exp.binList('tags'), + exp.int(2), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['orange', 'white', 'black']] }) + }) + }) + + 
describe('removeByRankRangeToEnd', function () { + it('removes a list item by rank range to end', async function () { + const key: Key = await createRecord({ tags: ['yellow', 'green', 'blue'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRankRangeToEnd( + exp.binList('tags'), + exp.int(1)), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue'] }) + }) + + it('removes a list item by rank range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRankRangeToEnd( + exp.binList('tags'), + exp.int(1), + context), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['black']] }) + }) + }) + + describe('removeByRankRange', function () { + it('removes a list item by rank range', async function () { + const key: Key = await createRecord({ tags: ['yellow', 'green', 'blue'] }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRankRange( + exp.binList('tags'), + exp.int(2), + exp.int(0)), + 0), + op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['yellow'] }) + }) + + it('removes a list item by rank range in a cdt context', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', ['orange', 'pink', 'white', 'black']] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.lists.removeByRankRange( + exp.binList('tags'), + exp.int(2), + exp.int(0), + context), + 0), 
+ op.read('tags') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: ['blue', 'green', ['pink', 'white']] }) + }) + }) + + describe('getByValue', function () { + it('matches the count of the matched list values', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow', 'green'] }) + + await testNoMatch(key, exp.eq(exp.lists.getByValue(exp.binList('tags'), exp.str('green'), lists.returnType.COUNT), exp.int(1))) + await testMatch(key, exp.eq(exp.lists.getByValue(exp.binList('tags'), exp.str('green'), lists.returnType.COUNT), exp.int(2))) + }) + }) + + describe('getByValue with context', function () { + it('matches the count of the matched list values', async function () { + const key: Key = await createRecord({ tags: ['blue', 'green', 'yellow', 'green', ['orange', 'pink', 'white', 'black', 'pink']] }) + const context: cdt.Context = new Context().addListIndex(4) + await testNoMatch(key, exp.eq(exp.lists.getByValue(exp.binList('tags'), exp.str('pink'), lists.returnType.COUNT, context), exp.int(1))) + await testMatch(key, exp.eq(exp.lists.getByValue(exp.binList('tags'), exp.str('pink'), lists.returnType.COUNT, context), exp.int(2))) + }) + }) + + describe('getByValueRange', function () { + it('matches the count of the matched range of list values', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, 38, 25, 88, 48] }) + + await testNoMatch(key, exp.eq(exp.lists.getByValueRange(exp.binList('values'), exp.int(25), exp.int(50), lists.returnType.COUNT), exp.int(1))) + await testMatch(key, exp.eq(exp.lists.getByValueRange(exp.binList('values'), exp.int(25), exp.int(50), lists.returnType.COUNT), exp.int(3))) + }) + }) + + describe('getByValueRange with context', function () { + it('matches the count of the matched range of list values', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, 38, 25, 88, 48, [1, 92, 94, 96]] }) 
+ const context: cdt.Context = new Context().addListIndex(7) + await testNoMatch(key, exp.eq(exp.lists.getByValueRange(exp.binList('values'), exp.int(90), exp.int(99), lists.returnType.COUNT, context), exp.int(1))) + await testMatch(key, exp.eq(exp.lists.getByValueRange(exp.binList('values'), exp.int(90), exp.int(99), lists.returnType.COUNT, context), exp.int(3))) + }) + }) + + describe('getByValueList', function () { + it('matches the count of the matched values', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, 38, 25, 88, 88, 48, 16] }) + + await testNoMatch(key, exp.eq(exp.lists.getByValueList(exp.binList('values'), exp.list([88, 94]), lists.returnType.COUNT), exp.int(2))) + await testMatch(key, exp.eq(exp.lists.getByValueList(exp.binList('values'), exp.list([88, 94]), lists.returnType.COUNT), exp.int(3))) + }) + }) + + describe('getByValueList with context', function () { + it('matches the count of the matched values', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, 38, 25, 88, 88, 48, 16, [0, 1, 2, 73, 74, 73, 74]] }) + const context: cdt.Context = new Context().addListIndex(9) + await testNoMatch(key, exp.eq(exp.lists.getByValueList(exp.binList('values'), exp.list([73, 74]), lists.returnType.COUNT, context), exp.int(2))) + await testMatch(key, exp.eq(exp.lists.getByValueList(exp.binList('values'), exp.list([73, 74]), lists.returnType.COUNT, context), exp.int(4))) + }) + }) + + describe('getByRelRankRangeToEnd', function () { + it('selects list items nearest to value and greater by relative rank', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, 38, 25, 88, 88, 48, 16] }) + + await testNoMatch(key, exp.eq(exp.lists.getByRelRankRangeToEnd(exp.binList('values'), exp.int(38), exp.int(1), lists.returnType.VALUE), exp.list([38, 48, 53, 88, 88, 94]))) + await testMatch(key, exp.eq(exp.lists.getByRelRankRangeToEnd(exp.binList('values'), exp.int(38), exp.int(1), 
lists.returnType.VALUE), exp.list([48, 53, 88, 88, 94]))) + }) + }) + + describe('getByRelRankRangeToEnd with context', function () { + it('selects list items nearest to value and greater by relative rank', async function () { + const key: Key = await createRecord({ values: [53, 16, [2, 12, 14, 17]] }) + const context: cdt.Context = new Context().addListIndex(2) + await testNoMatch(key, exp.eq(exp.lists.getByRelRankRangeToEnd(exp.binList('values'), exp.int(12), exp.int(1), lists.returnType.VALUE, context), exp.list([16, 53]))) + await testMatch(key, exp.eq(exp.lists.getByRelRankRangeToEnd(exp.binList('values'), exp.int(12), exp.int(1), lists.returnType.VALUE, context), exp.list([14, 17]))) + }) + }) + + describe('getByRelRankRange', function () { + it('selects list items nearest to value and greater by relative rank with a count limit', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, 38, 25, 88, 88, 48, 16] }) + + await testNoMatch(key, exp.eq(exp.lists.getByRelRankRange(exp.binList('values'), exp.int(38), exp.int(1), exp.int(3), lists.returnType.VALUE), exp.list([38, 48, 53]))) + await testMatch(key, exp.eq(exp.lists.getByRelRankRange(exp.binList('values'), exp.int(38), exp.int(1), exp.int(3), lists.returnType.VALUE), exp.list([48, 53, 88]))) + }) + }) + + describe('getByRelRankRange with context', function () { + it('selects list items nearest to value and greater by relative rank with a count limit', async function () { + const key: Key = await createRecord({ values: [53, 16, 94, [30, 40, 45, 20]] }) + const context: cdt.Context = new Context().addListIndex(3) + await testNoMatch(key, exp.eq(exp.lists.getByRelRankRange(exp.binList('values'), exp.int(30), exp.int(1), exp.int(3), lists.returnType.VALUE, context), exp.list([94]))) + await testMatch(key, exp.eq(exp.lists.getByRelRankRange(exp.binList('values'), exp.int(30), exp.int(1), exp.int(3), lists.returnType.VALUE, context), exp.list([40, 45]))) + }) + }) + + 
describe('getByIndex', function () { + it('selects item identified by index', async function () { + const key: Key = await createRecord({ values: ['Singapore', 'Hamburg', 'San Francisco', 'Tokyo'] }) + + await testNoMatch(key, exp.eq(exp.lists.getByIndex(exp.binList('values'), exp.int(2), exp.type.STR, lists.returnType.VALUE), exp.str('Hamburg'))) + await testMatch(key, exp.eq(exp.lists.getByIndex(exp.binList('values'), exp.int(2), exp.type.STR, lists.returnType.VALUE), exp.str('San Francisco'))) + }) + }) + + describe('getByIndex with context', function () { + it('selects item identified by index within nested context', async function () { + const key: Key = await createRecord({ values: ['Singapore', 'Hamburg', 'San Francisco', 'Tokyo', ['Firth', 'Hickman', 'Palmyra']] }) + const context: cdt.Context = new Context().addListIndex(4) + await testNoMatch(key, exp.eq(exp.lists.getByIndex(exp.binList('values'), exp.int(2), exp.type.STR, lists.returnType.VALUE, context), exp.str('San Francisco'))) + await testMatch(key, exp.eq(exp.lists.getByIndex(exp.binList('values'), exp.int(2), exp.type.STR, lists.returnType.VALUE, context), exp.str('Palmyra'))) + }) + }) + + describe('getByIndexRangeToEnd', function () { + it('selects list items starting at specified index to the end of the list', async function () { + const key: Key = await createRecord({ values: ['Singapore', 'Hamburg', 'San Francisco', 'Tokyo'] }) + + await testNoMatch(key, exp.eq(exp.lists.getByIndexRangeToEnd(exp.binList('values'), exp.int(2), lists.returnType.VALUE), exp.list(['Hamburg', 'San Francisco']))) + await testMatch(key, exp.eq(exp.lists.getByIndexRangeToEnd(exp.binList('values'), exp.int(2), lists.returnType.VALUE), exp.list(['San Francisco', 'Tokyo']))) + }) + }) + + describe('getByIndexRangeToEnd with context', function () { + it('selects list items starting at specified index to the end of the list', async function () { + const key: Key = await createRecord({ values: ['Singapore', 'Hamburg', 'San 
Francisco', 'Tokyo', ['Firth', 'Hickman', 'Palmyra']] }) + const context: cdt.Context = new Context().addListIndex(4) + await testNoMatch(key, exp.eq(exp.lists.getByIndexRangeToEnd(exp.binList('values'), exp.int(1), lists.returnType.VALUE, context), exp.list(['Hamburg', 'San Francisco', 'Tokyo']))) + await testMatch(key, exp.eq(exp.lists.getByIndexRangeToEnd(exp.binList('values'), exp.int(1), lists.returnType.VALUE, context), exp.list(['Hickman', 'Palmyra']))) + }) + }) + + describe('getByIndexRange', function () { + it('selects "count" list items starting at specified index', async function () { + const key: Key = await createRecord({ values: ['Singapore', 'Hamburg', 'San Francisco', 'Tokyo'] }) + + await testNoMatch(key, exp.eq(exp.lists.getByIndexRange(exp.binList('values'), exp.int(2), exp.int(1), lists.returnType.VALUE), exp.list(['Hamburg']))) + await testMatch(key, exp.eq(exp.lists.getByIndexRange(exp.binList('values'), exp.int(2), exp.int(1), lists.returnType.VALUE), exp.list(['San Francisco']))) + }) + }) + + describe('getByIndexRange with context', function () { + it('selects "count" list items starting at specified index', async function () { + const key: Key = await createRecord({ values: ['Singapore', 'Hamburg', 'San Francisco', 'Tokyo', ['Firth', 'Hickman', 'Palmyra']] }) + const context: cdt.Context = new Context().addListIndex(4) + await testNoMatch(key, exp.eq(exp.lists.getByIndexRange(exp.binList('values'), exp.int(0), exp.int(2), lists.returnType.VALUE, context), exp.list(['Singapore', 'Hamburg']))) + await testMatch(key, exp.eq(exp.lists.getByIndexRange(exp.binList('values'), exp.int(0), exp.int(2), lists.returnType.VALUE, context), exp.list(['Firth', 'Hickman']))) + }) + }) + + describe('getByRank', function () { + it('selects list item identified by rank', async function () { + const key: Key = await createRecord({ values: [83, 39, 49, 20, 42, 41, 98] }) + + await testNoMatch(key, exp.eq(exp.lists.getByRank(exp.binList('values'), exp.int(2), 
exp.type.INT, lists.returnType.VALUE), exp.int(42))) + await testMatch(key, exp.eq(exp.lists.getByRank(exp.binList('values'), exp.int(2), exp.type.INT, lists.returnType.VALUE), exp.int(41))) + }) + }) + + describe('getByRank with context', function () { + it('selects list item identified by rank', async function () { + const key: Key = await createRecord({ values: [83, [0, 4, 2, 8], 40] }) + const context: cdt.Context = new Context().addListIndex(1) + await testNoMatch(key, exp.eq(exp.lists.getByRank(exp.binList('values'), exp.int(2), exp.type.INT, lists.returnType.VALUE, context), exp.int(40))) + await testMatch(key, exp.eq(exp.lists.getByRank(exp.binList('values'), exp.int(2), exp.type.INT, lists.returnType.VALUE, context), exp.int(4))) + }) + }) + + describe('getByRankRangeToEnd', function () { + it('selects list items starting at specified rank to the last ranked item', async function () { + const key: Key = await createRecord({ values: [83, 39, 49, 20, 42, 41, 98] }) + + await testNoMatch(key, exp.eq(exp.lists.getByRankRangeToEnd(exp.binList('values'), exp.int(2), lists.returnType.VALUE), exp.list([39, 41, 42, 49, 83, 98]))) + await testMatch(key, exp.eq(exp.lists.getByRankRangeToEnd(exp.binList('values'), exp.int(2), lists.returnType.VALUE), exp.list([41, 42, 49, 83, 98]))) + }) + }) + + describe('getByRankRangeToEnd with context', function () { + it('selects list items starting at specified rank to the last ranked item', async function () { + const key: Key = await createRecord({ values: [83, [0, 4, 2, 8]] }) + const context: cdt.Context = new Context().addListIndex(1) + await testNoMatch(key, exp.eq(exp.lists.getByRankRangeToEnd(exp.binList('values'), exp.int(1), lists.returnType.VALUE, context), exp.list([0, 2, 4, 8]))) + await testMatch(key, exp.eq(exp.lists.getByRankRangeToEnd(exp.binList('values'), exp.int(1), lists.returnType.VALUE, context), exp.list([2, 4, 8]))) + }) + }) + + describe('getByRankRange', function () { + it('selects "count" list items 
starting at specified rank', async function () { + const key: Key = await createRecord({ values: [83, 39, 49, 20, 42, 41, 98] }) + + await testNoMatch(key, exp.eq(exp.lists.getByRankRange(exp.binList('values'), exp.int(2), exp.int(2), lists.returnType.VALUE), exp.list([39, 41, 42]))) + await testMatch(key, exp.eq(exp.lists.getByRankRange(exp.binList('values'), exp.int(2), exp.int(2), lists.returnType.VALUE), exp.list([42, 41]))) + }) + }) + + describe('getByRankRange with context', function () { + it('selects "count" list items starting at specified rank', async function () { + const key: Key = await createRecord({ values: [83, [0, 4, 2, 8]] }) + const context: cdt.Context = new Context().addListIndex(1) + await testNoMatch(key, exp.eq(exp.lists.getByRankRange(exp.binList('values'), exp.int(1), exp.int(4), lists.returnType.VALUE, context), exp.list([83, [0, 4, 2, 8]]))) + await testMatch(key, exp.eq(exp.lists.getByRankRange(exp.binList('values'), exp.int(1), exp.int(4), lists.returnType.VALUE, context), exp.list([2, 4, 8]))) + }) + }) + + describe('list bin append expression', function () { + it('appends integer value to list', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5], intVal: 6 }) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.append(exp.binList('list'), exp.binInt('intVal')), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5]) + expect(result.bins.ExpVar).to.eql([2, 3, 4, 5, 6]) + }) + + it('appends integer value to a list within a nested context', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5, [4]], intVal: 6 }) + const context: cdt.Context = new Context().addListIndex(4) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.append(exp.binList('list'), exp.binInt('intVal'), null, context), + 0), + op.read('list') + ] + const result: 
AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5, [4]]) + expect(result.bins.ExpVar).to.eql([2, 3, 4, 5, [4, 6]]) + }) + }) + + describe('list bin appendItems expression', function () { + it('appends list to itself', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5] }) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.appendItems(exp.binList('list'), exp.binList('list')), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5]) + expect(result.bins.ExpVar).to.eql([2, 3, 4, 5, 2, 3, 4, 5]) + }) + + it('appends list to a list within a nested context', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5, [80, 90, 100]] }) + const context: cdt.Context = new Context().addListIndex(4) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.appendItems(exp.binList('list'), exp.binList('list'), null, context), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5, [80, 90, 100]]) + expect(result.bins.ExpVar).to.eql([2, 3, 4, 5, [80, 90, 100, 2, 3, 4, 5, [80, 90, 100]]]) + }) + }) + }) + describe('list bin insert expression', function () { + it('inserts value at specified index', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5], intVal: 6 }) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.insert(exp.binList('list'), exp.binInt('intVal'), exp.int(2)), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5]) + expect(result.bins.ExpVar).to.eql([2, 3, 6, 4, 5]) + }) + + it('inserts value at specified index within a nested context', async function () { + const key: Key = await 
createRecord({ list: [2, 3, 4, 5, [4, 1, 9]], intVal: 7 }) + const context: cdt.Context = new Context().addListIndex(4) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.insert(exp.binList('list'), exp.binInt('intVal'), exp.int(2), null, context), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5, [4, 1, 9]]) + expect(result.bins.ExpVar).to.eql([2, 3, 4, 5, [4, 1, 7, 9]]) + }) + }) + describe('list bin insertItems expression', function () { + it('inserts values at specified index', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5] }) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.insertItems(exp.binList('list'), exp.binList('list'), exp.int(1)), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, 4, 5]) + expect(result.bins.ExpVar).to.eql([2, 2, 3, 4, 5, 3, 4, 5]) + }) + + it('inserts values at specified index within a nested context', async function () { + const key: Key = await createRecord({ list: [2, 3, [9, 9]] }) + const context: cdt.Context = new Context().addListIndex(2) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.insertItems(exp.binList('list'), exp.binList('list'), exp.int(1), null, context), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.list).to.eql([2, 3, [9, 9]]) + expect(result.bins.ExpVar).to.eql([2, 3, [9, 2, 3, [9, 9], 9]]) + }) + }) + + describe('list bin sort expression', function () { + it('sorts specified list', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5] }) + const ops: operations.Operation[] = [ + exp.operations.write('list', + exp.lists.insertItems(exp.binList('list'), exp.binList('list'), exp.int(1)), + 0), + 
exp.operations.read(tempBin, + exp.lists.sort(exp.binList('list'), 1), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.ExpVar).to.eql([5, 5, 4, 4, 3, 3, 2, 2]) + expect(result.bins.list).to.eql([2, 2, 3, 4, 5, 3, 4, 5]) + }) + + it('sorts specified nested list', async function () { + const key: Key = await createRecord({ list: [2, 3, 4, 5, [9, 100]] }) + const context: cdt.Context = new Context().addListIndex(4) + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.lists.sort(exp.binList('list'), 1, context), + 0), + op.read('list') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + + expect(result.bins.ExpVar).to.eql([2, 3, 4, 5, [100, 9]]) + expect(result.bins.list).to.eql([2, 3, 4, 5, [9, 100]]) + }) + }) +}) diff --git a/ts-test/tests/exp_map.ts b/ts-test/tests/exp_map.ts new file mode 100644 index 000000000..fc1814c8c --- /dev/null +++ b/ts-test/tests/exp_map.ts @@ -0,0 +1,1630 @@ +// ***************************************************************************** +// Copyright 2022-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + + + +import Aerospike, { exp as expModule, maps as Maps, operations, Client as Cli, AerospikeBins, RecordMetadata, Key, AerospikeExp, cdt, RemovePolicy, MapPolicy, AerospikeRecord} from 'aerospike'; + +import { expect } from 'chai'; + +const exp: typeof expModule = Aerospike.exp +const maps: typeof Maps = Aerospike.maps +const op: typeof operations = Aerospike.operations +const Context: typeof cdt.Context = Aerospike.cdt.Context + +import * as helper from './test_helper'; + + +const keygen: any = helper.keygen +const tempBin: string = 'ExpVar' +const FILTERED_OUT: number = Aerospike.status.FILTERED_OUT + +describe('Aerospike.exp_operations', function () { + helper.skipUnlessVersion('>= 5.0.0', this) + + const client: Cli = helper.client + + async function createRecord (bins: AerospikeBins, meta: RecordMetadata | null = null) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/exp' })() + await client.put(key, bins, meta) + return key + } + + async function testNoMatch (key: Key, filterExpression: AerospikeExp) { + const rejectPolicy: RemovePolicy = { filterExpression } + let operationSuccessful = false + try { + await client.remove(key, rejectPolicy) + operationSuccessful = true + } catch (error: any) { + expect(error.code).to.eq(FILTERED_OUT, `Received unexpected error code with message "${error.message}"`) + } + + if (operationSuccessful) { + expect.fail('Test no-match: Operation should have not have been executed due to failed expression match') + } + } + + async function testMatch (key: Key, filterExpression: AerospikeExp) { + const passPolicy: RemovePolicy = { filterExpression } + await client.remove(key, passPolicy) + } + + const orderMap = (binName: string, order: Maps.order, key?: Key, ctx?: cdt.Context) => { + const policy: MapPolicy = new Aerospike.MapPolicy({ order }) + const 
setMapPolicy = maps.setPolicy(binName, policy) + if (ctx) setMapPolicy.withContext(ctx) + return client.operate(key!, [setMapPolicy]) + } + + const orderByKey = (binName: string, key?: Key, ctx?: cdt.Context) => orderMap(binName, maps.order.KEY_ORDERED, key, ctx) + + it('builds up a filter expression value', function () { + const filter = exp.eq(exp.binInt('intVal'), exp.int(42)) + expect(filter).to.be.an('array') + }) + + describe('map expressions', function () { + describe('clear', function () { + it('removes all items in a map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.clear( + exp.binMap('tags')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: {} }) + }) + + it('removes all items in a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.clear( + exp.binMap('tags'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: {} } }) + }) + }) + + describe('removeByKey', function () { + it('removes map item by key', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKey( + exp.binMap('tags'), + exp.str('a')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { b: 'green', c: 'yellow' } }) + }) + + it('removes map item by key in a cdt context', async function () { + const key: 
Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKey( + exp.binMap('tags'), + exp.str('e'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByKeyList', function () { + it('removes map item by key list', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyList( + exp.binMap('tags'), + exp.list(['a', 'b'])), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'yellow' } }) + }) + + it('removes map item by key list in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyList( + exp.binMap('tags'), + exp.list(['d', 'e']), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByKeyRange', function () { + it('removes map item by key range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRange( + exp.binMap('tags'), + exp.str('c'), + exp.str('a')), + 0), + op.read('tags') + ] + const result = 
await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'yellow' } }) + }) + + it('removes map item by key range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRange( + exp.binMap('tags'), + exp.str('h'), + exp.str('e'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange' } } }) + }) + + it('removes inverted map item by key range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRange( + exp.binMap('tags'), + exp.str('c'), + exp.str('a'), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', b: 'green' } }) + }) + + it('removes inverted map item by key range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRange( + exp.binMap('tags'), + exp.str('h'), + exp.str('e'), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { e: 'pink', f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByKeyRelIndexRangeToEnd', function () { + it('removes map item by key relative index range to end', async function () { 
+ const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('b')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', b: 'green' } }) + }) + + it('removes map item by key relative index range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('e'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink' } } }) + }) + + it('removes inverted map item by key relative index range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('b'), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'yellow' } }) + }) + + it('removes inverted map item by key relative index range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRangeToEnd( + 
exp.binMap('tags'), + exp.int(1), + exp.str('e'), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByKeyRelIndexRange', function () { + it('removes map item by key relative index range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('a')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'yellow' } }) + }) + + it('removes map item by key relative index range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('d'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { f: 'white', g: 'black' } } }) + }) + + it('removes inverted map item by key relative index range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('a'), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', b: 'green' } }) + }) + + 
it('removes inverted map item by key relative index range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByKeyRelIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('a'), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink' } } }) + }) + }) + + describe('removeByValue', function () { + it('removes map item by value', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValue( + exp.binMap('tags'), + exp.str('green')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', c: 'yellow' } }) + }) + + it('removes map item by value in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValue( + exp.binMap('tags'), + exp.str('white'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', g: 'black' } } }) + }) + }) + + describe('removeByValueList', function () { + it('removes map item by value list', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: 
operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueList( + exp.binMap('tags'), + exp.list(['green', 'yellow'])), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue' } }) + }) + + it('removes map item by value list in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueList( + exp.binMap('tags'), + exp.list(['orange', 'white']), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { e: 'pink', g: 'black' } } }) + }) + }) + + describe('removeByValueRange', function () { + it('removes map item by value range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRange( + exp.binMap('tags'), + exp.str('green'), + exp.str('blue')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { b: 'green', c: 'yellow' } }) + }) + + it('removes map item by value range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRange( + exp.binMap('tags'), + exp.str('pink'), + exp.str('black'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: 
{ e: 'pink', f: 'white' } } }) + }) + + it('removes inverted map item by value range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRange( + exp.binMap('tags'), + exp.str('green'), + exp.str('blue'), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue' } }) + }) + + it('removes inverted map item by value range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRange( + exp.binMap('tags'), + exp.str('pink'), + exp.str('black'), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', g: 'black' } } }) + }) + }) + + describe('removeByValueRelRankRangeToEnd', function () { + it('removes map item by value relative rank range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('blue')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'blue' } }) + }) + + it('removes map item by value relative rank range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = 
new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('orange'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', g: 'black' } } }) + }) + + it('removes inverted map item by value relative rank range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('blue'), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'yellow', b: 'green' } }) + }) + + it('removes inverted map item by value relative rank range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + exp.str('black'), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white' } } }) + }) + }) + + describe('removeByValueRelRankRange', function () { + it('removes map item by value relative rank range', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRange( + exp.binMap('tags'), + 
exp.int(1), + exp.int(-1), + exp.str('green')), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'yellow', b: 'green' } }) + }) + + it('removes map item by value relative rank range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRange( + exp.binMap('tags'), + exp.int(1), + exp.int(-1), + exp.str('pink'), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { e: 'pink', f: 'white', g: 'black' } } }) + }) + + it('removes inverted map item by value relative rank range', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRange( + exp.binMap('tags'), + exp.int(1), + exp.int(-1), + exp.str('green'), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'blue' } }) + }) + + it('removes inverted map item by value relative rank range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByValueRelRankRange( + exp.binMap('tags'), + exp.int(1), + exp.int(-1), + exp.str('pink'), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + 
expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange' } } }) + }) + }) + + describe('removeByIndex', function () { + it('removes a map item by index', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndex( + exp.binMap('tags'), + exp.int(1)), + 0), + op.read('tags') + ] + let result = await client.operate(key, ops, {}) + result = await client.get(key) + expect(result.bins).to.eql({ tags: { a: 'blue', c: 'yellow' } }) + }) + + it('removes a map item by index in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndex( + exp.binMap('tags'), + exp.int(1), + context), + 0), + op.read('tags') + ] + let result = await client.operate(key, ops, {}) + result = await client.get(key) + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByIndexRangeToEnd', function () { + it('removes a map item by index range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1)), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue' } }) + }) + + it('removes a map item by index range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context =
new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange' } } }) + }) + + it('removes an inverted map item by index range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { b: 'green', c: 'yellow' } }) + }) + + it('removes an inverted map item by index range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(1), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { e: 'pink', f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByIndexRange', function () { + it('removes a map item by index range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0)), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { 
c: 'yellow' } }) + }) + + it('removes a map item by index range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { f: 'white', g: 'black' } } }) + }) + + it('removes a inverted map item by index range', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', b: 'green' } }) + }) + + it('removes a inverted map item by index range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByIndexRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink' } } }) + }) + }) + + describe('removeByRank', function () { + it('removes a map item by rank', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: 
operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRank( + exp.binMap('tags'), + exp.int(2)), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { b: 'green', c: 'blue' } }) + }) + + it('removes a map item by rank in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRank( + exp.binMap('tags'), + exp.int(2), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', f: 'white', g: 'black' } } }) + }) + }) + + describe('removeByRankRangeToEnd', function () { + it('removes a map item by rank range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRangeToEnd( + exp.binMap('tags'), + exp.int(1)), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { c: 'blue' } }) + }) + + it('removes a map item by rank range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { g: 'black' } } }) + }) + + it('removes an inverted map item 
by rank range to end', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'yellow', b: 'green' } }) + }) + + it('removes an inverted map item by rank range to end in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRangeToEnd( + exp.binMap('tags'), + exp.int(1), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white' } } }) + }) + }) + + describe('removeByRankRange', function () { + it('removes a map item by rank range', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0)), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'yellow' } }) + }) + + it('removes a map item by rank range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRange( + 
exp.binMap('tags'), + exp.int(2), + exp.int(0), + context), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { e: 'pink', f: 'white' } } }) + }) + + it('removes an inverted map item by rank range', async function () { + const key: Key = await createRecord({ tags: { a: 'yellow', b: 'green', c: 'blue' } }) + + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + null, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { b: 'green', c: 'blue' } }) + }) + + it('removes an inverted map item by rank range in a cdt context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('tags', + exp.maps.removeByRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + context, + maps.returnType.INVERTED), + 0), + op.read('tags') + ] + const result = await client.operate(key, ops, {}) + + expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', g: 'black' } } }) + }) + }) + + describe('getByIndex', function () { + it('selects item identified by index', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.maps.getByIndex( + exp.binMap('tags'), + exp.int(2), + exp.type.INT, + maps.returnType.COUNT), + 0), + op.read('tags') + ] + await client.operate(key, ops, {}) + + await testNoMatch(key, exp.eq( + exp.maps.getByIndex( + exp.binMap('tags'), + exp.int(2), + exp.type.INT, + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, 
exp.eq( + exp.maps.getByIndex( + exp.binMap('tags'), + exp.int(2), + exp.type.INT, + maps.returnType.COUNT), + exp.int(1))) + }) + + it('selects item identified by index inside nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + orderByKey('tags', key) + orderByKey('tags', key, context) + await testNoMatch(key, exp.eq( + exp.maps.getByIndex( + exp.binMap('tags'), + exp.int(2), + exp.type.AUTO, + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByIndex( + exp.binMap('tags'), + exp.int(3), + exp.type.AUTO, + maps.returnType.COUNT, + context), + exp.int(1))) + }) + }) + + describe('getByIndexRange', function () { + it('selects "count" map items starting at specified index', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq( + exp.maps.getByIndexRange( + exp.binMap('tags'), + exp.int(5), + exp.int(0), + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByIndexRange( + exp.binMap('tags'), + exp.int(5), + exp.int(0), + maps.returnType.COUNT), + exp.int(3))) + }) + + it('selects "count" map items starting at specified nested index', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + orderByKey('tags', key) + orderByKey('tags', key, context) + await testNoMatch(key, exp.eq( + exp.maps.getByIndexRange( + exp.binMap('tags'), + exp.int(6), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByIndexRange( + exp.binMap('tags'), + exp.int(6), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(4))) + }) + }) + + 
describe('getByIndexRangeToEnd', function () { + it('selects map items starting at specified index to the end of the map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq( + exp.maps.getByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT), + exp.int(3))) + }) + + it('selects map items starting at specified index to the end of the nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + orderByKey('tags', key) + orderByKey('tags', key, context) + await testNoMatch(key, exp.eq( + exp.maps.getByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByIndexRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(4))) + }) + }) + + describe('getByKey', function () { + it('matches the count of the matched map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.maps.getByKey(exp.binMap('tags'), exp.str('a'), exp.type.AUTO, maps.returnType.COUNT), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByKey(exp.binMap('tags'), exp.str('a'), exp.type.AUTO, maps.returnType.COUNT), exp.int(1))) + }) + + it('matches the count of the matched map values of a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key,
exp.eq(exp.maps.getByKey(exp.binMap('tags'), exp.str('d'), exp.type.AUTO, maps.returnType.COUNT, context), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByKey(exp.binMap('tags'), exp.str('d'), exp.type.AUTO, maps.returnType.COUNT, context), exp.int(1))) + }) + }) + + describe('getByKeyList', function () { + it('matches the count of the matched map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.maps.getByKeyList(exp.binMap('tags'), exp.list(['a', 'b']), maps.returnType.COUNT), exp.int(1))) + await testMatch(key, exp.eq(exp.maps.getByKeyList(exp.binMap('tags'), exp.list(['a', 'b']), maps.returnType.COUNT), exp.int(2))) + }) + + it('matches the count of the matched map values of a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByKeyList(exp.binMap('tags'), exp.list(['d', 'e']), maps.returnType.COUNT, context), exp.int(1))) + await testMatch(key, exp.eq(exp.maps.getByKeyList(exp.binMap('tags'), exp.list(['d', 'e']), maps.returnType.COUNT, context), exp.int(2))) + }) + }) + + describe('getByKeyRange', function () { + it('matches the count of the matched map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.maps.getByKeyRange(exp.binMap('tags'), exp.str('c'), exp.str('a'), maps.returnType.COUNT), exp.int(3))) + await testMatch(key, exp.eq(exp.maps.getByKeyRange(exp.binMap('tags'), exp.str('c'), exp.str('a'), maps.returnType.COUNT), exp.int(2))) + }) + + it('matches the count of the matched map values of a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 
'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByKeyRange(exp.binMap('tags'), exp.str('g'), exp.str('d'), maps.returnType.COUNT, context), exp.int(4))) + await testMatch(key, exp.eq(exp.maps.getByKeyRange(exp.binMap('tags'), exp.str('g'), exp.str('d'), maps.returnType.COUNT, context), exp.int(3))) + }) + }) + + describe('getByKeyRelIndexRange', function () { + it('matches the count of the matched map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', d: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.maps.getByKeyRelIndexRange(exp.binMap('tags'), exp.int(3), exp.int(0), exp.str('b'), maps.returnType.COUNT), exp.int(1))) + await testMatch(key, exp.eq(exp.maps.getByKeyRelIndexRange(exp.binMap('tags'), exp.int(3), exp.int(0), exp.str('b'), maps.returnType.COUNT), exp.int(2))) + }) + + it('matches the count of the matched map values of a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', g: 'white', h: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByKeyRelIndexRange(exp.binMap('tags'), exp.int(3), exp.int(0), exp.str('g'), maps.returnType.COUNT, context), exp.int(1))) + await testMatch(key, exp.eq(exp.maps.getByKeyRelIndexRange(exp.binMap('tags'), exp.int(3), exp.int(0), exp.str('g'), maps.returnType.COUNT, context), exp.int(2))) + }) + }) + + describe('getByKeyRelIndexRangeToEnd', function () { + it('matches the count of the matched map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', d: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.maps.getByKeyRelIndexRangeToEnd(exp.binMap('tags'), exp.int(0), exp.str('b'), maps.returnType.COUNT), exp.int(1))) + await 
testMatch(key, exp.eq(exp.maps.getByKeyRelIndexRangeToEnd(exp.binMap('tags'), exp.int(0), exp.str('b'), maps.returnType.COUNT), exp.int(2))) + }) + + it('matches the count of the matched map values of a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', g: 'white', h: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByKeyRelIndexRangeToEnd(exp.binMap('tags'), exp.int(0), exp.str('e'), maps.returnType.COUNT, context), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByKeyRelIndexRangeToEnd(exp.binMap('tags'), exp.int(0), exp.str('e'), maps.returnType.COUNT, context), exp.int(3))) + }) + }) + + describe('getByRank', function () { + it('selects map item identified by rank', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', d: 5, c: 'yellow' } }) + + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.maps.getByRank( + exp.binMap('tags'), + exp.int(0), + exp.type.INT, + maps.returnType.COUNT), + 0), + op.read('tags') + ] + await client.operate(key, ops, {}) + + await testNoMatch(key, exp.eq( + exp.maps.getByRank( + exp.binMap('tags'), + exp.int(0), + exp.type.INT, + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByRank( + exp.binMap('tags'), + exp.int(0), + exp.type.INT, + maps.returnType.COUNT), + exp.int(1))) + }) + + it('selects map item identified by rank within a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', d: 5, c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.read(tempBin, + exp.maps.getByRank( + exp.binMap('tags'), + exp.int(0), + exp.type.INT, + maps.returnType.COUNT), + 0), + 
op.read('tags') + ] + await client.operate(key, ops, {}) + + await testNoMatch(key, exp.eq( + exp.maps.getByRank( + exp.binMap('tags'), + exp.int(0), + exp.type.INT, + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByRank( + exp.binMap('tags'), + exp.int(0), + exp.type.INT, + maps.returnType.COUNT, + context), + exp.int(1))) + }) + }) + + describe('getByRankRange', function () { + it('selects "count" map items starting at specified rank', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq( + exp.maps.getByRankRange( + exp.binMap('tags'), + exp.int(4), + exp.int(0), + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByRankRange( + exp.binMap('tags'), + exp.int(4), + exp.int(0), + maps.returnType.COUNT), + exp.int(3))) + }) + + it('selects "count" map items starting at specified rank in nested context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq( + exp.maps.getByRankRange( + exp.binMap('tags'), + exp.int(5), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByRankRange( + exp.binMap('tags'), + exp.int(5), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(4))) + }) + }) + + describe('getByRankRangeToEnd', function () { + it('selects map items starting at specified rank to the last ranked item', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq( + exp.maps.getByRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByRankRangeToEnd( + 
exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT), + exp.int(3))) + }) + + it('selects map items starting at specified rank to the last ranked item in a nested context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq( + exp.maps.getByRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + maps.returnType.COUNT, + context), + exp.int(4))) + }) + }) + }) + + describe('getByValue', function () { + it('matches the count of the matched map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq(exp.maps.getByValue(exp.binMap('tags'), exp.str('green'), maps.returnType.COUNT), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByValue(exp.binMap('tags'), exp.str('green'), maps.returnType.COUNT), exp.int(1))) + }) + + it('matches the count of the matched map values of a nested map', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByValue(exp.binMap('tags'), exp.str('orange'), maps.returnType.COUNT, context), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.getByValue(exp.binMap('tags'), exp.str('orange'), maps.returnType.COUNT, context), exp.int(1))) + }) + }) + + describe('getByValueList', function () { + it('matches the count of the matched values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await 
testNoMatch(key, exp.eq(exp.maps.getByValueList(exp.binMap('tags'), exp.list(['green', 'yellow']), maps.returnType.COUNT), exp.int(3))) + await testMatch(key, exp.eq(exp.maps.getByValueList(exp.binMap('tags'), exp.list(['green', 'yellow']), maps.returnType.COUNT), exp.int(2))) + }) + + it('matches the count of the matched values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + + await testNoMatch(key, exp.eq(exp.maps.getByValueList(exp.binMap('tags'), exp.list(['orange', 'white']), maps.returnType.COUNT, context), exp.int(3))) + await testMatch(key, exp.eq(exp.maps.getByValueList(exp.binMap('tags'), exp.list(['orange', 'white']), maps.returnType.COUNT, context), exp.int(2))) + }) + }) + + describe('getByValueRange', function () { + it('matches the count of the matched range of map values', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.str('yellow'), exp.str('blue'), maps.returnType.COUNT), exp.int(3))) + + await testMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.str('yellow'), exp.str('blue'), maps.returnType.COUNT), exp.int(2))) + }) + + it('matches the count of the matched range of map values in a nested context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.str('white'), exp.str('black'), maps.returnType.COUNT, context), exp.int(4))) + + await testMatch(key, exp.eq(exp.maps.getByValueRange(exp.binMap('tags'), exp.str('white'), exp.str('black'), 
maps.returnType.COUNT, context), exp.int(3))) + }) + }) + + describe('getByValueRelRankRange', function () { + it('selects map items nearest to value and greater by relative rank with a count limit', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq( + exp.maps.getByValueRelRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('yellow'), + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByValueRelRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('yellow'), + maps.returnType.COUNT), + exp.int(1))) + }) + + it('selects map items nearest to value and greater by relative rank with a count limit in a nested context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq( + exp.maps.getByValueRelRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('pink'), + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq( + exp.maps.getByValueRelRankRange( + exp.binMap('tags'), + exp.int(2), + exp.int(0), + exp.str('pink'), + maps.returnType.COUNT, + context), + exp.int(2))) + }) + }) + + describe('getByValueRelRankRangeToEnd', function () { + it('selects map items nearest to value and greater by relative rank', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + + await testNoMatch(key, exp.eq(exp.maps.getByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + exp.str('yellow'), + maps.returnType.COUNT), + exp.int(0))) + await testMatch(key, exp.eq(exp.maps.getByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + exp.str('yellow'), + maps.returnType.COUNT), + exp.int(1))) + }) + + it('selects 
map items nearest to value and greater by relative rank in a nested context', async function () { + const key: Key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow', nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.getByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + exp.str('orange'), + maps.returnType.COUNT, + context), + exp.int(0))) + await testMatch(key, exp.eq(exp.maps.getByValueRelRankRangeToEnd( + exp.binMap('tags'), + exp.int(0), + exp.str('orange'), + maps.returnType.COUNT, + context), + exp.int(3))) + }) + }) + + describe('putItems', function () { + it('writes map values to a specified map', async function () { + const key: Key = await createRecord({ map: { c: 1, b: 2, a: 3 }, map2: { f: 1, e: 2, d: 3 } }) + const ops: operations.Operation[] = [ + exp.operations.write('map', + exp.maps.putItems(exp.binMap('map'), exp.binMap('map2')), + 0), + op.read('map') + ] + const result = await client.operate(key, ops, {}) + expect(result.bins.map).to.eql({ a: 3, b: 2, c: 1, d: 3, e: 2, f: 1 }) + }) + + it('writes map values from exp.map expression to specified map', async function () { + const key: Key = await createRecord({ map: { c: 1, b: 2, a: 3 } }) + const ops: operations.Operation[] = [ + exp.operations.write('map', + exp.maps.putItems(exp.binMap('map'), exp.map({ f: 1, e: 2, d: 3 })), + 0), + op.read('map') + ] + const result = await client.operate(key, ops, {}) + expect(result.bins.map).to.eql({ a: 3, b: 2, c: 1, d: 3, e: 2, f: 1 }) + }) + + it('writes map values originating from nested map to a specified map', async function () { + const key: Key = await createRecord({ map: { c: 1, b: 2, a: 3, nested: { g: 4 } }, map2: { f: 1, e: 2, d: 3 } }) + const context: cdt.Context = new Context().addMapKey('nested') + const ops: operations.Operation[] = [ + exp.operations.write('map', + 
exp.maps.putItems(exp.binMap('map'), exp.binMap('map2'), null, context), + 0), + op.read('map') + ] + const result: AerospikeRecord = await client.operate(key, ops, {}) + expect((result.bins.map as { nested: Object }).nested).to.eql({ d: 3, e: 2, f: 1, g: 4 }) + }) + }) + + describe('size', function () { + it('returns the map size', async function () { + const key: Key = await createRecord({ map: { john: 42, malcom: 73, susan: 27 } }) + + await testNoMatch(key, exp.eq(exp.maps.size(exp.binMap('map')), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.size(exp.binMap('map')), exp.int(3))) + }) + + it('returns the map size from a nested map', async function () { + const key: Key = await createRecord({ map: { john: 42, malcom: 73, susan: 27, nested: { d: 'orange', e: 'pink', f: 'white', g: 'black' } } }) + const context: cdt.Context = new Context().addMapKey('nested') + await testNoMatch(key, exp.eq(exp.maps.size(exp.binMap('map'), context), exp.int(2))) + await testMatch(key, exp.eq(exp.maps.size(exp.binMap('map'), context), exp.int(4))) + }) + }) +}) diff --git a/ts-test/tests/generators/key.ts b/ts-test/tests/generators/key.ts new file mode 100644 index 000000000..e1e37de20 --- /dev/null +++ b/ts-test/tests/generators/key.ts @@ -0,0 +1,60 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' +import Aerospike from 'aerospike'; +const Key = Aerospike.Key +import * as valgen from './value'; + +// +// Returns a generator for bytes keys. +// +export function bytes (namespace: any, set: any, options?: any) { + const bgen = valgen.bytes(options) + return function () { + return new Key(namespace, set, bgen()) + } +} + +// +// Returns a generator for string keys. +// +export function string (namespace: any, set: any, options?: any) { + const sgen = valgen.string(options) + return function () { + return new Key(namespace, set, sgen()) + } +} + +// +// Returns a generator for integer keys. +// +export function integer (namespace: any, set: any, options?: any) { + const igen = valgen.integer(options) + return function () { + return new Key(namespace, set, igen()) + } +} + +export function range (keygen: any, end: number, start?: number) { + start = start || 0 + end = end || start + 1 + const a = [] + for (let i = 0; i < end; i++) { + a.push(keygen()) + } + return a +} \ No newline at end of file diff --git a/ts-test/tests/generators/metadata.ts b/ts-test/tests/generators/metadata.ts new file mode 100644 index 000000000..5b264e696 --- /dev/null +++ b/ts-test/tests/generators/metadata.ts @@ -0,0 +1,26 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +// +// Returns a static record. +// +export function constant (metadata: any): () => any { + return function (): any { + return metadata + } +} diff --git a/ts-test/tests/generators/put.ts b/ts-test/tests/generators/put.ts new file mode 100644 index 000000000..bd1387700 --- /dev/null +++ b/ts-test/tests/generators/put.ts @@ -0,0 +1,98 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +import pThrottle from 'p-throttle'; + + +import Aerospike, {ConfigOptions, Host, AerospikeRecord} from 'aerospike'; + + +const Record = Aerospike.Record +import * as helper from '../test_helper'; + +function createRecords (putCall: any, generator: any, recordsToCreate: any, maxConcurrent: any, callback: any) { + let currentRecordNo = 0 + let inFlight = 0 + + const creator = function (this: any, record: any, err: any) { + if (err) { + console.error('ERROR: %s [%d] in %s at %s:%d\n%s', err.message, err.code, err.func, err.file, err.line, err.stack) + throw err + } + if (record) { + if(typeof callback === 'function'){ + callback(record) + + } + inFlight-- + } + currentRecordNo++ + if (currentRecordNo <= recordsToCreate && inFlight < maxConcurrent) { + record = new Record(generator.key(), generator.bins(), generator.metadata()) + const putCb = creator.bind(this, record) + const policy = generator.policy() + const meta = { ttl: record.ttl, gen: record.gen } + putCall(record.key, record.bins, meta, policy, putCb) + inFlight++ + } else if (currentRecordNo > recordsToCreate && inFlight === 0) { + if(typeof callback === 'function'){ + + callback(null) + } + } + } + + for (let i = 0; i < Math.min(maxConcurrent, recordsToCreate); i++) { + creator(null, null) + } +} + +export function put (n: any, options: any, callback?: any): any { + const policy = options.policy || new Aerospike.WritePolicy({ + totalTimeout: 1000, + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + + const generator = { + key: options.keygen, + bins: options.recgen, + metadata: options.metagen, + policy: function () { return policy } + } + + let putCall: any = helper.client.put.bind(helper.client) + if (options.throttle) { + const { limit, interval } = options.throttle + putCall = pThrottle(putCall, limit, interval) + } + + if (callback) { + createRecords(putCall, generator, n, 200, callback) + } else 
{ + return new Promise((resolve, reject) => { + const records: AerospikeRecord[] = [] + createRecords(putCall, generator, n, 200, (record: any) => { + if (record) { + records.push(record) + } else { + resolve(records) + } + }) + }) + } +} diff --git a/ts-test/tests/generators/record.ts b/ts-test/tests/generators/record.ts new file mode 100644 index 000000000..d048d7b95 --- /dev/null +++ b/ts-test/tests/generators/record.ts @@ -0,0 +1,40 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' +import Aerospike, {ConfigOptions, Host, AerospikeBins} from 'aerospike'; + +// +// Returns a static record. +// +export function constant (bins: any): () => AerospikeBins { + return function (): AerospikeBins { + return bins + } +} + +// +// Returns a record from bins spec'd using generators record. 
+// +export function record (bins: any): () => AerospikeBins { + return function () { + const out: AerospikeBins = {} + for (const bin in bins) { + out[bin] = bins[bin]() + } + return out + } +} diff --git a/ts-test/tests/generators/value.ts b/ts-test/tests/generators/value.ts new file mode 100644 index 000000000..be13c4f77 --- /dev/null +++ b/ts-test/tests/generators/value.ts @@ -0,0 +1,161 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +// ***************************************************************************** +// HELPERS +// **************************************************************************** +// +import Aerospike from 'aerospike'; +const Double = Aerospike.Double + + +// Returns a random integer between min (included) and max (excluded) +function randomInt (min: number, max: number): number { + return Math.floor(Math.random() * (max - min)) + min +} + +// Returns a random number between min (included) and max (excluded) +function randomDouble (min: number, max: number): number { + return Math.random() * (max - min) + min +} + +function merge(o1: T, o2: U): T & U { + return { ...o1, ...o2 }; +} + + +export function string (options?: any): () => string { + const opt = merge(string.defaults, options) + let seq: number = 0 + return function (): string { + if (opt.random === true) { + const lengthMin: number = opt.length.min || 1 + const lengthMax: number = opt.length.max || lengthMin + const len: number = randomInt(lengthMin, lengthMax) + const arr: Array = new Array(len) + for (let i = 0; i < len; i++) { + arr[i] = opt.charset[randomInt(0, opt.charset.length)] + } + return opt.prefix + arr.join('') + opt.suffix + } else { + return opt.prefix + (seq++) + opt.suffix + } + } +} + +string.defaults = { + random: true, + length: { + min: 1, + max: 128, + }, + prefix: '', + suffix: '', + charset: '0123456789ABCDEFGHIJKLMNOPQRSTUVWXTZabcdefghiklmnopqrstuvwxyz', +} + +export function bytes (options?: any): () => Buffer { + const opt = merge(bytes.defaults, options) + return function (): Buffer { + const len: number = randomInt(opt.length.min, opt.length.max) + const buf: Buffer = Buffer.alloc(len) + for (let i = 0; i < len; i++) { + buf[i] = randomInt(opt.byte.min, opt.byte.max) + } + return buf + } +} + +bytes.defaults = { + length: { + min: 1, + max: 1024, + }, + byte: { + min: 0, + max: 255, 
+ }, +} + + + +export function integer (options?: any): () => number { + const opt = merge(integer.defaults, options) + let seq: number = opt.min + return function (): number { + return opt.random === true ? randomInt(opt.min, opt.max) : seq++ + } +} + +integer.defaults = { + random: true, + min: 0, + max: 0xffffff, +} + +export function double (options?: any): () => typeof Double { + const opt = merge(double.defaults, options) + let seq: number = opt.min + const step: number = opt.step + const r: number = Math.pow(10, step.toString().length - step.toString().indexOf('.') - 1) + return function (): any { + if (opt.random){ + return new Double(randomDouble(opt.min, opt.max)) + } else { + seq = Math.round(r * (seq + step)) / r + return new Double(seq) + } + } +} + +double.defaults = { + random: true, + min: 0, + max: 0xffffff, + step: 0.1, +} + +export function array(options?: any): () => Array { + const opt = merge(array.defaults, options) + return function (): Array { + return opt.values.map(function (gen: Function): any { return gen() }) + } +} + +array.defaults = { + values: [integer(), string(), bytes()] +} + +export function map ():() => Object { + return function (): Object { + const num: Function = integer() + const str: Function = string() + const uint: Function = bytes() + const map: Object = { itype: num(), stype: str(), btyte: uint() } + return map + } +} +// ***************************************************************************** +// GENERATORS +// ***************************************************************************** + +export function constant(value: T): () => T { + return function(): T { + return value; + } +} diff --git a/ts-test/tests/geojson.ts b/ts-test/tests/geojson.ts new file mode 100644 index 000000000..1024082ca --- /dev/null +++ b/ts-test/tests/geojson.ts @@ -0,0 +1,143 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { GeoJSON as GJ, Key as K, Client as Cli, RecordMetadata, WritePolicy, AerospikeBins, AerospikeRecord, AerospikeError } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const Key: typeof K = Aerospike.Key +const GeoJSON: typeof GJ = Aerospike.GeoJSON + +describe('Aerospike.GeoJSON', function () { + context('GeoJSON class #noserver', function () { + const subject: GJ = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + + describe('constructor', function () { + it('returns a new GeoJSON value when called as an Object constructor', function () { + expect(new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] })).to.be.instanceof(GeoJSON) + }) + /* + it('returns a new GeoJSON value when called as function', function () { + expect(GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] })).to.be.instanceof(GeoJSON) + }) + */ + it('parses a GeoJSON string', function () { + expect(new GeoJSON('{"type": "Point", "coordinates": [103.913, 1.308]}')).to.be.instanceof(GeoJSON) + }) + /* + it('throws a type error if passed an invalid GeoJSON value', function () { + const fn: function = () => new GeoJSON(45) + expect(fn).to.throw(TypeError) + }) + */ + }) + + describe('#value()', function () { + 
it('returns the value as a JSON object', function () { + expect(subject.value()).to.eql({ type: 'Point', coordinates: [103.913, 1.308] }) + }) + }) + + describe('#toJSON()', function () { + it('returns the GeoJSON value as a JSON object', function () { + expect(subject.toJSON()).to.eql({ type: 'Point', coordinates: [103.913, 1.308] }) + }) + }) + + describe('#toString()', function () { + it('returns a string representation of the GeoJSON value', function () { + expect(subject.toString()).to.equal('{"type":"Point","coordinates":[103.913,1.308]}') + }) + }) + + describe('GeoJSON.Point()', function () { + it('returns the lat, lng as a GeoJSON point value', function () { + const point: GJ = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + expect(new GeoJSON.Point(103.913, 1.308)).to.eql(point) + }) + }) + + describe('GeoJSON.Polygon()', function () { + it('returns the coordinates as a GeoJSON polygon value', function () { + const polygon: GJ = new GeoJSON({ type: 'Polygon', coordinates: [[[103.913, 1.308], [104.913, 1.308], [104.913, 1.408], [103.913, 1.408], [103.913, 1.408]]] }) + expect(new GeoJSON.Polygon([103.913, 1.308], [104.913, 1.308], [104.913, 1.408], [103.913, 1.408], [103.913, 1.408])).to.eql(polygon) + }) + }) + + describe('GeoJSON.Circle()', function () { + it('creates a GeoJSON circle representation', function () { + const circle: GJ = new GeoJSON({ type: 'AeroCircle', coordinates: [[-122.250629, 37.871022], 300] }) + expect(new GeoJSON.Circle(-122.250629, 37.871022, 300)).to.eql(circle) + }) + }) + }) + + describe('putting and getting GeoJSON values', function () { + const client: Cli = helper.client + const point: string = JSON.stringify({ type: 'Point', coordinates: [103.9139, 1.3030] }) + const geojson: GJ = new GeoJSON(point) + const key: K = new Key(helper.namespace, helper.set, 'test/geojson') + const meta: RecordMetadata = { ttl: 1000 } + const policy: WritePolicy = new Aerospike.WritePolicy({ + exists: 
Aerospike.policy.exists.CREATE_OR_REPLACE + }) + + it('can put/get a GeoJSON bin value', function (done) { + const record: AerospikeBins = { location: geojson } + client.put(key, record, meta, policy, function (err?: AerospikeError) { + if (err) throw err + + client.get(key, function (err?: AerospikeError, record?: AerospikeRecord) { + if (err) throw err + expect(record?.bins.location).to.equal(point) + done() + }) + }) + }) + + it('can put/get a GeoJSON value in a list bin', function (done) { + const record: AerospikeBins = { locations: [geojson, geojson] } + client.put(key, record, meta, policy, function (err?: AerospikeError) { + if (err) throw err + + client.get(key, function (err?: AerospikeError, record?: AerospikeRecord) { + if (err) throw err + expect(record?.bins.locations).to.eql([point, point]) + done() + }) + }) + }) + + it('can put/get a GeoJSON value in a map bin', function (done) { + const record: AerospikeBins = { map: { location: geojson } } + client.put(key, record, meta, policy, function (err?: AerospikeError) { + if (err) throw err + + client.get(key, function (err?: AerospikeError, record?: AerospikeRecord) { + if (err) throw err + expect((record?.bins.map as { location: GJ }).location as GJ).to.equal(point) + done() + }) + }) + }) + }) +}) diff --git a/ts-test/tests/get.ts b/ts-test/tests/get.ts new file mode 100644 index 000000000..0e24b4f12 --- /dev/null +++ b/ts-test/tests/get.ts @@ -0,0 +1,213 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + +import Aerospike, { status as statusModule, Client as Cli, Key as K, RecordMetadata, AerospikeRecord, AerospikeError, ReadPolicy, AerospikeBins} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen: any = helper.keygen +const metagen: any = helper.metagen +const recgen: any = helper.recgen + +const status: typeof statusModule = Aerospike.status + +describe('client.get()', function () { + const client: Cli = helper.client + + it('should read the record', function (done) { + const key: K = keygen.string(helper.namespace, helper.set, { prefix: 'test/get/' })() + const meta: RecordMetadata = metagen.constant({ ttl: 1000 })() + const record: AerospikeRecord = recgen.constant({ i: 123, s: 'abc' })() + + client.put(key, record, meta, function (err?: AerospikeError) { + if (err) throw err + client.get(key, function (err?: AerospikeError, record?: AerospikeRecord) { + if (err) throw err + client.remove(key, function (err?: AerospikeError, key?: K) { + if (err) throw err + done() + }) + }) + }) + }) + + it('should not find the record', function (done) { + const key: K = keygen.string(helper.namespace, helper.set, { prefix: 'test/not_found/' })() + + client.get(key, function (err?: AerospikeError, record?: AerospikeRecord) { + expect(err?.code).to.equal(status.ERR_RECORD_NOT_FOUND) + done() + }) + }) + + context('with ReadPolicy', function () { + 
context('with deserialize: false', function () { + it('should return lists and maps as raw bytes', function () { + const key: K = keygen.string(helper.namespace, helper.set, { prefix: 'test/get/' })() + const bins: AerospikeBins = { + i: 123, + s: 'abc', + l: [1, 2, 3], + m: { a: 1, b: 2, c: 3 } + } + const policy: ReadPolicy = new Aerospike.ReadPolicy({ + deserialize: false + }) + + return client.put(key, bins) + .then(() => client.get(key, policy)) + .then((record: AerospikeRecord) => { + const bins: AerospikeBins = record.bins + expect(bins.i).to.eql(123) + expect(bins.s).to.eql('abc') + expect(bins.l).to.eql(Buffer.from([0x93, 0x01, 0x02, 0x03])) + expect(bins.m).to.eql(Buffer.from([0x84, 0xc7, 0x00, 0x01, 0xc0, 0xa2, 0x03, 0x61, 0x01, 0xa2, 0x03, 0x62, 0x02, 0xa2, 0x03, 0x63, 0x03])) + }) + }) + }) + + context('readTouchTtlPercent policy', function () { + helper.skipUnlessVersion('>= 7.1.0', this) + + this.timeout(4000) + it('100% touches record', async function () { + const key: K = keygen.integer(helper.namespace, helper.set)() + const policy: ReadPolicy = new Aerospike.ReadPolicy({ + readTouchTtlPercent: 100 + }) + + await client.put(key, { i: 2 }, { ttl: 10 }) + await new Promise((resolve: any) => setTimeout(resolve, 3000)) + let record: AerospikeRecord = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(key) + }) + + it('71% touches record', async function () { + const key: K = keygen.integer(helper.namespace, helper.set)() + const policy: ReadPolicy = new Aerospike.ReadPolicy({ + readTouchTtlPercent: 71 + }) + + await client.put(key, { i: 2 }, { ttl: 10 }) + await new Promise((resolve: any) => setTimeout(resolve, 3000)) + let record: AerospikeRecord = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + 
expect(record.ttl).to.be.within(7, 8) + + record = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(key) + }) + + it('60% never touches record', async function () { + const key: K = keygen.integer(helper.namespace, helper.set)() + const policy: ReadPolicy = new Aerospike.ReadPolicy({ + readTouchTtlPercent: 60 + }) + await client.put(key, { i: 2 }, { ttl: 10 }) + await new Promise((resolve: any) => setTimeout(resolve, 3000)) + + let record: AerospikeRecord = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + await client.remove(key) + }) + + it('0% never touches record', async function () { + const key: K = keygen.integer(helper.namespace, helper.set)() + const policy: ReadPolicy = new Aerospike.ReadPolicy({ + readTouchTtlPercent: 0 + }) + await client.put(key, { i: 2 }, { ttl: 10 }) + await new Promise((resolve: any) => setTimeout(resolve, 3000)) + + let record: AerospikeRecord = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(key, policy) + + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + await client.remove(key) + }) + }) + }) + + it('should return the TTL for a never expiring record as Aerospike.ttl.NEVER_EXPIRE', function (done) { + const key: K = keygen.string(helper.namespace, helper.set, { prefix: 'test/get/' })() + const meta: RecordMetadata = metagen.constant({ ttl: Aerospike.ttl.NEVER_EXPIRE })() + const record: AerospikeRecord = recgen.constant({ i: 123, s: 'abc' })() + + client.put(key, record, meta, function (err?: AerospikeError) { + if (err) throw err + client.get(key, function (err?: AerospikeError, record?: AerospikeRecord) { + if (err) throw err + 
expect(record?.ttl).to.equal(Aerospike.ttl.NEVER_EXPIRE) + client.remove(key, function (err?: AerospikeError) { + if (err) throw err + done() + }) + }) + }) + }) + + it('should return a Promise that resolves to a Record', function () { + const key: K = keygen.string(helper.namespace, helper.set, { prefix: 'test/get/' })() + + return client.put(key, { i: 42 }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => expect(record.bins).to.eql({ i: 42 })) + .then(() => client.remove(key)) + }) + + it('fetches a record given the digest', function () { + const key: K = new Aerospike.Key(helper.namespace, helper.set, 'digestOnly') + client.put(key, { foo: 'bar' }) + .then(() => { + const digest: Buffer = key.digest!; + const key2: K = new Aerospike.Key(helper.namespace, undefined, null, digest) + return client.get(key2) + .then((record: AerospikeRecord) => expect(record.bins.foo).to.equal('bar')) + }) + }) +}) diff --git a/ts-test/tests/hll.ts b/ts-test/tests/hll.ts new file mode 100644 index 000000000..11d4172e5 --- /dev/null +++ b/ts-test/tests/hll.ts @@ -0,0 +1,464 @@ +// ***************************************************************************** +// Copyright 2020-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ + + +import Aerospike, { hll as hllModule, status as statusModule, HLLPolicy} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const hll: typeof hllModule = Aerospike.hll +const status: typeof statusModule = Aerospike.status + +const { + assertError, + assertRecordEql, + assertResultEql, + assertResultSatisfy, + cleanup, + createRecord, + expectError, + initState, + operate +} = require('./util/statefulAsyncTest') + +const isDouble = (number: string) => typeof number === 'number' && parseInt(number, 10) !== number + +describe('client.operate() - HyperLogLog operations', function () { + helper.skipUnlessVersion('>= 4.9.0', this) + + // HLL object representing the set ('jaguar', 'leopard', 'lion', 'tiger') + // with an index bit size of 8, and minhash bit size of 0. + const hllCats: Buffer = Buffer.from([0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, + 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) + + describe('hll.init', function () { + it('initializes a HLL bin value', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.init('hll', 10), + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [10, 0] })) + .then(cleanup()) + }) + + it('initializes a HLL bin value with minhash bits', function () { + return initState() + 
.then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.init('hll', 10, 6), + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [10, 6] })) + .then(cleanup()) + }) + + it('re-initializes an existing HLL bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(hll.add('hll', ['tiger', 'leopard'], 10))) + .then(operate([ + hll.init('hll', 12, 4), + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [12, 4] })) + .then(cleanup()) + }) + + context('with HLL policy', function () { + context('with create-only write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.CREATE_ONLY + } + + it('returns an error if the bin already exists', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(hll.add('hll', ['tiger'], 8))) + .then(expectError()) + .then(operate( + hll.init('hll', 10).withPolicy(policy) + )) + .then(assertError(status.ERR_BIN_EXISTS)) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.CREATE_ONLY | hll.writeFlags.NO_FAIL + } + + it('does not re-initialize the bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(hll.add('hll', ['tiger', 'cheetah'], 8))) + .then(operate( + hll.init('hll', 12).withPolicy(policy) + )) + .then(operate(hll.getCount('hll'))) + .then(assertResultEql({ hll: 2 })) + .then(cleanup()) + }) + }) + }) + + context('with update-only write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.UPDATE_ONLY + } + + it('returns an error if the bin does not yet exist', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(expectError()) + .then(operate( + hll.init('hll', 10, 6).withPolicy(policy) + )) + .then(assertError(status.ERR_BIN_NOT_FOUND)) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy: HLLPolicy = { + 
writeFlags: hll.writeFlags.UPDATE_ONLY | hll.writeFlags.NO_FAIL + } + + it('does not initialize the bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate( + hll.init('hll', 10, 6).withPolicy(policy) + )) + .then(assertRecordEql({ foo: 'bar' })) + .then(cleanup()) + }) + }) + }) + }) + }) + + describe('hll.add', function () { + it('initializes a new HLL value if it does not exist', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(hll.add('hll', ['jaguar', 'tiger', 'tiger', 'leopard', 'lion', 'jaguar'], 8))) + .then(assertResultEql({ hll: 4 })) + .then(assertRecordEql({ hll: hllCats, foo: 'bar' })) + .then(cleanup()) + }) + + it('returns an error if the bin is of wrong type', function () { + return initState() + .then(createRecord({ hll: 'not a HLL set' })) + .then(expectError()) + .then(operate(hll.add('hll', ['jaguar', 'tiger', 'tiger', 'leopard', 'lion', 'jaguar'], 8))) + .then(assertError(status.ERR_BIN_INCOMPATIBLE_TYPE)) + .then(cleanup()) + }) + + context('with HLL policy', function () { + context('with create-only write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.CREATE_ONLY + } + + it('returns an error if bin already exist', async function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(hll.init('hll', 12))) + .then(expectError()) + .then(operate(hll.add('hll', ['tiger', 'tiger', 'leopard'], 8).withPolicy(policy))) + .then(assertError(status.ERR_BIN_EXISTS)) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.CREATE_ONLY | hll.writeFlags.NO_FAIL + } + + it('does not update the bin if it already exists', async function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate(hll.add('hll', ['tiger', 'lion'], 8))) + .then(operate(hll.add('hll', ['tiger', 'leopard', 'cheetah'], 8).withPolicy(policy))) + 
.then(operate(hll.getCount('hll'))) + .then(assertResultEql({ hll: 2 })) + .then(cleanup()) + }) + }) + }) + }) + }) + + describe('hll.setUnion', function () { + it('sets a union of the HLL objects with the HLL bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['tiger', 'lynx', 'cheetah', 'tiger'], 8), + hll.setUnion('hll', [hllCats]), + hll.getCount('hll') + ])) + .then(assertResultEql({ hll: 6 })) + .then(cleanup()) + }) + + it('returns an error if the index bit count does not match', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(expectError()) + .then(operate([ + hll.add('hll', ['tiger', 'lynx', 'cheetah', 'tiger'], 12), + hll.setUnion('hll', [hllCats]) // index bit size = 8 + ])) + .then(assertError(status.ERR_OP_NOT_APPLICABLE)) + .then(cleanup()) + }) + + context('with HLL policy', function () { + context('with create-only write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.CREATE_ONLY + } + + it('returns an error if the bin already exists', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(expectError()) + .then(operate([ + hll.add('hll', ['tiger', 'lynx', 'cheetah', 'tiger'], 8), + hll.setUnion('hll', [hllCats]).withPolicy(policy) + ])) + .then(assertError(status.ERR_BIN_EXISTS)) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.CREATE_ONLY | hll.writeFlags.NO_FAIL + } + + it('does not update the bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['tiger'], 8), + hll.setUnion('hll', [hllCats]).withPolicy(policy), + hll.getCount('hll') + ])) + .then(assertResultEql({ hll: 1 })) + .then(cleanup()) + }) + }) + }) + + context('with update-only write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.UPDATE_ONLY + } + + 
it('returns an error if the bin does not exist', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(expectError()) + .then(operate( + hll.setUnion('hll', [hllCats]).withPolicy(policy) + )) + .then(assertError(status.ERR_BIN_NOT_FOUND)) + .then(cleanup()) + }) + + context('with no-fail write flag', function () { + const policy = { + writeFlags: hll.writeFlags.UPDATE_ONLY | hll.writeFlags.NO_FAIL + } + + it('does not create the bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate( + hll.setUnion('hll', [hllCats]).withPolicy(policy) + )) + .then(assertRecordEql({ foo: 'bar' })) + .then(cleanup()) + }) + }) + }) + + context('with allow-fold write flag', function () { + const policy: HLLPolicy = { + writeFlags: hll.writeFlags.ALLOW_FOLD + } + + it('folds the result to the lowest index bit size', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['tiger', 'lynx', 'cheetah', 'tiger'], 12), + hll.setUnion('hll', [hllCats]).withPolicy(policy), // index bit size = 8 + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [8, 0] })) + .then(cleanup()) + }) + }) + }) + }) + + describe('hll.refreshCount', function () { + it('updates and then returns the cached count', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['tiger', 'lynx', 'cheetah', 'tiger'], 8), + hll.add('hll', ['lion', 'tiger', 'puma', 'puma']), + hll.fold('hll', 6), + hll.refreshCount('hll') + ])) + .then(assertResultEql({ hll: 5 })) + .then(cleanup()) + }) + }) + + describe('hll.fold', function () { + it('folds the index bit count to the specified value', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.init('hll', 16), + hll.fold('hll', 8), + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [8, 0] })) + .then(cleanup()) + }) + + it('returns an error if the minhash 
count is not zero', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(expectError()) + .then(operate([ + hll.init('hll', 16, 8), + hll.fold('hll', 8) + ])) + .then(assertError(status.ERR_OP_NOT_APPLICABLE)) + .then(cleanup()) + }) + }) + + describe('hll.getCount', function () { + it('returns the estimated number of elements in the bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['leopard', 'tiger', 'tiger', 'jaguar'], 8), + hll.getCount('hll') + ])) + .then(assertResultEql({ hll: 3 })) + .then(cleanup()) + }) + }) + + describe('hll.getUnion', function () { + it('returns the union of the HLL objects with the HLL bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['leopard', 'lynx', 'tiger', 'tiger', 'cheetah', 'lynx'], 8), + hll.getUnion('hll', [hllCats]) + ])) + .then(assertResultSatisfy(({ hll }: any) => Buffer.isBuffer(hll))) + .then(cleanup()) + }) + }) + + describe('hll.getUnionCount', function () { + it('returns the element count of the union of the HLL objects with the HLL bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['leopard', 'lynx', 'tiger', 'tiger', 'cheetah', 'lynx'], 8), + hll.getUnionCount('hll', [hllCats]) + ])) + .then(assertResultEql(({ hll: 6 }))) + .then(cleanup()) + }) + }) + + describe('hll.getIntersectCount', function () { + it('returns the element count of the intersection of the HLL objects with the HLL bin', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['leopard', 'lynx', 'tiger', 'tiger', 'cheetah', 'lynx'], 8), + hll.getIntersectCount('hll', [hllCats]) + ])) + .then(assertResultEql(({ hll: 2 }))) + .then(cleanup()) + }) + }) + + describe('hll.getSimilarity', function () { + it('returns the similarity of the HLL objects', function () { + return 
initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.add('hll', ['leopard', 'lynx', 'tiger', 'tiger', 'cheetah', 'lynx'], 8), + hll.getSimilarity('hll', [hllCats]) + ])) + .then(assertResultSatisfy(({ hll }: any) => isDouble(hll))) + .then(cleanup()) + }) + }) + + describe('hll.describe', function () { + it('returns the index and minhash bit counts', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.init('hll', 16, 5), + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [16, 5] })) + .then(cleanup()) + }) + + it('returns the index count, with minhash zero', function () { + return initState() + .then(createRecord({ foo: 'bar' })) + .then(operate([ + hll.init('hll', 16), + hll.describe('hll') + ])) + .then(assertResultEql({ hll: [16, 0] })) + .then(cleanup()) + }) + }) +}) diff --git a/ts-test/tests/index.ts b/ts-test/tests/index.ts new file mode 100644 index 000000000..769db30e1 --- /dev/null +++ b/ts-test/tests/index.ts @@ -0,0 +1,252 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ + + +import Aerospike, { Job as J, IndexJob as IJ, Client as Cli, AerospikeError } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const Job: typeof J = Aerospike.Job +const IndexJob: typeof IJ = Aerospike.IndexJob + +const Context = Aerospike.cdt.Context + +context('secondary indexes', function () { + const client: Cli = helper.client + + // generate unique index name for each test + const testIndex: any = { name: null, bin: null, counter: 0 } + beforeEach(() => { + testIndex.counter++ + testIndex.name = 'idx-' + testIndex.counter + '-' + Math.floor(Math.random() * 10000000) + testIndex.bin = 'bin-' + testIndex.counter + '-' + Math.floor(Math.random() * 10000000) + }) + + function verifyIndexExists (namespace: string, indexName: string) { + const sindex = 'sindex/' + namespace + '/' + indexName + const checkStatus = function () { + return client.infoAll(sindex) + .then(() => true) + .catch((error: any) => { + if (error.code !== Aerospike.status.ERR_INDEX_NOT_FOUND) { + return Promise.reject(error) + } + return false + }) + } + return (Job as any).pollUntilDone(checkStatus, 10) + .then(() => helper.index.remove(indexName)) + } + + describe('Client#indexCreate()', function () { + it('returns an IndexJob instance', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name, + datatype: Aerospike.indexDataType.NUMERIC + } + + return client.createIndex(options) + .then((job: IJ) => expect(job).to.be.instanceof(IndexJob)) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + + it('should create a complex index on list', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name, + type: Aerospike.indexType.LIST, + datatype: 
Aerospike.indexDataType.NUMERIC + } + + return client.createIndex(options) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + + it('should create an index with CDT Context', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name, + type: Aerospike.indexType.LIST, + datatype: Aerospike.indexDataType.NUMERIC, + context: new Context().addListIndex(0) + } + + return client.createIndex(options) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + + it('should not create an index with CDT Context \'addListIndexCreate\'', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name, + type: Aerospike.indexType.LIST, + datatype: Aerospike.indexDataType.NUMERIC, + context: new Context().addListIndexCreate(0, 0, false) + } + + return client.createIndex(options) + .then(() => expect(1).to.equal(2)) + .catch(() => { expect('pass').to.equal('pass') }) + }) + + it('should create an integer index with info policy', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name, + datatype: Aerospike.indexDataType.NUMERIC + } + const policy = new Aerospike.InfoPolicy({ + timeout: 100 + }) + + return client.createIndex(options, policy) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + + it('re-creating an index with identical options returns an error (success with new server, verify the existence)', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name, + datatype: Aerospike.indexDataType.NUMERIC + } + + return client.createIndex(options) + .then((job: IJ) => job.wait(10)) + .then(() => client.createIndex(options) + .catch((error: any) => { + if (error.code === Aerospike.status.ERR_INDEX_FOUND || + error.code === Aerospike.status.AEROSPIKE_OK) { + // All good! 
+ verifyIndexExists(helper.namespace, testIndex.name) + } else { + return Promise.reject(error) + } + })) + }) + }) + + describe('Client#createIntegerIndex()', function () { + it('should create an integer index', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name + } + + return client.createIntegerIndex(options) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + }) + + describe('Client#createStringIndex()', function () { + it('should create an string index', function () { + const args = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name + } + + return client.createStringIndex(args) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + }) + + describe('Client#createGeo2DSphereIndex()', function () { + it('should create a geospatial index', function () { + const args = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name + } + + return client.createGeo2DSphereIndex(args) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + }) + + describe('Client#createBlobIndex()', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('should create a blob index', function () { + const args = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name + } + + return client.createBlobIndex(args) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + }) + + describe('Client#indexRemove()', async function () { + beforeEach(async () => { + await helper.index.create(testIndex.name, helper.set, testIndex.bin, + Aerospike.indexDataType.STRING, Aerospike.indexType.DEFAULT) + }) + + it('should drop an index', async function () { + // Wait for index creation to complete + this.timeout(10000) + await new Promise(resolve => setTimeout(resolve, 5000)) + + // Do query on the secondary index to ensure proper creation. 
+ let query = client.query(helper.namespace, helper.set) + query.where(Aerospike.filter.equal(testIndex.bin, 'value')) + await query.results() + + await client.indexRemove(helper.namespace, testIndex.name) + + // Do query on the secondary index to ensure proper deletion + query = client.query(helper.namespace, helper.set) + query.where(Aerospike.filter.equal(testIndex.bin, 'value')) + try { + await query.results() + // Fail test if this code is reached + expect('fail').to.equal('now') + } catch (error: any) { + expect(error.code).to.equal(201) + expect('pass').to.equal('pass') + } + }) + + it('should return a Promise if called without callback function', async function () { + return await client.indexRemove(helper.namespace, testIndex.name) + }) + }) +}) diff --git a/ts-test/tests/index_job.ts b/ts-test/tests/index_job.ts new file mode 100644 index 000000000..ec4d5b4ca --- /dev/null +++ b/ts-test/tests/index_job.ts @@ -0,0 +1,88 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, beforeEach, afterEach, describe, it */ +/* eslint-disable no-unused-expressions */ + + +import Aerospike, { AerospikeError as ASError, IndexJob as IJ } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const AerospikeError: typeof ASError = Aerospike.AerospikeError +const IndexJob: typeof IJ = Aerospike.IndexJob + +describe('IndexJob', function () { + const client = helper.client + const testIndex: any = { name: null, bin: null, counter: 0 } + beforeEach(function () { + testIndex.counter++ + testIndex.name = 'idx-' + testIndex.counter + '-' + Math.floor(Math.random() * 10000000) + testIndex.bin = 'bin-' + testIndex.counter + '-' + Math.floor(Math.random() * 10000000) + }) + afterEach(() => helper.index.remove(testIndex.name)) + + describe('IndexJob#waitUntilDone()', function () { + it('should wait until the index creation is completed', function (done) { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name + } + client.createIntegerIndex(options, function (err?: ASError, job?: IJ) { + if (err) throw err + + job?.waitUntilDone(10, function (err?: ASError) { + expect(err).to.not.be.ok + done() + }) + }) + }) + }) + + describe('IndexJob#checkStatus()', function () { + it('should return a boolean indicating whether the job is done or not', function () { + const options = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index: testIndex.name + } + return client.createIntegerIndex(options) + .then((job: any) => job.checkStatus()) + .then((status: boolean) => expect(status).to.be.a('boolean')) + }) + + it('should return false if the index does not exist', function () { + const job: any = new IndexJob(client, helper.namespace, 'thisIndexDoesNotExist') + return job.checkStatus() + .then((status: boolean) => expect(status).to.be.false) + 
}) + /* + it('should return an error if one of the cluster nodes cannot be queried', function () { + const client = Aerospike.client() // not connected, should return error when info command is executed + const job: any = new IndexJob(client, helper.ns, 'thisIndexDoesNotExist') + return job.checkStatus() + .then(() => { throw new Error('Expected promise to reject') }) + .catch((error: any) => expect(error).to.be.instanceof(AerospikeError)) + }) + */ + }) +}) diff --git a/ts-test/tests/info.js b/ts-test/tests/info.js new file mode 100644 index 000000000..e23a569a2 --- /dev/null +++ b/ts-test/tests/info.js @@ -0,0 +1,236 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, context, before */ +/* eslint-disable no-unused-expressions */ + +const Aerospike = require('../lib/aerospike') +const info = require('../lib/info') +const helper = require('./test_helper') +const utils = require('../lib/utils') + +const AerospikeError = Aerospike.AerospikeError + +context('Info commands', function () { + const client = helper.client + + describe('Client#info()', function () { + helper.skipIf(this, () => !!helper.config.password && helper.cluster.isVersionInRange('>= 5.5'), + 'client#info does not support authenticated connections on server 5.5 or later') + + let node = null + let host = null + + before(() => { + node = helper.cluster.randomNode() + host = utils.parseHostString(node.address) + }) + + it('sends status query to a specific cluster node', function (done) { + client.info('status', host, (error, response) => { + if (error) throw error + expect(response).to.equal('status\tok\n') + done() + }) + }) + + it('accepts a string with the host address', function (done) { + client.info('status', node.address, (error, response) => { + if (error) throw error + expect(response).to.equal('status\tok\n') + done() + }) + }) + + it('fetches all info if no request is passed', function (done) { + client.info(null, host, (error, response) => { + if (error) throw error + expect(response).to.contain('\nversion\t') + expect(response).to.contain('\nedition\t') + done() + }) + }) + + it('should return a client error if the client is not connected', function (done) { + Aerospike.client(helper.config).info('status', host, error => { + expect(error).to.be.instanceof(AerospikeError).with.property('code', Aerospike.status.ERR_CLIENT) + done() + }) + }) + }) + + describe('Client#infoNode()', function () { + let node = null + + before(() => { + node = helper.cluster.randomNode() + }) + + it('sends status query to a specific cluster node', 
function () { + return client.infoNode('status', node) + .then(response => expect(response).to.equal('status\tok\n')) + }) + + it('fetches all info if no request is passed', function () { + return client.infoNode(null, node) + .then(response => { + expect(response).to.contain('\nversion\t') + expect(response).to.contain('\nedition\t') + }) + }) + + it('should return a client error if the client is not connected', function (done) { + Aerospike.client(helper.config).infoNode('status', node, error => { + expect(error).to.be.instanceof(AerospikeError).with.property('code', Aerospike.status.ERR_CLIENT) + done() + }) + }) + }) + + describe('Client#infoAny()', function () { + it('executes the info command on a single cluster node', function (done) { + client.infoAny('status', function (err, result) { + expect(err).to.not.be.ok + expect(result).to.equal('status\tok\n') + done() + }) + }) + + it('returns a Promise that resolves to the result of the info query', function () { + return client.infoAny('status') + .then(result => { + expect(result).to.equal('status\tok\n') + }) + }) + }) + + describe('client.infoAll()', function () { + it('executes the info command on all cluster nodes an returns a list of results', function (done) { + client.infoAll('status', function (err, results) { + expect(err).to.not.be.ok + expect(Array.isArray(results)).to.be.true + results.forEach(function (result) { + expect(result.host).to.be.ok + expect(result.info).to.equal('status\tok\n') + }) + done() + }) + }) + + it('does not require an info command', function () { + return client.infoAll() + .then(results => + expect(Array.isArray(results)).to.be.true) + }) + + it('returns a Promise that resolves to the result of the info query', function () { + return client.infoAll('status') + .then(results => { + expect(Array.isArray(results)).to.be.true + results.forEach(result => { + expect(result.host).to.be.ok + expect(result.info).to.equal('status\tok\n') + }) + }) + }) + }) + + 
describe('info.parse()', function () { + it('should parse key-value pairs from an info string', function () { + const infoStr = 'version\t1\nedition\tCommunity Edition\n' + const infoHash = info.parse(infoStr) + expect(infoHash).to.eql({ version: 1, edition: 'Community Edition' }) + }) + + it('should parse nested key-value pairs', function () { + const infoStr = 'statistics\tmem=10;req=20\n' + const infoHash = info.parse(infoStr) + expect(infoHash.statistics).to.eql({ mem: 10, req: 20 }) + }) + + it('should parse list values', function () { + const infoStr = 'features\tgeo;double\n' + const infoHash = info.parse(infoStr) + expect(infoHash.features).to.eql(['geo', 'double']) + }) + + it('should parse numeric strings as numbers', function () { + const infoStr = 'version\t1' + const infoHash = info.parse(infoStr) + expect(infoHash.version).to.be.a('number') + }) + + it('should be able to handle an empty info response', function () { + const infoStr = 'foo\n' + const infoHash = info.parse(infoStr) + expect(infoHash).to.eql({ foo: undefined }) + }) + + it('should be able to handle an empty string', function () { + const infoStr = '' + const infoHash = info.parse(infoStr) + expect(infoHash).to.eql({}) + }) + + it('does not split the response if no separators are specified', function () { + info.separators['test-foo'] = [] + const infoStr = 'test-foo\ta=1;b=2\n' + const infoHash = info.parse(infoStr) + expect(infoHash['test-foo']).to.eql('a=1;b=2') + }) + + it('should parse the udf-list info key', function () { + const infoStr = 'udf-list\tfilename=mod1.lua,hash=00557374fc319b8d0f38c6668015db35358d7b62,type=LUA;filename=mod2.lua,hash=c96771bd8ce6911a22a592e4857fd47082f14990,type=LUA;' + const infoHash = info.parse(infoStr) + expect(infoHash['udf-list']).to.eql([ + { filename: 'mod1.lua', hash: '00557374fc319b8d0f38c6668015db35358d7b62', type: 'LUA' }, + { filename: 'mod2.lua', hash: 'c96771bd8ce6911a22a592e4857fd47082f14990', type: 'LUA' } + ]) + }) + + it('should parse 
empty udf-list info key and return empty array', function () { + const infoStr = 'udf-list\t' + const infoHash = info.parse(infoStr) + expect(infoHash['udf-list']).to.eql([]) + }) + + it('should parse the bins info key', function () { + const infoStr = 'bins\ttest:bin_names=2,bin_names_quota=32768,bin1,bin2;' + const infoHash = info.parse(infoStr) + const expected = { + test: { + names: ['bin1', 'bin2'], + stats: { bin_names: 2, bin_names_quota: 32768 } + } + } + expect(infoHash.bins).to.deep.equal(expected) + }) + + it('should pick the right separators to parse based on the key pattern', function () { + const infoStr = 'sets/test/foo/bar\tobjects=0:tombstones=0:truncate_lut=275452156000:disable-eviction=false;' + const infoHash = info.parse(infoStr) + const expected = { + objects: 0, + tombstones: 0, + truncate_lut: 275452156000, + 'disable-eviction': 'false' + } + expect(infoHash['sets/test/foo/bar']).to.deep.equal(expected) + }) + }) +}) diff --git a/ts-test/tests/key.ts b/ts-test/tests/key.ts new file mode 100644 index 000000000..0d4d69562 --- /dev/null +++ b/ts-test/tests/key.ts @@ -0,0 +1,271 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, context, it */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Key as K, status as statusModule, Client as Cli, KeyOptions} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const Key: typeof K = Aerospike.Key +const status: typeof statusModule = Aerospike.status + + +describe('Key #noserver', function () { + describe('constructor', function () { + + /* + context('namespace', function () { + it('rejects empty namespace', function () { + expect(function () { return new Key('', 'set', 'key') }).to.throw('Namespace must be a valid string') + }) + + it('rejects namespace that is too long', function () { + expect(function () { return new Key('abcdefghijklmnopqrstuvwxyz0123456789', 'set', 'key') }).to.throw('Namespace must be a valid string') + }) + + it('rejects null namespace', function () { + expect(function () { return new Key(null, 'set', 'key') }).to.throw('Namespace must be a valid string') + }) + + it('rejects undefined namespace', function () { + expect(function () { return new Key(undefined, 'set', 'key') }).to.throw('Namespace must be a valid string') + }) + + it('rejects namespace that is not a string', function () { + expect(function () { return new Key(1234, 'set', 'key') }).to.throw('Namespace must be a valid string') + }) + }) + */ + + context('set name', function () { + it('allows null set name', function () { + expect(new Key('ns', null, 'key')).to.be.ok + }) + + it('allows undefined set name', function () { + expect(new Key('ns', undefined, 'key')).to.be.ok + }) + /* + it('rejects empty set name', function () { + expect(function () { return new Key('ns', '', 'key') }).to.throw('Set must be a valid string') + }) + + it('rejects set name that is too long', function () { + expect(function () { return new Key('ns', 
'abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789', 'key') }).to.throw('Set must be a valid string') + }) + + it('rejects set name that is not a string', function () { + expect(function () { return new Key('ns', 1234, 'key') }).to.throw('Set must be a valid string') + }) + */ + }) + + context('user key', function () { + const dummyDigest = Buffer.from([0x15, 0xc7, 0x49, 0xfd, 0x01, 0x54, 0x43, 0x8b, 0xa9, 0xd9, 0x5d, 0x0c, 0x6e, 0x27, 0x0f, 0x1a, 0x76, 0xfc, 0x31, 0x15]) + + it('allows string user key', function () { + expect(new Key('ns', 'set', 'abc')).to.be.ok + }) + + it('allows string user key of arbitrary length', function () { + expect(new Key('ns', 'set', 'abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789')).to.be.ok + }) + + it('allows integer user key', function () { + expect(new Key('ns', 'set', 1234)).to.be.ok + expect(new Key('ns', 'set', -1234)).to.be.ok + }) + + context('BigInt keys', function () { + it('allows BigInt user key', function () { + expect(new Key('ns', 'set', BigInt(42))).to.be.ok + expect(new Key('ns', 'set', BigInt(2) ** BigInt(63) - BigInt(1))).to.be.ok + expect(new Key('ns', 'set', BigInt(-2) ** BigInt(63))).to.be.ok + }) + /* + it('rejects BigInt user keys outside valid range', function () { + expect(() => new Key('ns', 'set', BigInt(2) ** BigInt(63))).to.throw(TypeError, /Invalid user key/) + expect(() => new Key('ns', 'set', BigInt(-2) ** BigInt(63) - BigInt(1))).to.throw(TypeError, /Invalid user key/) + }) + */ + }) + + it('allows byte array user key', function () { + const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]) + expect(new Key('ns', 'set', buf)).to.be.ok + }) + + it('allows undefined user key', function () { + expect(new Key('ns', 'set', undefined, dummyDigest)).to.be.ok + }) + + it('allows null user key', function () { + expect(new Key('ns', 'set', null, dummyDigest)).to.be.ok + }) + /* + 
it('rejects empty string user key', function () { + expect(() => new Key('ns', 'set', '')).to.throw(TypeError, /Invalid user key/) + }) + + it('rejects empty byte array user key', function () { + expect(() => new Key('ns', 'set', Buffer.from([]))).to.throw(TypeError, /Invalid user key/) + }) + + it('rejects float user key', function () { + expect(() => new Key('ns', 'set', 3.1415)).to.throw(TypeError, /Invalid user key/) + }) + + it('rejects Object user key', function () { + expect(() => new Key('ns', 'set', { key: 'myKey' })).to.throw(TypeError, /Invalid user key/) + }) + + it('requires either key or digest', function () { + expect(() => new Key('ns', 'set')).to.throw('Either key or digest must be set') + }) + */ + }) + + context('digest', function () { + it('allows creating a new key with just the namespace and digest', function () { + const digest = Buffer.from([0x15, 0xc7, 0x49, 0xfd, 0x01, 0x54, 0x43, 0x8b, 0xa9, 0xd9, 0x5d, 0x0c, 0x6e, 0x27, 0x0f, 0x1a, 0x76, 0xfc, 0x31, 0x15]) + expect(new Key('ns', null, null, digest)).to.be.ok + }) + /* + it('rejects a digest that is not a buffer', function () { + expect(function () { return new Key('ns', null, null, 'some string') }).to.throw('Digest must be a 20-byte Buffer') + }) + + it('rejects a digest that is not the right size', function () { + expect(function () { return new Key('ns', null, null, Buffer.from([0x01])) }).to.throw('Digest must be a 20-byte Buffer') + }) + */ + }) + }) + + describe('equals', function () { + it('matches two keys with identical ns, set and user key', function () { + const key1: K = new Key('ns1', 'set1', 'key1') + const key2: K = new Key('ns1', 'set1', 'key1') + expect(key1.equals(key2)).to.be.true + expect(key2.equals(key1)).to.be.true + }) + + it('matches two keys with identical ns, set, user key and digest', function () { + const key1: K = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + const key2: K = new Key('ns1', 'set1', 'key1', 
Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + expect(key1.equals(key2)).to.be.true + expect(key2.equals(key1)).to.be.true + }) + + it('matches two keys with identical ns, set and digest', function () { + const key1: K = new Key('ns1', 'set1', null, Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + const key2: K = new Key('ns1', 'set1', null, Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + expect(key1.equals(key2)).to.be.true + expect(key2.equals(key1)).to.be.true + }) + + it('a key with digest to another key with identical ns, set and user key but without digest', function () { + const key1: K = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + const key2: K = new Key('ns1', 'set1', 'key1') + expect(key1.equals(key2)).to.be.true + expect(key2.equals(key1)).to.be.true + }) + + it('matches two keys with identical ns, empty set and user key', function () { + const key1: K = new Key('ns1', null, 'key1') + const key2: K = new Key('ns1', null, 'key1') + expect(key1.equals(key2)).to.be.true + expect(key2.equals(key1)).to.be.true + }) + + it('does not match two keys with different ns', function () { + const key1: K = new Key('ns1', 'set1', 'key1') + const key2: K = new Key('ns2', 'set1', 'key1') + expect(key1.equals(key2)).to.be.false + expect(key2.equals(key1)).to.be.false + }) + + it('does not match two keys with different set', function () { + const key1: K = new Key('ns1', 'set1', 'key1') + const key2: K = new Key('ns1', 'set2', 'key1') + expect(key1.equals(key2)).to.be.false + expect(key2.equals(key1)).to.be.false + }) + + it('does not match a key with set and a key without set', function () { + const key1: K = new Key('ns1', 'set1', 'key1') + const key2: K = new Key('ns1', null, 'key1') + expect(key1.equals(key2)).to.be.false + expect(key2.equals(key1)).to.be.false + }) + + it('does not match two keys with different user keys', function () { + const key1: K = new Key('ns1', 'set1', 'key1') + const key2: K = new Key('ns1', 'set1', 'key2') + 
expect(key1.equals(key2)).to.be.false + expect(key2.equals(key1)).to.be.false + }) + + it('does not match a key with user key and a key without user key', function () { + const key1: K = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + const key2: K = new Key('ns1', 'set1', null, Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + expect(key1.equals(key2)).to.be.false + expect(key2.equals(key1)).to.be.false + }) + + it('does not match two keys with different digests', function () { + const key1: K = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0')) + const key2: K = new Key('ns1', 'set1', 'key1', Buffer.from('0j9i8h7g6f5e4d3c2b1a')) + expect(key1.equals(key2)).to.be.false + expect(key2.equals(key1)).to.be.false + }) + }) +}) + +context('Plain Object Keys (for backward compatibility)', function () { + const client: Cli = helper.client + + it('accepts plain objects as user keys', function (done) { + const key: KeyOptions = { ns: helper.namespace, set: helper.set, key: 1234 } + client.put(key, { foo: 'bar' }, function (err: any) { + expect(err).to.not.be.ok + done() + }) + }) + + it('returns an error for an unsupported float user key', function (done) { + const key: KeyOptions = { ns: helper.namespace, set: helper.set, key: 3.1415 } + client.put(key, { foo: 'bar' }, function (err: any) { + expect(err.code).to.equal(status.ERR_PARAM) + done() + }) + }) + + /* + it('returns an error for an invalid user key', function (done) { + const key = { ns: helper.namespace, set: helper.set, key: { a: 1, b: 2, c: 3 } } + client.put(key, { foo: 'bar' }, function (err: any) { + expect(err.code).to.equal(status.ERR_PARAM) + done() + }) + }) + */ +}) diff --git a/ts-test/tests/lists.ts b/ts-test/tests/lists.ts new file mode 100644 index 000000000..9aac97e37 --- /dev/null +++ b/ts-test/tests/lists.ts @@ -0,0 +1,1453 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { AerospikeError as ASError, lists as listsModule, operations, cdt, status as statusModule, ListPolicy } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const AerospikeError: typeof ASError = Aerospike.AerospikeError +const lists: typeof listsModule = Aerospike.lists +const ops: typeof operations = Aerospike.operations +const Context: typeof cdt.Context = Aerospike.cdt.Context +const status: typeof statusModule = Aerospike.status + +const eql = require('deep-eql') + +const { + assertError, + assertRecordEql, + assertResultEql, + assertResultSatisfy, + cleanup, + createRecord, + expectError, + initState, + operate +} = require('./util/statefulAsyncTest') + +const orderList: Function = (binName: string, ctx?: cdt.Context) => { + const setListOrder: listsModule.ListOperation = lists.setOrder(binName, lists.order.ORDERED) + if (ctx) setListOrder.withContext(ctx) + return operate(setListOrder) +} + +describe('client.operate() - CDT List operations', function () { + helper.skipUnlessSupportsFeature(Aerospike.features.CDT_LIST, this) + + let ListOutOfBoundsError: any; + before(() => { + ListOutOfBoundsError = helper.cluster.isVersionInRange('>=4.6.0') + ? 
status.ERR_OP_NOT_APPLICABLE + : status.ERR_REQUEST_INVALID + }) + + describe('lists.setOrder', function () { + it('changes the list order', function () { + return initState() + .then(createRecord({ list: [3, 1, 2] })) + .then(operate([ + lists.setOrder('list', lists.order.ORDERED), + ops.read('list') + ])) + .then(assertResultEql({ list: [1, 2, 3] })) + .then(cleanup()) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('changes the order of a nested list', function () { + return initState() + .then(createRecord({ list: [[3, 1, 2], [6, 5, 4]] })) + .then(operate([ + lists.setOrder('list', lists.order.ORDERED).withContext((ctx: any) => ctx.addListIndex(0)), + lists.setOrder('list', lists.order.ORDERED).withContext((ctx: any) => ctx.addListIndex(1)), + ops.read('list') + ])) + .then(assertResultEql({ list: [[1, 2, 3], [4, 5, 6]] })) + .then(cleanup()) + }) + }) + }) + + describe('lists.sort', function () { + it('sorts the list', function () { + return initState() + .then(createRecord({ list: [3, 1, 2, 1] })) + .then(operate([ + lists.sort('list', lists.sortFlags.DEFAULT), + ops.read('list') + ])) + .then(assertResultEql({ list: [1, 1, 2, 3] })) + .then(cleanup()) + }) + + context('with DROP_DUPLICATES flag', function () { + it('sorts the list and drops duplicates', function () { + return initState() + .then(createRecord({ list: [3, 1, 2, 1] })) + .then(operate([ + lists.sort('list', lists.sortFlags.DROP_DUPLICATES), + ops.read('list') + ])) + .then(assertResultEql({ list: [1, 2, 3] })) + .then(cleanup()) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('sorts a nested list', function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], [3, 1, 2, 1]] })) + .then(operate([ + lists.sort('list', lists.sortFlags.DEFAULT).withContext((ctx: any) => ctx.addListIndex(-1)), + ops.read('list') + ])) + .then(assertResultEql({ list: 
[['a', 'b', 'c'], [1, 1, 2, 3]] })) + .then(cleanup()) + }) + }) + }) + + describe('lists.append', function () { + it('appends an item to the list and returns the list size', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.append('list', 99))) + .then(assertResultEql({ list: 6 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5, 99] })) + .then(cleanup) + }) + + context('with add-unique flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE + } + + it('returns an error when trying to append a non-unique element', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.append('list', 3, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL + } + + it('returns an error when trying to append a non-unique element', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.append('list', 3, policy))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('appends a value to a nested list', function () { + return initState() + .then(createRecord({ list: [1, 2, ['a', 'b', 'c'], 4, 5] })) + .then(operate(lists.append('list', 'd').withContext((ctx: any) => ctx.addListIndex(2)))) + .then(assertResultEql({ list: 4 })) + .then(assertRecordEql({ list: [1, 2, ['a', 'b', 'c', 'd'], 4, 5] })) + .then(cleanup) + }) + }) + }) + + describe('lists.appendItems', function () { + it('appends the items to the list and returns the list size', function () { + return initState() + 
.then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.appendItems('list', [99, 100]))) + .then(assertResultEql({ list: 7 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5, 99, 100] })) + .then(cleanup) + }) + /* + it('returns an error if the value to append is not an array', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.appendItems('list', 99))) + .then(assertError(status.ERR_PARAM)) + .then(cleanup) + }) + */ + context('with add-unique flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE + } + + it('returns an error when appending duplicate items', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.appendItems('list', [3, 6], policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL + } + + it('does not append any items but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.appendItems('list', [3, 6], policy))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + + context('with partial flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL | lists.writeFlags.PARTIAL + } + + it('appends only the unique items', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.appendItems('list', [3, 6], policy))) + .then(assertResultEql({ list: 6 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5, 6] })) + .then(cleanup) + }) + }) + }) + }) + + context('with nested list context', function () { + 
helper.skipUnlessVersion('>= 4.6.0', this) + + it('appends the items to a nested list', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.appendItems('map', [99, 100]).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertResultEql({ map: 7 })) + .then(assertRecordEql({ map: { list: [1, 2, 3, 4, 5, 99, 100] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.insert', function () { + it('inserts the item at the specified index and returns the list size', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insert('list', 2, 99))) + .then(assertResultEql({ list: 6 })) + .then(assertRecordEql({ list: [1, 2, 99, 3, 4, 5] })) + .then(cleanup) + }) + + context('with add-unique flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE + } + + it('returns an error when trying to insert a non-unique element', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.insert('list', 2, 3, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL + } + + it('does not insert the item but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insert('list', 2, 3, policy))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + }) + }) + + context('with insert-bounded flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = new Aerospike.ListPolicy({ + writeFlags: lists.writeFlags.INSERT_BOUNDED + }) + + it('returns an error when trying to insert an item outside the 
current bounds of the list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.insert('list', 10, 99, policy))) + .then(assertError(ListOutOfBoundsError)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + const policy: ListPolicy = new Aerospike.ListPolicy({ + writeFlags: lists.writeFlags.INSERT_BOUNDED | lists.writeFlags.NO_FAIL + }) + + it('does not insert an item outside bounds, but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insert('list', 10, 99, policy))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('inserts the item at the specified index of a nested list', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.insert('map', 2, 99).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertResultEql({ map: 6 })) + .then(assertRecordEql({ map: { list: [1, 2, 99, 3, 4, 5] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.insertItems', function () { + it('inserts the items at the specified index and returns the list size', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insertItems('list', 2, [99, 100]))) + .then(assertResultEql({ list: 7 })) + .then(assertRecordEql({ list: [1, 2, 99, 100, 3, 4, 5] })) + .then(cleanup) + }) + /* + it('returns an error if the value to insert is not an array', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.insertItems('list', 2, 99))) + .then(assertError(status.ERR_PARAM)) + .then(cleanup) + }) + */ + + context('with add-unique flag', function () { + const policy: 
ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE + } + + it('returns an error when trying to insert items that already exist in the list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.insertItems('list', 2, [3, 99], policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL + } + + it('does not insert any items but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insertItems('list', 2, [3, 99], policy))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + + context('with partial flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL | lists.writeFlags.PARTIAL + } + + it('inserts only the unique items', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insertItems('list', 2, [3, 99], policy))) + .then(assertResultEql({ list: 6 })) + .then(assertRecordEql({ list: [1, 2, 99, 3, 4, 5] })) + .then(cleanup) + }) + }) + }) + }) + + context('with insert-bounded flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = new Aerospike.ListPolicy({ + writeFlags: lists.writeFlags.INSERT_BOUNDED + }) + + it('returns an error when trying to insert items outside the current bounds of the list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.insertItems('list', 10, [99, 100], policy))) + .then(assertError(ListOutOfBoundsError)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + const 
policy: ListPolicy = new Aerospike.ListPolicy({ + writeFlags: lists.writeFlags.INSERT_BOUNDED | lists.writeFlags.NO_FAIL + }) + + it('does not insert the items outside bounds, but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.insertItems('list', 10, [99, 100], policy))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('inserts the items at the specified index of a nested list', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.insertItems('map', 2, [99, 100]).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertResultEql({ map: 7 })) + .then(assertRecordEql({ map: { list: [1, 2, 99, 100, 3, 4, 5] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.pop', function () { + it('removes the item at the specified index and returns it', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.pop('list', 2))) + .then(assertResultEql({ list: 3 })) + .then(assertRecordEql({ list: [1, 2, 4, 5] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item at the specified index and returns it', function () { + return initState() + .then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] })) + .then(operate(lists.pop('list', 2).withContext((ctx: any) => ctx.addListIndex(0)))) + .then(assertResultEql({ list: 3 })) + .then(assertRecordEql({ list: [[1, 2, 4, 5], [6, 7, 8]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.popRange', function () { + it('removes the items at the specified range and returns them', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + 
.then(operate(lists.popRange('list', 2, 2))) + .then(assertResultEql({ list: [3, 4] })) + .then(assertRecordEql({ list: [1, 2, 5] })) + .then(cleanup) + }) + + it('removes and returns all items starting from the specified index if count is not specified', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.popRange('list', 2))) + .then(assertResultEql({ list: [3, 4, 5] })) + .then(assertRecordEql({ list: [1, 2] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the items in the specified range and returns them', function () { + return initState() + .then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] })) + .then(operate(lists.popRange('list', 2).withContext((ctx: any) => ctx.addListIndex(1)))) + .then(assertResultEql({ list: [8] })) + .then(assertRecordEql({ list: [[1, 2, 3, 4, 5], [6, 7]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.remove', function () { + it('removes the item at the specified index and returns the number of items removed', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.remove('list', 2))) + .then(assertResultEql({ list: 1 })) + .then(assertRecordEql({ list: [1, 2, 4, 5] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item at the specified index', function () { + return initState() + .then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] })) + .then(operate(lists.remove('list', 2).withContext((ctx: any) => ctx.addListIndex(1)))) + .then(assertResultEql({ list: 1 })) + .then(assertRecordEql({ list: [[1, 2, 3, 4, 5], [6, 7]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeRange', function () { + it('removes the items in the specified range and returns the number of items removed', function () { + return initState() + 
.then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.removeRange('list', 2, 2))) + .then(assertResultEql({ list: 2 })) + .then(assertRecordEql({ list: [1, 2, 5] })) + .then(cleanup) + }) + + it('removes all items starting from the specified index and returns the number of items removed if count is not specified', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.removeRange('list', 2))) + .then(assertResultEql({ list: 3 })) + .then(assertRecordEql({ list: [1, 2] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item at the specified range', function () { + return initState() + .then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] })) + .then(operate(lists.removeRange('list', 1, 3).withContext((ctx: any) => ctx.addListIndex(0)))) + .then(assertResultEql({ list: 3 })) + .then(assertRecordEql({ list: [[1, 5], [6, 7, 8]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByIndex', function () { + context('returnType=VALUE', function () { + it('removes the item at the specified index and returns the value', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.removeByIndex('list', 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: 3 })) + .then(assertRecordEql({ list: [1, 2, 4, 5] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item at the specified index', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.removeByIndex('map', 2).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertRecordEql({ map: { list: [1, 2, 4, 5] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByIndexRange', function () { + context('returnType=VALUE', 
function () { + it('removes the items in the specified range and returns the values', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.removeByIndexRange('list', 2, 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [3, 4] })) + .then(assertRecordEql({ list: [1, 2, 5] })) + .then(cleanup) + }) + + it('removes the items starting from the specified index and returns the values', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.removeByIndexRange('list', 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [3, 4, 5] })) + .then(assertRecordEql({ list: [1, 2] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item int the specified range', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.removeByIndexRange('map', 1, 3).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertRecordEql({ map: { list: [1, 5] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByValue', function () { + context('returnType=INDEX', function () { + it('removes all items with the specified value and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.removeByValue('list', 3).andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [2, 5] })) + .then(assertRecordEql({ list: [1, 2, 1, 2] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all items with the specified value', function () { + return initState() + .then(createRecord({ list: [[3, 2, 1], [1, 2, 3, 1, 2, 3]] })) + .then(operate(lists.removeByValue('list', 3).withContext((ctx: any) => ctx.addListValue([3, 2, 1])))) + 
.then(assertRecordEql({ list: [[2, 1], [1, 2, 3, 1, 2, 3]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByValueList', function () { + context('returnType=INDEX', function () { + it('removes all items with the specified values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.removeByValueList('list', [1, 3]).andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [0, 2, 3, 5] })) + .then(assertRecordEql({ list: [2, 2] })) + .then(cleanup) + }) + }) + + context('invert results', function () { + it('removes all items except with the specified values', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.removeByValueList('list', [1, 3]).invertSelection())) + .then(assertRecordEql({ list: [1, 3, 1, 3] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all items except with the specified values', function () { + return initState() + .then(createRecord({ list: [[3, 2, 1], [1, 2, 3, 1, 2, 3]] })) + .then(operate( + lists + .removeByValueList('list', [1, 4]) + .withContext((ctx: any) => ctx.addListIndex(-1)) + .invertSelection() + )) + .then(assertRecordEql({ list: [[3, 2, 1], [1, 1]] })) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.removeByValueRange', function () { + context('returnType=INDEX', function () { + it('removes all items in the specified range of values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.removeByValueRange('list', 2, 5).andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [1, 2, 3] })) + .then(assertRecordEql({ list: [1, 5] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all items in 
the specified range of values', function () { + return initState() + .then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] })) + .then(operate(lists.removeByValueRange('list', 2, 5).withContext((ctx: any) => ctx.addListIndex(0)))) + .then(assertRecordEql({ list: [[1, 5], [6, 7, 8]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByValueRelRankRange', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + context('with count', function () { + it('removes all items nearest to value and greater, by relative rank', function () { + return initState() + .then(createRecord({ list: [0, 4, 5, 9, 11, 15] })) + .then(orderList('list')) + .then(operate(lists.removeByValueRelRankRange('list', 5, 0, 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [5, 9] })) + .then(assertRecordEql({ list: [0, 4, 11, 15] })) + .then(cleanup) + }) + }) + + context('without count', function () { + it('removes all items nearest to value and greater, by relative rank', function () { + return initState() + .then(createRecord({ list: [0, 4, 5, 9, 11, 15] })) + .then(orderList('list')) + .then(operate(lists.removeByValueRelRankRange('list', 5, 0).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [5, 9, 11, 15] })) + .then(assertRecordEql({ list: [0, 4] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all items nearest to value and greater, by relative rank', function () { + const listContext: cdt.Context = new Context().addMapKey('list') + return initState() + .then(createRecord({ map: { list: [0, 4, 5, 9, 11, 15] } })) + .then(orderList('map', listContext)) + .then(operate(lists.removeByValueRelRankRange('map', 5, 0, 2).withContext(listContext))) + .then(assertRecordEql({ map: { list: [0, 4, 11, 15] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByRank', function () { + context('returnType=VALUE', function () { + it('removes 
the item with the specified list rank and returns the value', function () { + return initState() + .then(createRecord({ list: [3, 1, 2, 4] })) + .then(operate(lists.removeByRank('list', 1).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: 2 })) + .then(assertRecordEql({ list: [3, 1, 4] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item with the specified list rank', function () { + return initState() + .then(createRecord({ list: [[2, 3, 1, 4], [3, 1, 2, 4]] })) + .then(operate(lists.removeByRank('list', 1).withContext((ctx: any) => ctx.addListIndex(1)))) + .then(assertRecordEql({ list: [[2, 3, 1, 4], [3, 1, 4]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.removeByRankRange', function () { + context('returnType=VALUE', function () { + it('removes the item with the specified list rank and returns the value', function () { + return initState() + .then(createRecord({ list: [3, 1, 2, 5, 4] })) + .then(operate(lists.removeByRankRange('list', 1, 3).andReturn(lists.returnType.VALUE))) + .then(assertResultSatisfy((result: any) => eql(result.list.sort(), [2, 3, 4]))) + .then(assertRecordEql({ list: [1, 5] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes the item with the specified list rank', function () { + return initState() + .then(createRecord({ list: [[3, 1, 2, 5, 4], [1, 2, 3]] })) + .then(operate(lists.removeByRankRange('list', 1, 3).withContext((ctx: any) => ctx.addListIndex(0)))) + .then(assertRecordEql({ list: [[1, 5], [1, 2, 3]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.clear', function () { + it('removes all elements from the list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.clear('list'))) + .then(assertRecordEql({ list: [] })) + .then(cleanup) + }) + + 
context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all elements from the list', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.clear('map').withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertRecordEql({ map: { list: [] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.create', function () { + it('creates a new list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.create('emptyList', lists.order.ORDERED))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5], emptyList: [] })) + .then(cleanup) + }) + + it('creates a new list with persist index true', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.create('emptyList', lists.order.ORDERED, false, true))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5], emptyList: [] })) + .then(cleanup) + }) + + it('creates a new list with persist index true and pad true', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.create('emptyList', lists.order.ORDERED, true, true))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5], emptyList: [] })) + .then(cleanup) + }) + + it('creates a new list with pad true', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.create('emptyList', lists.order.ORDERED, true, false))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5], emptyList: [] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('creates a new list within a map', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(operate(lists.create('map', lists.order.ORDERED).withContext((ctx: any) => ctx.addMapKeyCreate('nested')))) + .then(assertRecordEql({ map: { 
c: 1, b: 2, a: 3, nested: [] } })) + .then(cleanup) + }) + + it('creates a new list within a list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.create('list', lists.order.UNORDERED, true, false).withContext((ctx: any) => ctx.addListIndex(10)))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5, null, null, null, null, null, []] })) + .then(cleanup) + }) + + it('creates a new list within a list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.create('list', lists.order.UNORDERED, true, true).withContext((ctx: any) => ctx.addListIndex(10)))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5, null, null, null, null, null, []] })) + .then(cleanup) + }) + }) + }) + + describe('lists.set', function () { + it('sets the item at the specified index', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.set('list', 2, 99))) + .then(assertRecordEql({ list: [1, 2, 99, 4, 5] })) + .then(cleanup) + }) + + context('with add-unique flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE + } + + it('fails with an error if the value already exists in the list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.set('list', 2, 5, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + + context('with no-fail flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL + } + + it('does not set the value but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.set('list', 2, 5, policy))) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) 
+ }) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('sets the item at the specified index', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.set('map', 2, 99).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertRecordEql({ map: { list: [1, 2, 99, 4, 5] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.trim', function () { + it('removes all elements not within the specified range and returns the number of elements removed', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.trim('list', 1, 3))) + .then(assertResultEql({ list: 2 })) + .then(assertRecordEql({ list: [2, 3, 4] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all elements not within the specified range', function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 4, 5]] })) + .then(operate(lists.trim('list', 1, 3).withContext((ctx: any) => ctx.addListValue([1, 2, 3, 4, 5])))) + .then(assertResultEql({ list: 2 })) + .then(assertRecordEql({ list: [['a', 'b', 'c'], [2, 3, 4]] })) + .then(cleanup) + }) + }) + }) + + describe('lists.get', function () { + it('returns the item at the specified index', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.get('list', 2))) + .then(assertResultEql({ list: 3 })) + .then(cleanup) + }) + + it('should return an error if the index is out of bounds', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.get('list', 99))) + .then(assertError(ListOutOfBoundsError)) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('returns the item at the 
specified index', function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 4, 5]] })) + .then(operate(lists.get('list', 2).withContext((ctx: any) => ctx.addListIndex(1)))) + .then(assertResultEql({ list: 3 })) + .then(cleanup) + }) + }) + }) + + describe('lists.getRange', function () { + it('returns the items in the specified range', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.getRange('list', 1, 3))) + .then(assertResultEql({ list: [2, 3, 4] })) + .then(cleanup) + }) + + it('returns all items starting at the specified index if count is not specified', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.getRange('list', 1))) + .then(assertResultEql({ list: [2, 3, 4, 5] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('returns the items in the specified range', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.getRange('map', 1, 3).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertResultEql({ map: [2, 3, 4] })) + .then(cleanup) + }) + }) + }) + + describe('lists.getByIndex', function () { + context('returnType=VALUE', function () { + it('fetches the item at the specified index and returns its value', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.getByIndex('list', 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: 3 })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches the item at the specified index and returns its value', function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 4, 5]] })) + .then(operate( + lists + .getByIndex('list', 2) + .withContext((ctx: 
any) => ctx.addListIndex(1)) + .andReturn(lists.returnType.VALUE) + )) + .then(assertResultEql({ list: 3 })) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.getByIndexRange', function () { + context('returnType=VALUE', function () { + it('fetches the items in the specified range and returns the values', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.getByIndexRange('list', 2, 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [3, 4] })) + .then(cleanup) + }) + + it('fetches the items starting from the specified index and returns the values', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.getByIndexRange('list', 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [3, 4, 5] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches the items in the specified range and returns the values', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate( + lists + .getByIndexRange('map', 2, 2) + .withContext((ctx: any) => ctx.addMapKey('list')) + .andReturn(lists.returnType.VALUE) + )) + .then(assertResultEql({ map: [3, 4] })) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.getByValue', function () { + context('returnType=INDEX', function () { + it('fetches all items with the specified value and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.getByValue('list', 3).andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [2, 5] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches all items with the specified value and returns the indexes', function () { + return initState() + 
.then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 1, 2, 3]] })) + .then(operate( + lists + .getByValue('list', 3) + .withContext((ctx: any) => ctx.addListIndex(1)) + .andReturn(lists.returnType.INDEX) + )) + .then(assertResultEql({ list: [2, 5] })) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.getByValueList', function () { + context('returnType=INDEX', function () { + it('fetches all items with the specified values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.getByValueList('list', [1, 3]).andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [0, 2, 3, 5] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches all items with the specified values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 1, 2, 3]] })) + .then(operate( + lists + .getByValueList('list', [1, 3]) + .withContext((ctx: any) => ctx.addListIndex(1)) + .andReturn(lists.returnType.INDEX) + )) + .then(assertResultEql({ list: [0, 2, 3, 5] })) + .then(cleanup) + }) + }) + }) + + context('returnType=EXISTS', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('fetches all items with the specified values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.getByValueList('list', [1, 3, 5]).andReturn(lists.returnType.EXISTS))) + .then(assertResultEql({ list: true })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches all items with the specified values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 1, 2, 3]] })) + .then(operate( + lists + .getByValueList('list', [7, 5, 4]) + 
.withContext((ctx: any) => ctx.addListIndex(1)) + .andReturn(lists.returnType.EXISTS) + )) + .then(assertResultEql({ list: false })) + .then(cleanup) + }) + }) + }) + + context('invert results', function () { + it('fetches all items except with the specified values', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 1, 2, 3] })) + .then(operate(lists.getByValueList('list', [1, 3]).invertSelection().andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [1, 4] })) + .then(cleanup) + }) + }) + }) + + describe('lists.getByValueRange', function () { + context('returnType=INDEX', function () { + it('fetches all items in the specified range of values and returns the indexes', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.getByValueRange('list', 2, 5).andReturn(lists.returnType.INDEX))) + .then(assertResultEql({ list: [1, 2, 3] })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches all items in the specified range of values and returns the indexes', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate( + lists + .getByValueRange('map', 2, 5) + .withContext((ctx: any) => ctx.addMapKey('list')) + .andReturn(lists.returnType.INDEX) + )) + .then(assertResultEql({ map: [1, 2, 3] })) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.getByValueRelRankRange', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + context('with count', function () { + it('fetches all items nearest to value and greater, by relative rank', function () { + return initState() + .then(createRecord({ list: [0, 4, 5, 9, 11, 15] })) + .then(orderList('list')) + .then(operate(lists.getByValueRelRankRange('list', 5, 0, 2).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [5, 9] })) + .then(cleanup) + }) + }) + context('without count', 
function () { + it('fetches all items nearest to value and greater, by relative rank', function () { + return initState() + .then(createRecord({ list: [0, 4, 5, 9, 11, 15] })) + .then(orderList('list')) + .then(operate(lists.getByValueRelRankRange('list', 5, 0).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: [5, 9, 11, 15] })) + .then(cleanup) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches all items nearest to value and greater, by relative rank', function () { + const listContext: cdt.Context = new Context().addMapKey('list') + return initState() + .then(createRecord({ map: { list: [0, 4, 5, 9, 11, 15] } })) + .then(orderList('map', listContext)) + .then(operate( + lists + .getByValueRelRankRange('map', 5, 0, 2) + .withContext(listContext) + .andReturn(lists.returnType.VALUE) + )) + .then(assertResultEql({ map: [5, 9] })) + .then(cleanup) + }) + }) + }) + + describe('lists.getByRank', function () { + context('returnType=VALUE', function () { + it('fetches the item with the specified list rank and returns the value', function () { + return initState() + .then(createRecord({ list: [3, 1, 2, 4] })) + .then(operate(lists.getByRank('list', 1).andReturn(lists.returnType.VALUE))) + .then(assertResultEql({ list: 2 })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches the item with the specified list rank and returns the value', function () { + return initState() + .then(createRecord({ list: [[3, 1, 2, 4], ['a', 'b', 'c']] })) + .then(operate( + lists + .getByRank('list', 1) + .withContext((ctx: any) => ctx.addListIndex(0)) + .andReturn(lists.returnType.VALUE) + )) + .then(assertResultEql({ list: 2 })) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.getByRankRange', function () { + context('returnType=VALUE', function () { + it('fetches the item with the specified list rank and 
returns the value', function () { + return initState() + .then(createRecord({ list: [3, 1, 2, 5, 4] })) + .then(operate(lists.getByRankRange('list', 1, 3).andReturn(lists.returnType.VALUE))) + .then(assertResultSatisfy((result: any) => eql(result.list.sort(), [2, 3, 4]))) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches the item with the specified list rank and returns the value', function () { + return initState() + .then(createRecord({ list: [[3, 1, 2, 5, 4], ['a', 'b', 'c']] })) + .then(operate( + lists + .getByRankRange('list', 1, 3) + .withContext((ctx: any) => ctx.addListIndex(0)) + .andReturn(lists.returnType.VALUE) + )) + .then(assertResultSatisfy((result: any) => eql(result.list.sort(), [2, 3, 4]))) + .then(cleanup) + }) + }) + }) + }) + + describe('lists.increment', function () { + helper.skipUnlessVersion('>= 3.15.0', this) + + it('increments the element at the specified index and returns the final value', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.increment('list', 1, 3))) + .then(assertResultEql({ list: 5 })) + .then(assertRecordEql({ list: [1, 5, 3, 4, 5] })) + .then(cleanup) + }) + + it('increments the element at the specified index by one and returns the final value', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.increment('list', 2))) + .then(assertResultEql({ list: 4 })) + .then(assertRecordEql({ list: [1, 2, 4, 4, 5] })) + .then(cleanup) + }) + + context('ordered lists', function () { + it('reorders the list with the incremented value', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(orderList('list')) + .then(operate(lists.increment('list', 2, 10))) + .then(assertResultEql({ list: 13 })) + .then(assertRecordEql({ list: [1, 2, 4, 5, 13] })) + .then(cleanup) + }) + }) + + context('with add-unique 
flag', function () { + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE + } + + it('fails with an error if the incremented number already exists in the list', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(expectError()) + .then(operate(lists.increment('list', 2, 1, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup) + }) + + context('with no-fail flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy: ListPolicy = { + writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL + } + + it('does not increment the item but returns ok', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.increment('list', 2, 1, policy))) + // Note: Operation returns post-increment value even though + // operation was not executed due to add-unique constraint! + .then(assertResultEql({ list: 4 })) + .then(assertRecordEql({ list: [1, 2, 3, 4, 5] })) + .then(cleanup) + }) + }) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('increments the element at the specified index and returns the final value', function () { + return initState() + .then(createRecord({ map: { list: [1, 2, 3, 4, 5] } })) + .then(operate(lists.increment('map', 1, 3).withContext((ctx: any) => ctx.addMapKey('list')))) + .then(assertResultEql({ map: 5 })) + .then(assertRecordEql({ map: { list: [1, 5, 3, 4, 5] } })) + .then(cleanup) + }) + }) + }) + + describe('lists.size', function () { + it('returns the element count', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, 5] })) + .then(operate(lists.size('list'))) + .then(assertResultEql({ list: 5 })) + .then(cleanup) + }) + + context('with nested list context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('returns the element count', function () { + return initState() + 
.then(createRecord({ list: [[], [1, 2, 3, 4, 5]] })) + .then(operate(lists.size('list').withContext((ctx: any) => ctx.addListIndex(-1)))) + .then(assertResultEql({ list: 5 })) + .then(cleanup) + }) + }) + }) + + describe('ListOperation', function () { + describe('#invertSelection', function () { + it('throws an error if the operation is not invertible', function () { + const op: listsModule.ListOperation = lists.size('lists') + expect(() => op.invertSelection()).to.throw(AerospikeError, 'List operation cannot be inverted') + }) + }) + }) +}) diff --git a/ts-test/tests/maps.ts b/ts-test/tests/maps.ts new file mode 100644 index 000000000..7dc1fee92 --- /dev/null +++ b/ts-test/tests/maps.ts @@ -0,0 +1,1622 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ + +import Aerospike, { maps as Maps, operations, cdt, status as statusModule, AerospikeBins} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const maps: typeof Maps = Aerospike.maps +const op: typeof operations = Aerospike.operations +const Context: typeof cdt.Context = Aerospike.cdt.Context +const status: typeof statusModule = Aerospike.status + +const eql = require('deep-eql') + +const { + assertError, + assertRecordEql, + assertResultEql, + assertResultSatisfy, + cleanup, + createRecord, + expectError, + initState, + operate +} = require('./util/statefulAsyncTest') + +const orderMap = (binName: string, order?: Maps.order, ctx?: cdt.Context) => { + const policy = new Aerospike.MapPolicy({ order }) + const setMapPolicy = maps.setPolicy(binName, policy) + if (ctx) setMapPolicy.withContext(ctx) + return operate(setMapPolicy) +} +const orderByKey = (binName: string, ctx?: cdt.Context) => orderMap(binName, maps.order.KEY_ORDERED, ctx) +const orderByKeyValue = (binName: string, ctx?: cdt.Context) => orderMap(binName, maps.order.KEY_VALUE_ORDERED, ctx) + +describe('client.operate() - CDT Map operations', function () { + helper.skipUnlessSupportsFeature(Aerospike.features.CDT_MAP, this) + + describe('maps.setPolicy', function () { + it('changes the map order', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyRange('map', 'a', 'z', maps.returnType.KEY))) + .then(assertResultEql({ map: ['a', 'b', 'c'] })) + .then(cleanup()) + }) + }) + + describe('maps.create', function () { + it('Creates a new map', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('emptyMap', maps.order.KEY_ORDERED))) + .then(operate(op.read('dap'))) 
+ .then(assertRecordEql({ emptyMap: {}, map: { a: 3, b: 2, c: 1 } })) + .then(cleanup()) + }) + + it('Creates a new map from a cdt context', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('map', maps.order.KEY_ORDERED).withContext((ctx: cdt.Context) => ctx.addMapKeyCreate('nested')))) + .then(assertRecordEql({ map: { a: 3, b: 2, c: 1, nested: {} } })) + .then(cleanup()) + }) + + it('Creates a new map from a cdt context as parameter', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('map', maps.order.KEY_ORDERED, false, new Context().addMapKeyCreate('nested')))) + .then(assertRecordEql({ map: { a: 3, b: 2, c: 1, nested: {} } })) + .then(cleanup()) + }) + + context('persistent indexes added in 7.0', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('Creates a new map with persistent index', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('emptyMap', maps.order.KEY_ORDERED, true))) + .then(assertRecordEql({ emptyMap: {}, map: { a: 3, b: 2, c: 1 } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.put', function () { + it('adds the item to the map and returns the size of the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'd', 99))) + .then(assertResultEql({ map: 4 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 99 } })) + .then(cleanup()) + }) + + it('replaces the item and returns the size of the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'b', 99))) + .then(assertResultEql({ map: 3 })) + .then(assertRecordEql({ map: { a: 1, b: 99, c: 3 } })) + .then(cleanup()) + }) + + it('creates a new map if it does 
not exist yet', function () { + return initState() + .then(createRecord({ i: 1 })) + .then(operate(maps.put('map', 'a', 1))) + .then(assertResultEql({ map: 1 })) + .then(assertRecordEql({ i: 1, map: { a: 1 } })) + .then(cleanup()) + }) + + it('fails if the bin does not contain a map', function () { + return initState() + .then(createRecord({ map: 'this is not a map' })) + .then(expectError()) + .then(operate(maps.put('map', 'a', 1))) + .then(assertError(status.ERR_BIN_INCOMPATIBLE_TYPE)) + .then(cleanup()) + }) + + context('update-only write mode', function () { + const updateOnlyPolicy = new Aerospike.MapPolicy({ + writeMode: maps.writeMode.UPDATE_ONLY + }) + + it('overwrites an existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'b', 99, updateOnlyPolicy))) + .then(assertResultEql({ map: 3 })) + .then(assertRecordEql({ map: { a: 1, b: 99, c: 3 } })) + .then(cleanup()) + }) + + it('fails to write a non-existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(expectError()) + .then(operate(maps.put('map', 'd', 99, updateOnlyPolicy))) + .then(assertError(status.ERR_FAIL_ELEMENT_NOT_FOUND)) + .then(cleanup()) + }) + }) + + context('with update-only flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.UPDATE_ONLY + }) + + it('overwrites an existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'b', 99, policy))) + .then(assertResultEql({ map: 3 })) + .then(assertRecordEql({ map: { a: 1, b: 99, c: 3 } })) + .then(cleanup()) + }) + + it('fails to write a non-existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(expectError()) + .then(operate(maps.put('map', 'd', 99, policy))) + 
.then(assertError(status.ERR_FAIL_ELEMENT_NOT_FOUND)) + .then(cleanup()) + }) + + context('with no-fail flag', function () { + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.UPDATE_ONLY | maps.writeFlags.NO_FAIL + }) + + it('does not add the item but returns ok', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'd', 99, policy))) + .then(assertResultEql({ map: 3 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3 } })) + .then(cleanup()) + }) + }) + }) + + context('create-only write mode', function () { + const createOnlyPolicy = new Aerospike.MapPolicy({ + writeMode: maps.writeMode.CREATE_ONLY + }) + + it('fails to overwrite an existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(expectError()) + .then(operate(maps.put('map', 'b', 99, createOnlyPolicy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup()) + }) + + it('creates a new key if it does not exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'd', 99, createOnlyPolicy))) + .then(assertResultEql({ map: 4 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 99 } })) + .then(cleanup()) + }) + }) + + context('with create-only flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.CREATE_ONLY + }) + + it('fails to overwrite an existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(expectError()) + .then(operate(maps.put('map', 'b', 99, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup()) + }) + + it('creates a new key if it does not exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'd', 99, policy))) + 
.then(assertResultEql({ map: 4 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 99 } })) + .then(cleanup()) + }) + + context('with no-fail flag', function () { + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.CREATE_ONLY | maps.writeFlags.NO_FAIL + }) + + it('does not update the item but returns ok', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.put('map', 'b', 99, policy))) + .then(assertResultEql({ map: 3 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3 } })) + .then(cleanup()) + }) + }) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('adds the item to the nested map', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 2, c: 3 }] })) + .then(operate(maps.put('list', 'd', 99).withContext((ctx: cdt.Context) => ctx.addListIndex(0)))) + .then(assertResultEql({ list: 4 })) + .then(assertRecordEql({ list: [{ a: 1, b: 2, c: 3, d: 99 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.putItems', function () { + it('adds each item to the map and returns the size of the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.putItems('map', { c: 99, d: 100 }))) + .then(assertResultEql({ map: 4 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 99, d: 100 } })) + .then(cleanup()) + }) + + it('adds each item from the Map class to the map and returns the size of the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.putItems('map', new Map([['e', 150], ['d', 100], ['c', 99]])))) + .then(assertResultEql({ map: 5 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 99, d: 100, e: 150 } })) + .then(cleanup()) + }) + + context('with update-only flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy = new Aerospike.MapPolicy({ + writeFlags: 
maps.writeFlags.UPDATE_ONLY + }) + + it('fails if any of the items do not yet exist in the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(expectError()) + .then(operate(maps.putItems('map', { c: 10, x: 100 }, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_NOT_FOUND)) + .then(cleanup()) + }) + + context('with no-fail flag', function () { + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.UPDATE_ONLY | maps.writeFlags.NO_FAIL + }) + + it('does not update the map but returns ok', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(operate(maps.putItems('map', { c: 10, x: 100 }, policy))) + .then(assertResultEql({ map: 5 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(cleanup()) + }) + + context('with partial flag', function () { + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.UPDATE_ONLY | maps.writeFlags.NO_FAIL | maps.writeFlags.PARTIAL + }) + + it('updates only the existing items', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(operate(maps.putItems('map', { c: 10, x: 100 }, policy))) + .then(assertResultEql({ map: 5 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 10, d: 4, e: 5 } })) + .then(cleanup()) + }) + }) + }) + }) + + context('with create-only flag', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.CREATE_ONLY + }) + + it('fails if any of the items already exist in the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(expectError()) + .then(operate(maps.putItems('map', { c: 10, x: 100 }, policy))) + .then(assertError(status.ERR_FAIL_ELEMENT_EXISTS)) + .then(cleanup()) + }) + + context('with no-fail flag', function () { + const policy = new 
Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.CREATE_ONLY | maps.writeFlags.NO_FAIL + }) + + it('does not update the map but returns ok', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(operate(maps.putItems('map', { c: 10, x: 100 }, policy))) + .then(assertResultEql({ map: 5 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(cleanup()) + }) + + context('with partial flag', function () { + const policy = new Aerospike.MapPolicy({ + writeFlags: maps.writeFlags.CREATE_ONLY | maps.writeFlags.NO_FAIL | maps.writeFlags.PARTIAL + }) + + it('adds only the keys that do not exist yet', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4, e: 5 } })) + .then(operate(maps.putItems('map', { c: 10, x: 100 }, policy))) + .then(assertResultEql({ map: 6 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 4, e: 5, x: 100 } })) + .then(cleanup()) + }) + }) + }) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('adds each item to the nested map', function () { + return initState() + .then(createRecord({ map: { nested: { a: 1, b: 2, c: 3 } } })) + .then(operate(maps.putItems('map', { c: 99, d: 100 }).withContext((ctx: cdt.Context) => ctx.addMapKey('nested')))) + .then(assertResultEql({ map: 4 })) + .then(assertRecordEql({ map: { nested: { a: 1, b: 2, c: 99, d: 100 } } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.increment', function () { + it('increments the value of the entry and returns the final value', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.increment('map', 'b', 10))) + .then(assertResultEql({ map: 12 })) + .then(assertRecordEql({ map: { a: 1, b: 12, c: 3 } })) + .then(cleanup()) + }) + + it('creates a new entry if the key does not exist yet', function () { + return initState() + .then(createRecord({ map: { 
a: 1, b: 2, c: 3 } })) + .then(operate(maps.increment('map', 'd', 10))) + .then(assertResultEql({ map: 10 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: 10 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('increments the value of the entry and returns the final value', function () { + return initState() + .then(createRecord({ list: [[1, 2, 3], { a: 1, b: 2, c: 3 }] })) + .then(operate(maps.increment('list', 'b', 10).withContext((ctx: cdt.Context) => ctx.addListIndex(1)))) + .then(assertResultEql({ list: 12 })) + .then(assertRecordEql({ list: [[1, 2, 3], { a: 1, b: 12, c: 3 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.decrement', function () { + it('decrements the value of the entry and returns the final value', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 12, c: 3 } })) + .then(operate(maps.decrement('map', 'b', 10))) + .then(assertResultEql({ map: 2 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3 } })) + .then(cleanup()) + }) + + it('creates a new entry if the key does not exist yet', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.decrement('map', 'd', 1))) + .then(assertResultEql({ map: -1 })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3, d: -1 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('decrements the value of the entry and returns the final value', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 12, c: 3 }, ['a', 'b', 'c']] })) + .then(operate(maps.decrement('list', 'b', 10).withContext((ctx: cdt.Context) => ctx.addListIndex(0)))) + .then(assertResultEql({ list: 2 })) + .then(assertRecordEql({ list: [{ a: 1, b: 2, c: 3 }, ['a', 'b', 'c']] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.clear', function () { + it('removes all 
entries from the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 12, c: 3 } })) + .then(operate(maps.clear('map'))) + .then(assertResultEql({ map: null })) + .then(assertRecordEql({ map: { } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes all entries from the map', function () { + return initState() + .then(createRecord({ map: { nested: { deepNested: { a: 1, b: 12, c: 3 } } } })) + .then(operate( + maps + .clear('map') + .withContext((ctx: cdt.Context) => ctx.addMapKey('nested').addMapKey('deepNested')) + )) + .then(assertRecordEql({ map: { nested: { deepNested: { } } } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByKey', function () { + it('removes a map entry identified by key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByKey('map', 'b', maps.returnType.VALUE))) + .then(assertResultEql({ map: 2 })) + .then(assertRecordEql({ map: { a: 1, c: 3 } })) + .then(cleanup()) + }) + + it('does not fail when removing a non-existing key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByKey('map', 'd', maps.returnType.VALUE))) + .then(assertResultEql({ map: null })) + .then(assertRecordEql({ map: { a: 1, b: 2, c: 3 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes a map entry identified by key', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 2, c: 3 }, { a: 2, b: 3, c: 4 }] })) + .then(operate(maps.removeByKey('list', 'b').withContext((ctx: cdt.Context) => ctx.addListIndex(-1)))) + .then(assertRecordEql({ list: [{ a: 1, b: 2, c: 3 }, { a: 2, c: 4 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByKeyList', function () { + it('removes map entries identified 
by one or more keys', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByKeyList('map', ['a', 'c'], maps.returnType.VALUE))) + .then(assertResultSatisfy((result: AerospikeBins) => eql((result.map as number[]).sort(), [1, 3]))) + .then(assertRecordEql({ map: { b: 2 } })) + .then(cleanup()) + }) + + it('skips non-existent keys', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByKeyList('map', ['a', 'x', 'y', 'z', 'c'], maps.returnType.VALUE))) + .then(assertResultSatisfy((result: AerospikeBins) => eql((result.map as number[]).sort(), [1, 3]))) + .then(assertRecordEql({ map: { b: 2 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries identified by one or more keys', function () { + return initState() + .then(createRecord({ maps: { a: { a: 1, b: 2, c: 3 }, b: { a: 2, b: 3, c: 4 } } })) + .then(operate(maps.removeByKeyList('maps', ['a', 'c']).withContext((ctx: cdt.Context) => ctx.addMapKey('a')))) + .then(assertRecordEql({ maps: { a: { b: 2 }, b: { a: 2, b: 3, c: 4 } } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByKeyRange', function () { + it('removes map entries identified by key range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4 } })) + .then(orderByKey('map')) + .then(operate(maps.removeByKeyRange('map', 'b', 'd', maps.returnType.VALUE))) + .then(assertResultEql({ map: [2, 3] })) + .then(assertRecordEql({ map: { a: 1, d: 4 } })) + .then(cleanup()) + }) + + it('removes all keys from the specified start key until the end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4 } })) + .then(orderByKey('map')) + .then(operate(maps.removeByKeyRange('map', 'b', null, maps.returnType.VALUE))) + .then(assertResultEql({ map: [2, 3, 4] })) + 
.then(assertRecordEql({ map: { a: 1 } })) + .then(cleanup()) + }) + + it('removes all keys from the start to the specified end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4 } })) + .then(orderByKey('map')) + .then(operate(maps.removeByKeyRange('map', null, 'b', maps.returnType.VALUE))) + .then(assertResultEql({ map: [1] })) + .then(assertRecordEql({ map: { b: 2, c: 3, d: 4 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries identified by key range', function () { + const mapContext = new Context().addListIndex(0) + return initState() + .then(createRecord({ list: [{ a: 1, b: 2, c: 3, d: 4 }] })) + .then(orderByKey('list', mapContext)) + .then(operate(maps.removeByKeyRange('list', 'b', 'd').withContext(mapContext))) + .then(assertRecordEql({ list: [{ a: 1, d: 4 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByKeyRelIndexRange', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + context('with count', function () { + it('removes map entries nearest to key and greater, by index', function () { + return initState() + .then(createRecord({ map: { a: 17, e: 2, f: 15, j: 10 } })) + .then(orderByKey('map')) + .then(operate(maps.removeByKeyRelIndexRange('map', 'f', 0, 1).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['f'] })) + .then(assertRecordEql({ map: { a: 17, e: 2, j: 10 } })) + .then(cleanup()) + }) + }) + + context('without count', function () { + it('removes map entries nearest to key and greater, by index', function () { + return initState() + .then(createRecord({ map: { a: 17, e: 2, f: 15, j: 10 } })) + .then(orderByKey('map')) + .then(operate(maps.removeByKeyRelIndexRange('map', 'f', 0).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['f', 'j'] })) + .then(assertRecordEql({ map: { a: 17, e: 2 } })) + .then(cleanup()) + }) + }) + + context('with nested map 
context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries nearest to key and greater, by index', function () { + const mapContext = new Context().addListIndex(0) + return initState() + .then(createRecord({ list: [{ a: 17, e: 2, f: 15, j: 10 }, { a: 32, f: 14 }] })) + .then(orderByKey('list', mapContext)) + .then(operate(maps.removeByKeyRelIndexRange('list', 'f', 0, 1).withContext(mapContext))) + .then(assertRecordEql({ list: [{ a: 17, e: 2, j: 10 }, { a: 32, f: 14 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByValue', function () { + it('removes a map entry identified by value', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByValue('map', 2, maps.returnType.RANK))) + .then(assertResultEql({ map: [1] })) + .then(assertRecordEql({ map: { a: 1, c: 3 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes a map entry identified by value', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 2 }, { a: 1, b: 2 }, { a: 2, b: 3 }] })) + .then(operate( + maps + .removeByValue('list', 2) + .withContext((ctx: cdt.Context) => ctx.addListValue({ a: 1, b: 2 })) // matches only the first list value + .andReturn(maps.returnType.RANK) + )) + .then(assertResultEql({ list: [1] })) + .then(assertRecordEql({ list: [{ a: 1 }, { a: 1, b: 2 }, { a: 2, b: 3 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByValueList', function () { + it('removes map entries identified by one or more values', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByValueList('map', [1, 3], maps.returnType.RANK))) + .then(assertResultEql({ map: [0, 2] })) + .then(assertRecordEql({ map: { b: 2 } })) + .then(cleanup()) + }) + + it('skips non-existent values', function () { + return initState() + 
.then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByValueList('map', [1, 99, 3], maps.returnType.RANK))) + .then(assertResultEql({ map: [0, 2] })) + .then(assertRecordEql({ map: { b: 2 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries identified by one or more values', function () { + return initState() + .then(createRecord({ map: { a: { a: 1, b: 2, c: 3 }, b: { b: 2, c: 3, d: 4 } } })) + .then(operate(maps.removeByValueList('map', [1, 3]).withContext((ctx: cdt.Context) => ctx.addMapKey('a')))) + .then(assertRecordEql({ map: { a: { b: 2 }, b: { b: 2, c: 3, d: 4 } } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByValueRange', function () { + it('removes map entries identified by value range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 2, d: 3 } })) + .then(operate(maps.removeByValueRange('map', 2, 3, maps.returnType.RANK))) + .then(assertResultEql({ map: [1, 2] })) + .then(assertRecordEql({ map: { a: 1, d: 3 } })) + .then(cleanup()) + }) + + it('removes all keys from the specified start value until the end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByValueRange('map', 2, null, maps.returnType.RANK))) + .then(assertResultEql({ map: [1, 2] })) + .then(assertRecordEql({ map: { a: 1 } })) + .then(cleanup()) + }) + + it('removes all keys from the start to the specified end value', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByValueRange('map', null, 2, maps.returnType.RANK))) + .then(assertResultEql({ map: [0] })) + .then(assertRecordEql({ map: { b: 2, c: 3 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries identified by value range', 
function () { + return initState() + .then(createRecord({ list: [['a', 'b', 'c'], { a: 1, b: 2, c: 2, d: 3 }] })) + .then(operate(maps.removeByValueRange('list', 2, 3).withContext((ctx: cdt.Context) => ctx.addListIndex(-1)))) + .then(assertRecordEql({ list: [['a', 'b', 'c'], { a: 1, d: 3 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByValueRelRankRange', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + context('with count', function () { + it('removes map entries nearest to value and greater by relative rank', function () { + return initState() + .then(createRecord({ map: { e: 2, j: 10, f: 15, a: 17 } })) + .then(orderByKeyValue('map')) + .then(operate(maps.removeByValueRelRankRange('map', 11, 1, 1).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['a'] })) + .then(assertRecordEql({ map: { e: 2, j: 10, f: 15 } })) + .then(cleanup()) + }) + }) + + context('without count', function () { + it('removes map entries nearest to value and greater by relative rank', function () { + return initState() + .then(createRecord({ map: { e: 2, j: 10, f: 15, a: 17 } })) + .then(orderByKeyValue('map')) + .then(operate(maps.removeByValueRelRankRange('map', 11, -1).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['j', 'f', 'a'] })) + .then(assertRecordEql({ map: { e: 2 } })) + .then(cleanup()) + }) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries nearest to value and greater by relative rank', function () { + const mapContext = new Context() + .addListIndex(2) + .addListIndex(1) + return initState() + .then(createRecord({ list: ['a', 'b', ['c', { e: 2, j: 10, f: 15, a: 17 }], 'd', 'e'] })) + .then(orderByKeyValue('list', mapContext)) + .then(operate(maps.removeByValueRelRankRange('list', 11, 1, 1).withContext(mapContext))) + .then(assertRecordEql({ list: ['a', 'b', ['c', { e: 2, j: 10, f: 15 }], 'd', 'e'] })) + .then(cleanup()) + }) + }) + 
}) + + describe('maps.removeByIndex', function () { + it('removes a map entry identified by index', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByIndex('map', 1, maps.returnType.KEY))) + .then(assertResultEql({ map: 'b' })) + .then(assertRecordEql({ map: { a: 1, c: 3 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes a map entry identified by index', function () { + return initState() + .then(createRecord({ map: { nested: { a: 1, b: 2, c: 3 } } })) + .then(operate(maps.removeByIndex('map', 1).withContext((ctx: cdt.Context) => ctx.addMapValue({ a: 1, b: 2, c: 3 })))) + .then(assertRecordEql({ map: { nested: { a: 1, c: 3 } } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByIndexRange', function () { + it('removes map entries identified by index range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 2, d: 3 } })) + .then(operate(maps.removeByIndexRange('map', 1, 2, maps.returnType.KEY))) + .then(assertResultEql({ map: ['b', 'c'] })) + .then(assertRecordEql({ map: { a: 1, d: 3 } })) + .then(cleanup()) + }) + + it('removes all map entries starting at the specified index if count is null', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByIndexRange('map', 1, null, maps.returnType.KEY))) + .then(assertResultEql({ map: ['b', 'c'] })) + .then(assertRecordEql({ map: { a: 1 } })) + .then(cleanup()) + }) + + it('removes all map entries starting at the specified index if count is undefined', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.removeByIndexRange('map', 1))) + .then(assertResultEql({ map: null })) + .then(assertRecordEql({ map: { a: 1 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + 
helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries identified by index range', function () { + return initState() + .then(createRecord({ map: { nested: { a: 1, b: 2, c: 2, d: 3 } } })) + .then(operate(maps.removeByIndexRange('map', 1, 2).withContext((ctx: cdt.Context) => ctx.addMapKey('nested')))) + .then(assertRecordEql({ map: { nested: { a: 1, d: 3 } } })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByRank', function () { + it('removes a map entry identified by rank', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.removeByRank('map', 0, maps.returnType.VALUE))) + .then(assertResultEql({ map: 1 })) + .then(assertRecordEql({ map: { a: 3, b: 2 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes a map entry identified by rank', function () { + return initState() + .then(createRecord({ list: [{ a: 3, b: 2, c: 1 }] })) + .then(operate(maps.removeByRank('list', 0).withContext((ctx: cdt.Context) => ctx.addListIndex(0)))) + .then(assertRecordEql({ list: [{ a: 3, b: 2 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.removeByRankRange', function () { + it('removes map entries identified by rank range', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.removeByRankRange('map', 0, 2, maps.returnType.VALUE))) + .then(assertResultEql({ map: [1, 2] })) + .then(assertRecordEql({ map: { a: 3 } })) + .then(cleanup()) + }) + + it('removes all map entries starting at the specified rank until the end', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.removeByRankRange('map', 1, null, maps.returnType.VALUE))) + .then(assertResultEql({ map: [2, 3] })) + .then(assertRecordEql({ map: { c: 1 } })) + .then(cleanup()) + }) + + context('with nested map context', function () { + 
helper.skipUnlessVersion('>= 4.6.0', this) + + it('removes map entries identified by rank range', function () { + return initState() + .then(createRecord({ list: [{ a: 3, b: 2, c: 1 }] })) + .then(operate(maps.removeByRankRange('list', 0, 2).withContext((ctx: cdt.Context) => ctx.addListIndex(-1)))) + .then(assertRecordEql({ list: [{ a: 3 }] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.size', function () { + it('returns the size of the map', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.size('map'))) + .then(assertResultEql({ map: 3 })) + .then(cleanup()) + }) + + it('returns zero if the map is empty', function () { + return initState() + .then(createRecord({ map: { } })) + .then(operate(maps.size('map'))) + .then(assertResultEql({ map: 0 })) + .then(cleanup()) + }) + + it('returns null if the map does not exist', function () { + return initState() + .then(createRecord({ i: 1 })) + .then(operate(maps.size('map'))) + .then(assertResultEql({ map: null })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('returns the size of the map', function () { + return initState() + .then(createRecord({ map: { nested: { a: 1, b: 2, c: 3 } } })) + .then(operate(maps.size('map').withContext((ctx: cdt.Context) => ctx.addMapKey('nested')))) + .then(assertResultEql({ map: 3 })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByKey', function () { + it('fetches a map entry identified by key', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByKey('map', 'b', maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['b', 2] })) + .then(cleanup()) + }) + + it('does not fail if the key does not exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByKey('map', 'z', maps.returnType.KEY_VALUE))) + 
.then(assertResultEql({ map: [] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches a map entry identified by key', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 2, c: 3 }, { b: 3 }] })) + .then(operate( + maps + .getByKey('list', 'b') + .withContext((ctx: cdt.Context) => ctx.addListIndex(0)) + .andReturn(maps.returnType.KEY_VALUE) + )) + .then(assertResultEql({ list: ['b', 2] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByKeyList', function () { + it('fetches a map entry identified by keys', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByKeyList('map', ['b', 'c'], maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['b', 2, 'c', 3] })) + .then(cleanup()) + }) + + it('does not fail if the key does not exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByKeyList('map', ['z'], maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: [] })) + .then(cleanup()) + }) + + it('does not fail if only some keys exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByKeyList('map', ['a', 'z'], maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['a', 1] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches a map entries identified by keys', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 2, c: 3 }, { b: 3 }] })) + .then(operate( + maps + .getByKeyList('list', ['a', 'b']) + .withContext((ctx: cdt.Context) => ctx.addListIndex(0)) + .andReturn(maps.returnType.KEY_VALUE) + )) + .then(assertResultEql({ list: ['a', 1, 'b', 2] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByKeyRange', function () { + 
it('fetches map entries identified by key range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyRange('map', 'b', 'd', maps.returnType.KEY))) + .then(assertResultEql({ map: ['b', 'c'] })) + .then(cleanup()) + }) + + it('fetches all keys from the specified start key until the end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3, d: 4 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyRange('map', 'b', null, maps.returnType.KEY))) + .then(assertResultEql({ map: ['b', 'c', 'd'] })) + .then(cleanup()) + }) + + it('fetches all keys from the start to the specified end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByKeyRange('map', null, 'b', maps.returnType.KEY))) + .then(assertResultEql({ map: ['a'] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches map entries identified by key range', function () { + const mapContext = new Context().addListIndex(-1) + return initState() + .then(createRecord({ list: [{ b: 3, c: 4 }, { a: 1, b: 2, c: 3, d: 4 }] })) + .then(orderByKey('list', mapContext)) + .then(operate( + maps + .getByKeyRange('list', 'b', 'd') + .withContext(mapContext) + .andReturn(maps.returnType.KEY) + )) + .then(assertResultEql({ list: ['b', 'c'] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByKeyRelIndexRange', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + context('with count', function () { + it('retrieves map entries nearest to key and greater, by index', function () { + return initState() + .then(createRecord({ map: { a: 17, e: 2, f: 15, j: 10 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyRelIndexRange('map', 'f', 0, 1).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['f'] })) + .then(cleanup()) 
+ }) + }) + + context('without count', function () { + it('retrieves map entries nearest to key and greater, by index', function () { + return initState() + .then(createRecord({ map: { a: 17, e: 2, f: 15, j: 10 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyRelIndexRange('map', 'f', 0).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['f', 'j'] })) + .then(cleanup()) + }) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('retrieves map entries nearest to key and greater, by index', function () { + const mapContext = new Context().addListIndex(-1) + return initState() + .then(createRecord({ list: [{ a: 17, e: 2, f: 15, j: 10 }] })) + .then(orderByKey('list', mapContext)) + .then(operate( + maps + .getByKeyRelIndexRange('list', 'f', 0) + .withContext(mapContext) + .andReturn(maps.returnType.KEY) + )) + .then(assertResultEql({ list: ['f', 'j'] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByValue', function () { + it('fetches a map entry identified by value', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByValue('map', 2, maps.returnType.VALUE))) + .then(assertResultEql({ map: [2] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches a map entry identified by value', function () { + return initState() + .then(createRecord({ map: { nested: { a: 1, b: 2, c: 2 } } })) + .then(operate( + maps + .getByValue('map', 2) + .withContext((ctx: cdt.Context) => ctx.addMapKey('nested')) + .andReturn(maps.returnType.KEY) + )) + .then(assertResultSatisfy((result: AerospikeBins) => eql((result.map as number[]).sort(), ['b', 'c']))) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByValueList', function () { + it('fetches a map keys and values identified by values', function () { + return initState() + .then(createRecord({ 
map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByValueList('map', [2, 3], maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['b', 2, 'c', 3] })) + .then(cleanup()) + }) + + it('does not fail if the value does not exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByValueList('map', [4], maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: [] })) + .then(cleanup()) + }) + + it('does not fail if only some values exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByValueList('map', [1, 4], maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['a', 1] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches a map keys and values identified by values', function () { + return initState() + .then(createRecord({ map: { nested: { a: 1, b: 2, c: 3 } } })) + .then(operate( + maps + .getByValueList('map', [1, 2]) + .withContext((ctx: cdt.Context) => ctx.addMapKey('nested')) + .andReturn(maps.returnType.KEY_VALUE) + )) + .then(assertResultEql({ map: ['a', 1, 'b', 2] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByValueRange', function () { + it('fetches map entries identified by value range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 2, d: 3 } })) + .then(operate(maps.getByValueRange('map', 2, 3, maps.returnType.VALUE))) + .then(assertResultEql({ map: [2, 2] })) + .then(cleanup()) + }) + + it('fetches all values from the specified start value until the end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByValueRange('map', 2, null, maps.returnType.VALUE))) + .then(assertResultSatisfy((result: AerospikeBins) => eql((result.map as number[]).sort(), [2, 3]))) + .then(cleanup()) + }) + + it('fetches all values from 
the start to the specified end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByValueRange('map', null, 2, maps.returnType.VALUE))) + .then(assertResultEql({ map: [1] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches map entries identified by value range', function () { + return initState() + .then(createRecord({ list: [{ a: 2, b: 3, c: 4 }, { a: 1, b: 2, c: 2, d: 3 }] })) + .then(operate( + maps + .getByValueRange('list', 2, 3) + .withContext((ctx: cdt.Context) => ctx.addListIndex(1)) + .andReturn(maps.returnType.VALUE) + )) + .then(assertResultEql({ list: [2, 2] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByValueRelRankRange', function () { + helper.skipUnlessVersion('>= 4.3.0', this) + + context('with count', function () { + it('retrieves map entries nearest to value and greater by relative rank', function () { + return initState() + .then(createRecord({ map: { e: 2, j: 10, f: 15, a: 17 } })) + .then(orderByKeyValue('map')) + .then(operate(maps.getByValueRelRankRange('map', 11, 1, 1).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['a'] })) + .then(cleanup()) + }) + }) + + context('without count', function () { + it('retrieves map entries nearest to value and greater by relative rank', function () { + return initState() + .then(createRecord({ map: { e: 2, j: 10, f: 15, a: 17 } })) + .then(orderByKeyValue('map')) + .then(operate(maps.getByValueRelRankRange('map', 11, -1).andReturn(maps.returnType.KEY))) + .then(assertResultEql({ map: ['j', 'f', 'a'] })) + .then(cleanup()) + }) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('retrieves map entries nearest to value and greater by relative rank', function () { + const mapContext = new Context().addMapKey('nested') + return initState() + .then(createRecord({ map: { 
nested: { e: 2, j: 10, f: 15, a: 17 } } })) + .then(orderByKeyValue('map', mapContext)) + .then(operate( + maps + .getByValueRelRankRange('map', 11, 1, 1) + .withContext(mapContext) + .andReturn(maps.returnType.KEY) + )) + .then(assertResultEql({ map: ['a'] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByIndex', function () { + it('fetches a map entry identified by index', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByIndex('map', 1, maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['b', 2] })) + .then(cleanup()) + }) + + it('fetches a map entry identified by negative index', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByIndex('map', -1, maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['c', 3] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches a map entry identified by index', function () { + return initState() + .then(createRecord({ list: [1, 2, 3, 4, { a: 1, b: 2, c: 3 }] })) + .then(operate( + maps + .getByIndex('list', 1) + .withContext((ctx: cdt.Context) => ctx.addListIndex(4)) + .andReturn(maps.returnType.KEY_VALUE) + )) + .then(assertResultEql({ list: ['b', 2] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByIndexRange', function () { + it('fetches map entries identified by index range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 2, d: 3 } })) + .then(operate(maps.getByIndexRange('map', 1, 2, maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['b', 2, 'c', 2] })) + .then(cleanup()) + }) + + it('fetches map entries identified by negative index range', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 2, d: 3 } })) + .then(operate(maps.getByIndexRange('map', -2, 2, maps.returnType.KEY_VALUE))) + 
.then(assertResultEql({ map: ['c', 2, 'd', 3] })) + .then(cleanup()) + }) + + it('fetches all map entries starting from the specified index until the end', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(operate(maps.getByIndexRange('map', 1, null, maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['b', 2, 'c', 3] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches map entries identified by index range', function () { + return initState() + .then(createRecord({ list: [{ a: 1, b: 2, c: 2, d: 3 }] })) + .then(operate( + maps + .getByIndexRange('list', 1, 2) + .withContext((ctx: cdt.Context) => ctx.addListIndex(0)) + .andReturn(maps.returnType.KEY_VALUE) + )) + .then(assertResultEql({ list: ['b', 2, 'c', 2] })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByRank', function () { + it('fetches a map entry identified by rank', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.getByRank('map', 0, maps.returnType.VALUE))) + .then(assertResultEql({ map: 1 })) + .then(cleanup()) + }) + + it('fetches a map entry identified by negative rank', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.getByRank('map', -1, maps.returnType.VALUE))) + .then(assertResultEql({ map: 3 })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches a map entry identified by rank', function () { + return initState() + .then(createRecord({ map: { nested: { a: 3, b: 2, c: 1 } } })) + .then(operate( + maps + .getByRank('map', 0) + .withContext((ctx: cdt.Context) => ctx.addMapKey('nested')) + .andReturn(maps.returnType.VALUE) + )) + .then(assertResultEql({ map: 1 })) + .then(cleanup()) + }) + }) + }) + + describe('maps.getByRankRange', function () { 
+ it('fetches map entries identified by rank range', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.getByRankRange('map', 0, 2, maps.returnType.VALUE))) + .then(assertResultEql({ map: [1, 2] })) + .then(cleanup()) + }) + + it('fetches map entries identified by negative rank range', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.getByRankRange('map', -2, 2, maps.returnType.VALUE))) + .then(assertResultEql({ map: [2, 3] })) + .then(cleanup()) + }) + + it('fetches all map entries starting at the specified rank until the end', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(operate(maps.getByRankRange('map', 1, null, maps.returnType.VALUE))) + .then(assertResultEql({ map: [2, 3] })) + .then(cleanup()) + }) + + context('with nested map context', function () { + helper.skipUnlessVersion('>= 4.6.0', this) + + it('fetches map entries identified by rank range', function () { + return initState() + .then(createRecord({ list: [{ a: 3, b: 2, c: 1 }] })) + .then(operate( + maps + .getByRankRange('list', 0, 2) + .withContext((ctx: cdt.Context) => ctx.addListIndex(0)) + .andReturn(maps.returnType.VALUE) + )) + .then(assertResultEql({ list: [1, 2] })) + .then(cleanup()) + }) + }) + }) + + context('returnTypes', function () { + it('returns nothing', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKey('map', 'b', maps.returnType.NONE))) + .then(assertResultEql({ map: null })) + .then(cleanup()) + }) + + it('returns index', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKey('map', 'a', maps.returnType.INDEX))) + .then(assertResultEql({ map: 0 })) + .then(cleanup()) + }) + + it('returns reverse index', function () { + return 
initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKey('map', 'a', maps.returnType.REVERSE_INDEX))) + .then(assertResultEql({ map: 2 })) + .then(cleanup()) + }) + + it('returns value order (rank)', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKey('map', 'a', maps.returnType.RANK))) + .then(assertResultEql({ map: 2 })) + .then(cleanup()) + }) + + it('returns reverse value order (reverse rank)', function () { + return initState() + .then(createRecord({ map: { a: 3, b: 2, c: 1 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKey('map', 'a', maps.returnType.REVERSE_RANK))) + .then(assertResultEql({ map: 0 })) + .then(cleanup()) + }) + + it('returns count of items selected', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyRange('map', 'a', 'c', maps.returnType.COUNT))) + .then(assertResultEql({ map: 2 })) + .then(cleanup()) + }) + + it('returns key for a single read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndex('map', 0, maps.returnType.KEY))) + .then(assertResultEql({ map: 'a' })) + .then(cleanup()) + }) + + it('returns keys for range read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndexRange('map', 0, 2, maps.returnType.KEY))) + .then(assertResultEql({ map: ['a', 'b'] })) + .then(cleanup()) + }) + + it('returns value for a single read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndex('map', 0, maps.returnType.VALUE))) + .then(assertResultEql({ map: 1 })) + .then(cleanup()) + }) + + it('returns values for 
range read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndexRange('map', 0, 2, maps.returnType.VALUE))) + .then(assertResultEql({ map: [1, 2] })) + .then(cleanup()) + }) + + it('returns key/value for a single read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndex('map', 0, maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['a', 1] })) + .then(cleanup()) + }) + + it('returns key/value for a range read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndexRange('map', 0, 2, maps.returnType.KEY_VALUE))) + .then(assertResultEql({ map: ['a', 1, 'b', 2] })) + .then(cleanup()) + }) + + context('returnType.EXISTS', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('returns true or false for a single key read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByKeyList('map', ['a', 'b', 'd'], maps.returnType.EXISTS))) + .then(assertResultEql({ map: true })) + .then(cleanup()) + }) + + it('returns true if any values exist', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByValueList('map', [1, 2, 4], maps.returnType.EXISTS))) + .then(assertResultEql({ map: true })) + .then(cleanup()) + }) + }) + + context('returnType.ORDERED_MAP', function () { + helper.skipUnlessVersion('>= 6.3.0', this) + it('returns key/value for a single read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndex('map', 0, maps.returnType.ORDERED_MAP))) + .then(assertResultEql({ map: { a: 1 } })) + .then(cleanup()) + }) + + 
it('returns key/value for a range read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndexRange('map', 0, 2, maps.returnType.ORDERED_MAP))) + .then(assertResultEql({ map: { a: 1, b: 2 } })) + .then(cleanup()) + }) + }) + + context('returnType.UNORDERED_MAP', function () { + helper.skipUnlessVersion('>= 6.3.0', this) + it('returns key/value for a single read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndex('map', 0, maps.returnType.UNORDERED_MAP))) + .then(assertResultEql({ map: { a: 1 } })) + .then(cleanup()) + }) + + it('returns key/value for a range read', function () { + return initState() + .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.getByIndexRange('map', 0, 2, maps.returnType.UNORDERED_MAP))) + .then(assertResultEql({ map: { a: 1, b: 2 } })) + .then(cleanup()) + }) + }) + }) +}) diff --git a/ts-test/tests/operate.ts b/ts-test/tests/operate.ts new file mode 100644 index 000000000..4729ec9a4 --- /dev/null +++ b/ts-test/tests/operate.ts @@ -0,0 +1,478 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { GeoJSON as GeoJSONType, Client as Cli, KeyOptions, AerospikeBins, AerospikeRecord, AerospikeError as ASError, Double as Doub, GeoJSON as GJ, status as statusModule, operations, RecordMetadata, WritePolicy, OperatePolicy, Key} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const Double: typeof Doub = Aerospike.Double +const GeoJSON: typeof GJ = Aerospike.GeoJSON + +const keygen: any = helper.keygen + +const status: typeof statusModule = Aerospike.status +const AerospikeError: typeof ASError = Aerospike.AerospikeError +const op: typeof operations = Aerospike.operations + +context('Operations', function () { + const client: Cli = helper.client + let key: KeyOptions; + + beforeEach(() => { + key = keygen.string(helper.namespace, helper.set, { prefix: 'test/operate' })() + const bins: AerospikeBins = { + string: 'abc', + int: 123, + double1: 1.23, + double2: new Double(1.0), + geo: new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }), + blob: Buffer.from('foo'), + list: [1, 2, 3], + map: { a: 1, b: 2, c: 3 } + } + const policy: WritePolicy = new Aerospike.WritePolicy({ + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + const meta: RecordMetadata = { ttl: 60 } + return client.put(key, bins, meta, policy) + }) + + afterEach(() => + client.remove(key) + .catch((error: any) => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_NOT_FOUND)) + ) + + describe('Client#operate()', function () { + describe('operations.write()', function () { + it('writes a new value to a bin', function () { + const ops: operations.Operation[] = [ + op.write('string', 'def'), + op.write('int', 432), + op.write('double1', 2.34), + op.write('double2', new Double(2.0)), + op.write('geo', new 
GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')), + op.write('list', [2, 3, 4]), + op.write('map', { d: 4, e: 5, f: 6 }), + op.write('boolean', true) + ] + + return client.operate(key, ops) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.string).to.equal('def') + expect(record.bins.int).to.equal(432) + expect(record.bins.double1).to.equal(2.34) + expect(record.bins.double2).to.equal(2.0) + expect(new GeoJSON(record.bins.geo as GJ).toJSON?.()).to.eql( + { type: 'Point', coordinates: [123.456, 1.308] } + ) + expect(record.bins.blob).to.eql(Buffer.from('bar')) + expect(record.bins.list).to.eql([2, 3, 4]) + expect(record.bins.map).to.eql({ d: 4, e: 5, f: 6 }) + expect(record.bins.boolean).to.eql(true) + }) + }) + + it('deletes a bin by writing null to it', function () { + const ops: operations.Operation[] = [ + op.write('string', null) + ] + + return client.operate(key, ops) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins).to.not.have.key('string') + }) + }) + }) + + describe('operations.add()', function () { + it('adds an integer value to a bin', function () { + const ops: operations.Operation[] = [ + op.add('int', 432) + ] + + return client.operate(key, ops) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.int).to.equal(555) + }) + }) + + it('adds a double value to a bin', function () { + const ops: operations.Operation[] = [ + op.add('double1', 3.45), + op.add('double2', new Double(3.14159)) + ] + + return client.operate(key, ops) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.double1).to.equal(4.68) + expect(record.bins.double2).to.equal(4.14159) + }) + }) + + it('can be called using the "incr" alias', function () { + const ops: operations.Operation[] = [ + op.incr('int', 432) + ] + + return client.operate(key, ops) + .then(() => client.get(key)) 
+ .then((record: AerospikeRecord) => { + expect(record.bins.int).to.equal(555) + }) + }) + /* + it('returns a parameter error when trying to add a string value', function () { + const ops = [ + op.add('int', 'abc') + ] + + return client.operate(key, ops) + .catch(error => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_PARAM)) + }) + */ + }) + + describe('operations.append()', function () { + it('appends a string value to a string bin', function () { + const ops: operations.Operation[] = [ + op.append('string', 'def') + ] + + return client.operate(key, ops) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.string).to.equal('abcdef') + }) + }) + + /* + it('returns a parameter error when trying to append a numeric value', function () { + const ops = [ + op.append('string', 123) + ] + + return client.operate(key, ops) + .catch(error => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_PARAM)) + }) + */ + }) + + describe('operations.prepend()', function () { + it('prepends a string value to a string bin', function () { + const ops: operations.Operation[] = [ + op.prepend('string', 'def') + ] + + return client.operate(key, ops) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.string).to.equal('defabc') + }) + }) + /* + it('returns a parameter error when trying to prepend a numeric value', function () { + const ops = [ + op.prepend('string', 123) + ] + + return client.operate(key, ops) + .catch(error => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_PARAM)) + }) + */ + }) + + describe('operations.touch()', function () { + // TEST LOGIC + // 1. Write a record to an aerospike server. + // 2. Read the record to get the TTL and calculate the difference in + // the TTL written and the TTL returned by server. + // 3. Touch the record with a defined TTL. + // 4. 
Read the record and calculate the difference in the TTL between the + // touch TTL value and read TTL value. + // 5. Compare the difference with the earlier difference observed. + // 6. This is to account for the clock asynchronicity between the + // client and the server machines. + // 7. Server returns the timestamp at which the record expires + // according the server clock. + // 8. The client calculates and returns the TTL based on the returned + // timestamp. In case the client and server clocks are not in sync, + // the calculated TTL may seem to be inaccurate. Nevertheless, the + // server will expire the record at the correct time. + it('updates the record\'s time-to-live (TTL)', async function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/operate/ttl' })() + const bins: AerospikeBins = { i: 123, s: 'abc' } + const meta: RecordMetadata = { ttl: 1000 } + + await client.put(key, bins, meta) + + let record: AerospikeRecord = await client.get(key) + const ttlDiff: number = record.ttl - meta.ttl!; + + const ops: operations.Operation[] = [ + op.touch(2592000) // 30 days + ] + await client.operate(key, ops) + + record = await client.get(key) + expect(record.ttl).to.be.above(2592000 + ttlDiff - 10) + expect(record.ttl).to.be.below(2592000 + ttlDiff + 10) + + await client.remove(key) + }) + }) + + describe('operations.delete()', function () { + helper.skipUnlessVersion('>= 4.7.0', this) + + it('deletes the record', function () { + const ops: operations.Operation[] = [op.delete()] + return client.operate(key, ops) + .then(() => client.exists(key)) + .then((exists: boolean) => expect(exists).to.be.false) + }) + + it('performs an atomic read-and-delete', function () { + const ops: operations.Operation[] = [ + op.read('string'), + op.delete() + ] + return client.operate(key, ops) + .then((result: AerospikeRecord) => expect(result.bins.string).to.eq('abc')) + .then(() => client.exists(key)) + .then((exists: boolean) => 
expect(exists).to.be.false) + }) + }) + + context('with OperatePolicy', function () { + context('exists policy', function () { + context('policy.exists.UPDATE', function () { + const policy: OperatePolicy = new Aerospike.policy.OperatePolicy({ + exists: Aerospike.policy.exists.UPDATE + }) + + it('does not create a key that does not exist yet', function () { + const notExistentKey = keygen.string(helper.namespace, helper.set, { prefix: 'test/operate/doesNotExist' })() + const ops = [op.write('i', 49)] + + return client.operate(notExistentKey, ops, {}, policy) + .then(() => 'error expected') + .catch((error: any) => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_NOT_FOUND)) + .then(() => client.exists(notExistentKey)) + .then((exists: boolean) => expect(exists).to.be.false) + }) + }) + }) + + context('readTouchTtlPercent policy', function () { + helper.skipUnlessVersion('>= 7.1.0', this) + + this.timeout(4000) + it('100% touches record', async function () { + const ops: operations.Operation[] = [op.read('i')] + const policy: OperatePolicy = new Aerospike.OperatePolicy({ + readTouchTtlPercent: 100 + }) + + await client.put(new Aerospike.Key('test', 'demo', 'operateTtl1'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + let record: AerospikeRecord = await client.operate(new Aerospike.Key('test', 'demo', 'operateTtl1'), ops, null, policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(new Aerospike.Key('test', 'demo', 'operateTtl1'), policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(new Aerospike.Key('test', 'demo', 'operateTtl1')) + }) + + it('71% touches record', async function () { + const ops = [op.read('i')] + const policy: OperatePolicy = new Aerospike.OperatePolicy({ + readTouchTtlPercent: 71 + }) + + await client.put(new Aerospike.Key('test', 'demo', 
'operateTtl1'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + let record: AerospikeRecord = await client.operate(new Aerospike.Key('test', 'demo', 'operateTtl1'), ops, null, policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(new Aerospike.Key('test', 'demo', 'operateTtl1'), policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(9, 10) + + await client.remove(new Aerospike.Key('test', 'demo', 'operateTtl1')) + }) + + it('60% does not touch record', async function () { + const ops = [op.read('i')] + const policy: OperatePolicy = new Aerospike.OperatePolicy({ + readTouchTtlPercent: 60 + }) + + await client.put(new Aerospike.Key('test', 'demo', 'operateTtl1'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + let record: AerospikeRecord = await client.operate(new Aerospike.Key('test', 'demo', 'operateTtl1'), ops, null, policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(new Aerospike.Key('test', 'demo', 'operateTtl1'), policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + await client.remove(new Aerospike.Key('test', 'demo', 'operateTtl1')) + }) + + it('0% does not touch record', async function () { + const ops = [op.read('i')] + const policy: OperatePolicy = new Aerospike.OperatePolicy({ + readTouchTtlPercent: 0 + }) + + await client.put(new Aerospike.Key('test', 'demo', 'operateTtl1'), { i: 2 }, { ttl: 10 }) + await new Promise(resolve => setTimeout(resolve, 3000)) + + let record: AerospikeRecord = await client.operate(new Aerospike.Key('test', 'demo', 'operateTtl1'), ops, null, policy) + expect(record.bins).to.eql({ i: 2 }) + expect(record.ttl).to.be.within(7, 8) + + record = await client.get(new Aerospike.Key('test', 'demo', 'operateTtl1'), policy) + expect(record.bins).to.eql({ i: 2 }) + 
expect(record.ttl).to.be.within(7, 8) + + await client.remove(new Aerospike.Key('test', 'demo', 'operateTtl1')) + }) + }) + + context('gen policy', function () { + context('policy.gen.EQ', function () { + const policy: OperatePolicy = new Aerospike.OperatePolicy({ + gen: Aerospike.policy.gen.EQ + }) + + it('executes the operation if the generation matches', function () { + const ops: operations.Operation[] = [op.add('int', 7)] + const meta: RecordMetadata= { gen: 1 } + + return client.operate(key, ops, meta, policy) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => expect(record.bins.int).to.equal(130)) + }) + + it('rejects the operation if the generation does not match', function () { + const ops = [op.add('int', 7)] + const meta = { gen: 99 } + + return client.operate(key, ops, meta, policy) + .then(() => 'error expected') + .catch((error: any) => { + expect(error).to.be.instanceof(AerospikeError) + .with.property('code', status.ERR_RECORD_GENERATION) + return Promise.resolve(true) + }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => expect(record.bins.int).to.equal(123)) + }) + }) + }) + + context('with deserialize: false', function () { + const policy: OperatePolicy = new Aerospike.OperatePolicy({ + deserialize: false + }) + + it('returns list and map bins as byte buffers', function () { + const ops: operations.Operation[] = [op.read('int'), op.read('list'), op.read('map')] + + return client.operate(key, ops, null, policy) + .then((record: AerospikeRecord) => { + expect(record.bins.int).to.equal(123) + expect(record.bins.list).to.eql(Buffer.from([0x93, 0x01, 0x02, 0x03])) + expect(record.bins.map).to.eql(Buffer.from([0x84, 0xc7, 0x00, 0x01, 0xc0, 0xa2, 0x03, 0x61, 0x01, 0xa2, 0x03, 0x62, 0x02, 0xa2, 0x03, 0x63, 0x03])) + }) + }) + }) + }) + + it('calls the callback function with the results of the operation', function (done) { + const ops: operations.Operation[] = [ + op.read('int') + ] + + client.operate(key, ops, (error?: 
ASError, result?: AerospikeRecord) => { + if (error) throw error + expect(result?.bins.int).to.equal(123) + done() + }) + }) + }) + + describe('Client#add', function () { + it('acts as a shortcut for the add operation', function () { + return client.add(key, { int: 234 }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.int).to.equal(357) + }) + }) + }) + + describe('Client#incr', function () { + it('acts as a shortcut for the add operation', function () { + return client.incr(key, { int: 234 }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.int).to.equal(357) + }) + }) + }) + + describe('Client#append', function () { + it('acts as a shortcut for the append operation', function () { + return client.append(key, { string: 'def' }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.string).to.equal('abcdef') + }) + }) + }) + + describe('Client#prepend', function () { + it('acts as a shortcut for the prepend operation', function () { + return client.prepend(key, { string: 'def' }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins.string).to.equal('defabc') + }) + }) + }) +}) diff --git a/ts-test/tests/policy.ts b/ts-test/tests/policy.ts new file mode 100644 index 000000000..2b72f1d30 --- /dev/null +++ b/ts-test/tests/policy.ts @@ -0,0 +1,260 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { BasePolicy as BP, ApplyPolicy, WritePolicy, ReadPolicy, BatchPolicy, InfoPolicy, RemovePolicy, OperatePolicy, ScanPolicy, QueryPolicy} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const BasePolicy: typeof BP = Aerospike.BasePolicy +require('./test_helper') + +context('Client Policies #noserver', function () { + describe('BasePolicy', function () { + describe('new BasePolicy', function () { + it('sets the policy values from a value object', function () { + const subject: BP = new BasePolicy({ + socketTimeout: 200, + totalTimeout: 0, + maxRetries: 2, + compress: true + }) + + expect(subject.socketTimeout).to.equal(200) + expect(subject.totalTimeout).to.equal(0) + expect(subject.maxRetries).to.equal(2) + expect(subject.compress).to.be.true + }) + }) + }) + + describe('ApplyPolicy', function () { + describe('new ApplyPolicy', function () { + it('sets the policy values from a value object', function () { + const subject: ApplyPolicy = new Aerospike.ApplyPolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + key: Aerospike.policy.key.SEND, + commitLevel: 2, + ttl: 3600, + durableDelete: true + }) + + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + 
expect(subject.key).to.equal(Aerospike.policy.key.SEND) + expect(subject.commitLevel).to.equal(2) + expect(subject.ttl).to.equal(3600) + expect(subject.durableDelete).to.be.true + }) + }) + }) + + describe('WritePolicy', function () { + describe('new WritePolicy', function () { + it('sets the policy values from a value object', function () { + const subject: WritePolicy = new Aerospike.WritePolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + compressionThreshold: 500, + key: Aerospike.policy.key.SEND, + gen: Aerospike.policy.gen.EQ, + exists: Aerospike.policy.exists.CREATE, + commitLevel: 2, + durableDelete: true + }) + + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.compressionThreshold).to.equal(500) + expect(subject.key).to.equal(Aerospike.policy.key.SEND) + expect(subject.gen).to.equal(Aerospike.policy.gen.EQ) + expect(subject.exists).to.equal(Aerospike.policy.exists.CREATE) + expect(subject.commitLevel).to.equal(2) + expect(subject.durableDelete).to.be.true + }) + }) + }) + + describe('ReadPolicy', function () { + describe('new ReadPolicy', function () { + it('sets the policy values from a value object', function () { + const subject: ReadPolicy = new Aerospike.ReadPolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + key: Aerospike.policy.key.SEND, + replica: Aerospike.policy.replica.MASTER, + readModeAP: Aerospike.policy.readModeAP.ONE, + readModeSC: Aerospike.policy.readModeSC.SESSION + }) + + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.key).to.equal(Aerospike.policy.key.SEND) + expect(subject.replica).to.equal(Aerospike.policy.replica.MASTER) + expect(subject.readModeAP).to.equal(Aerospike.policy.readModeAP.ONE) + expect(subject.readModeSC).to.equal(Aerospike.policy.readModeSC.SESSION) + }) + }) + }) + + 
describe('BatchPolicy', function () { + describe('new BatchPolicy', function () { + it('sets the policy values from a value object', function () { + const subject: BatchPolicy = new Aerospike.BatchPolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + readModeAP: Aerospike.policy.readModeAP.ONE, + readModeSC: Aerospike.policy.readModeSC.SESSION, + allowInline: false, + sendSetName: true + }) + + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.readModeAP).to.equal(Aerospike.policy.readModeAP.ONE) + expect(subject.readModeSC).to.equal(Aerospike.policy.readModeSC.SESSION) + expect(subject.allowInline).to.be.false + expect(subject.sendSetName).to.be.true + }) + }) + }) + + describe('InfoPolicy', function () { + describe('new InfoPolicy', function () { + it('sets the policy values from a value object', function () { + const subject: InfoPolicy = new Aerospike.InfoPolicy({ + timeout: 1000, + sendAsIs: true, + checkBounds: false + }) + + expect(subject.timeout).to.equal(1000) + expect(subject.sendAsIs).to.be.true + expect(subject.checkBounds).to.be.false + }) + }) + }) + + describe('RemovePolicy', function () { + describe('new RemovePolicy', function () { + it('sets the policy values from a value object', function () { + const subject: RemovePolicy = new Aerospike.RemovePolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + generation: 1234, + key: Aerospike.policy.key.SEND, + gen: Aerospike.policy.gen.EQ, + commitLevel: 2, + durableDelete: true + }) + + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.generation).to.equal(1234) + expect(subject.key).to.equal(Aerospike.policy.key.SEND) + expect(subject.gen).to.equal(Aerospike.policy.gen.EQ) + expect(subject.commitLevel).to.equal(2) + expect(subject.durableDelete).to.be.true + }) + }) + }) 
+ + describe('OperatePolicy', function () { + describe('new OperatePolicy', function () { + it('sets the policy values from a value object', function () { + const subject: OperatePolicy = new Aerospike.OperatePolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + key: Aerospike.policy.key.SEND, + gen: Aerospike.policy.gen.EQ, + replica: Aerospike.policy.replica.MASTER, + readModeAP: Aerospike.policy.readModeAP.ONE, + readModeSC: Aerospike.policy.readModeSC.SESSION, + commitLevel: 2, + durableDelete: true + }) + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.key).to.equal(Aerospike.policy.key.SEND) + expect(subject.gen).to.equal(Aerospike.policy.gen.EQ) + expect(subject.replica).to.equal(Aerospike.policy.replica.MASTER) + expect(subject.readModeAP).to.equal(Aerospike.policy.readModeAP.ONE) + expect(subject.readModeSC).to.equal(Aerospike.policy.readModeSC.SESSION) + expect(subject.commitLevel).to.equal(2) + expect(subject.durableDelete).to.be.true + }) + }) + }) + + describe('ScanPolicy', function () { + describe('new ScanPolicy', function () { + it('sets the policy values from a value object', function () { + const subject: ScanPolicy = new Aerospike.ScanPolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + durableDelete: true, + recordsPerSecond: 100 + }) + + expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.durableDelete).to.be.true + expect(subject.recordsPerSecond).to.equal(100) + }) + }) + }) + + describe('QueryPolicy', function () { + describe('new QueryPolicy', function () { + it('sets the policy values from a value object', function () { + const subject: QueryPolicy = new Aerospike.QueryPolicy({ + socketTimeout: 1000, + totalTimeout: 2000, + maxRetries: 1, + failOnClusterChange: true, + infoTimeout: 5000 + }) + + 
expect(subject.socketTimeout).to.equal(1000) + expect(subject.totalTimeout).to.equal(2000) + expect(subject.maxRetries).to.equal(1) + expect(subject.failOnClusterChange).to.equal(true) + expect(subject.infoTimeout).to.equal(5000) + }) + }) + }) +}) diff --git a/ts-test/tests/put.ts b/ts-test/tests/put.ts new file mode 100644 index 000000000..8fa7f84dc --- /dev/null +++ b/ts-test/tests/put.ts @@ -0,0 +1,638 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, context */ + +import Aerospike, { status as statusModule, AerospikeError as ASError, Double as Doub, GeoJSON as GJ, Client as Cli, RecordMetadata, AerospikeBins, AerospikeRecord, Key, WritePolicy, Bin} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen: any = helper.keygen +const metagen: any = helper.metagen +const recgen: any = helper.recgen +const valgen: any = helper.valgen + +const status: typeof statusModule = Aerospike.status +const AerospikeError: typeof ASError = Aerospike.AerospikeError +const Double: typeof Doub = Aerospike.Double +const GeoJSON: typeof GJ = Aerospike.GeoJSON + + +describe('client.put()', function () { + const client: Cli = helper.client + + it('should write and validate records', function (done) { + const meta: RecordMetadata = { ttl: 1000 } + const putAndGet: Function = function (key: Key, bins: AerospikeBins, cb: Function) { + client.put(key, bins, meta, function (err?: ASError) { + if (err) throw err + client.get(key, function (err?: ASError, record?: AerospikeRecord) { + if (err) throw err + expect(bins).to.eql(record?.bins) + cb() + }) + }) + } + + const kgen: Function = keygen.string(helper.namespace, helper.set, { + prefix: 'test/put/putAndGet/', + random: false + }) + const rgen: Function = recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() }) + const total: number = 50 + let count: number = 0 + + for (let i = 0; i < total; i++) { + putAndGet(kgen(), rgen(), function () { + count++ + if (count === total) { + done() + } + }) + } + }) + + context('records with various key types', function () { + it('should write a record w/ string key', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const record: AerospikeRecord = recgen.record({ i: valgen.integer(), s: 
valgen.string() })() + + client.put(key, record, function (err?: ASError) { + if (err) throw err + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + + it('should write a record w/ integer key', function (done) { + const key: Key = keygen.integer(helper.namespace, helper.set)() + const record: AerospikeRecord = recgen.record({ i: valgen.integer(), s: valgen.string() })() + + client.put(key, record, function (err?: ASError) { + if (err) throw err + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + + context('BigInt keys', function () { + + it('should write a record w/ BigInt key', async function () { + const key: Key = new Aerospike.Key(helper.namespace, helper.set, BigInt(2) ** BigInt(63) - BigInt(1)) + const record: AerospikeRecord = recgen.record({ i: valgen.integer(), s: valgen.string() })() + + await client.put(key, record) + const result = await client.get(key) + expect(result.bins).to.eql(record) + await client.remove(key) + }) + }) + + it('should write a record w/ byte array key', function (done) { + const key: Key = keygen.bytes(helper.namespace, helper.set)() + const record: AerospikeRecord = recgen.record({ i: valgen.integer(), s: valgen.string() })() + + client.put(key, record, function (err?: ASError) { + if (err) throw err + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + }) + + context('bins with various data types', function () { + const meta: RecordMetadata = { ttl: 600 } + const policy: WritePolicy = new Aerospike.WritePolicy({ + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + + function putGetVerify (bins: AerospikeBins | Bin, expected: AerospikeBins, done: any) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + client.put(key, bins, meta, policy, function (err?: ASError) { + if (err) throw err + client.get(key, function (err?: ASError, record?: AerospikeRecord) 
{ + if (err) throw err + expect(record?.bins).to.eql(expected) + client.remove(key, done) + }) + }) + } + + it('writes bin with string values and reads it back', function (done) { + const record: AerospikeBins = { string: 'hello world' } + const expected: AerospikeBins = { string: 'hello world' } + putGetVerify(record, expected, done) + }) + + it('writes bin with integer values and reads it back', function (done) { + const record: AerospikeBins = { low: Number.MIN_SAFE_INTEGER, high: Number.MAX_SAFE_INTEGER } + const expected: AerospikeBins = { low: -9007199254740991, high: 9007199254740991 } + putGetVerify(record, expected, done) + }) + + it('writes bin with Buffer value and reads it back', function (done) { + const record: AerospikeBins = { buffer: Buffer.from([0x61, 0x65, 0x72, 0x6f, 0x73, 0x70, 0x69, 0x6b, 0x65]) } + const expected: AerospikeBins = { buffer: Buffer.from([0x61, 0x65, 0x72, 0x6f, 0x73, 0x70, 0x69, 0x6b, 0x65]) } + putGetVerify(record, expected, done) + }) + + it('writes bin with float value as double and reads it back', function (done) { + const record: AerospikeBins = { double: 3.141592653589793 } + const expected: AerospikeBins = { double: 3.141592653589793 } + putGetVerify(record, expected, done) + }) + + it('writes bin with Double value as double and reads it back', function (done) { + const record: AerospikeBins = { double: new Double(3.141592653589793) } + const expected: AerospikeBins = { double: 3.141592653589793 } + putGetVerify(record, expected, done) + }) + + it('writes bin with GeoJSON value and reads it back as string', function (done) { + const record: AerospikeBins = { geo: new GeoJSON.Point(103.8, 1.283) } + const expected: AerospikeBins = { geo: '{"type":"Point","coordinates":[103.8,1.283]}' } + putGetVerify(record, expected, done) + }) + + it('writes bin with array value as list and reads it back', function (done) { + const record: AerospikeBins = { + list: [ + 1, + 'foo', + 1.23, + new Double(3.14), + Buffer.from('bar'), + new 
GeoJSON.Point(103.8, 1.283), + [1, 2, 3], + { a: 1, b: 2 }, + false + ] + } + const expected: AerospikeBins = { + list: [ + 1, + 'foo', + 1.23, + 3.14, + Buffer.from('bar'), + '{"type":"Point","coordinates":[103.8,1.283]}', + [1, 2, 3], + { a: 1, b: 2 }, + false + ] + } + putGetVerify(record, expected, done) + }) + + it('writes bin with object value as map and reads it back', function (done) { + const record: AerospikeBins = { + map: { + a: 1, + b: 'foo', + c: 1.23, + d: new Double(3.14), + e: Buffer.from('bar'), + f: new GeoJSON.Point(103.8, 1.283), + g: [1, 2, 3], + h: { a: 1, b: 2 }, + i: true + } + } + const expected: AerospikeBins = { + map: { + a: 1, + b: 'foo', + c: 1.23, + d: 3.14, + e: Buffer.from('bar'), + f: '{"type":"Point","coordinates":[103.8,1.283]}', + g: [1, 2, 3], + h: { a: 1, b: 2 }, + i: true + } + } + putGetVerify(record, expected, done) + }) + + it('writes bin with Map value as map and reads it back as an ordered object', function (done) { + const record: AerospikeBins = { + map: new Map([['g', [1, 2, 3]], ['h', { a: 1, b: 2 }], ['j', new Map([['b', 'foo'], ['a', 1]])], + ['d', new Double(3.14)], ['e', Buffer.from('bar')], ['f', new GeoJSON.Point(103.8, 1.283)], + ['a', 1], ['b', 'foo'], ['c', 1.23]] + ) + } + const expected: AerospikeBins = { + map: { + a: 1, + b: 'foo', + c: 1.23, + d: 3.14, + e: Buffer.from('bar'), + f: '{"type":"Point","coordinates":[103.8,1.283]}', + g: [1, 2, 3], + h: { a: 1, b: 2 }, + j: { a: 1, b: 'foo' } + } + } + putGetVerify(record, expected, done) + }) + + it('writes bin with the Bin class and reads it back as an object', function (done) { + const record: Bin = new Aerospike.Bin('map', { + g: [1, 2, 3], + h: { a: 1, b: 2 }, + j: new Map([['b', 'foo'], ['a', 1]]), + e: Buffer.from('bar'), + f: '{"type":"Point","coordinates":[103.8,1.283]}', + a: 1, + b: 'foo', + c: 1.23, + d: 3.14 + }) + const expected: AerospikeBins = { + map: { + a: 1, + b: 'foo', + c: 1.23, + d: 3.14, + e: Buffer.from('bar'), + f: 
'{"type":"Point","coordinates":[103.8,1.283]}', + g: [1, 2, 3], + h: { a: 1, b: 2 }, + j: { a: 1, b: 'foo' } + } + } + putGetVerify(record, expected, done) + }) + + context('BigInt values', function () { + + it('writes bin with BigInt value and reads it back as a Number', function (done) { + const record: AerospikeBins = { bigint: BigInt(42) } + const expected: AerospikeBins = { bigint: 42 } + putGetVerify(record, expected, done) + }) + + it('writes bin with BigInt value outside safe Number range', function (done) { + const tooLargeForNumber: BigInt = BigInt(Number.MAX_SAFE_INTEGER) + BigInt(2) + const record: AerospikeBins = { bigint: tooLargeForNumber } + const expected: AerospikeBins = { bigint: tooLargeForNumber } + putGetVerify(record, expected, done) + }) + }) + + context('Boolean values', function () { + helper.skipUnlessVersion('>= 5.6.0', this) + + it('writes bin with boolean value and reads it back', function (done) { + const record: AerospikeBins = { bool: true, bool2: false } + const expected: AerospikeBins = { bool: true, bool2: false } + putGetVerify(record, expected, done) + }) + }) + + context('invalid bin values', function () { + it('should fail with a parameter error when trying to write an undefined bin value', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const record: AerospikeBins = { valid: 123, invalid: undefined } + + client.put(key, record, function (err?: ASError) { + expect(err?.code).to.equal(status.ERR_PARAM) + + client.remove(key, function (err?: ASError) { + expect(err?.code).to.equal(status.ERR_RECORD_NOT_FOUND) + done() + }) + }) + }) + }) + }) + + context('bin names', function () { + helper.skipUnlessVersion('>= 4.2.0', this) + + it('should write a bin with a name of max. 
length 15', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const bins: any = { 'bin-name-len-15': 'bin name with 15 chars' } + + return client.put(key, bins) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins).to.eql({ + 'bin-name-len-15': 'bin name with 15 chars' + }) + }).then(() => client.remove(key)) + }) + + it('should return a parameter error when bin length exceeds 15 chars', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const bins: AerospikeBins = { 'bin-name-size-16': 'bin name with 16 chars' } + + return client.put(key, bins) + .then(() => 'no error') + .catch((error: any) => error) + .then((error: any) => { + expect(error).to.be.instanceof(AerospikeError) + .that.has.property('code', Aerospike.status.ERR_REQUEST_INVALID) + }) + }) + }) + + it('should delete a bin when writing null to it', async function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const record: AerospikeBins = { bin1: 123, bin2: 456 } + await client.put(key, record) + + const update: AerospikeBins = { bin1: null } + await client.put(key, update) + + const result: AerospikeRecord = await client.get(key) + const expected: AerospikeBins = { bin2: 456 } + expect(result.bins).to.eql(expected) + await client.remove(key) + }) + + it('should write, read, write, and check gen', function (done) { + const kgen: Function = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' }) + const mgen: Function = metagen.constant({ ttl: 1000 }) + const rgen: Function = recgen.record({ i: valgen.integer(), s: valgen.string() }) + + const key: Key = kgen() + const meta: RecordMetadata = mgen(key) + const bins: AerospikeBins = rgen(key, meta) + + // write the record then check + client.put(key, bins, meta, function (err?: ASError, key1?: Key) { + if (err) throw err + expect(key1!).to.eql(key) 
+ + client.get(key1!, function (err?: ASError, record2?: AerospikeRecord) { + if (err) throw err + expect(record2?.key).to.eql(key) + expect(record2?.bins).to.eql(bins) + + record2!.bins.i = (record2!.bins.i as number) + 1; + + client.put(record2?.key!, record2?.bins!, meta, function (err?: ASError, key3?: Key) { + if (err) throw err + expect(key3).to.eql(key) + + client.get(key3!, function (err?: ASError, record4?: AerospikeRecord) { + if (err) throw err + expect(record4?.key).to.eql(key) + expect(record4?.bins).to.eql(record2?.bins) + expect(record4?.gen!).to.equal(record2?.gen! + 1) + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + }) + }) + }) + + it('should write, read, remove, read, write, and check gen', function (done) { + const kgen = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' }) + const mgen = metagen.constant({ ttl: 1000 }) + const rgen = recgen.record({ i: valgen.integer(), s: valgen.string() }) + + const key = kgen() + const meta = mgen(key) + const bins = rgen(key, meta) + + // write the record then check + client.put(key, bins, meta, function (err?: ASError, key1?: Key) { + if (err) throw err + expect(key1!).to.eql(key) + + client.get(key1!, function (err?: ASError, record2?: AerospikeRecord) { + if (err) throw err + expect(record2?.key).to.eql(key) + expect(record2?.bins).to.eql(bins) + + client.remove(record2?.key!, function (err?: ASError, key3?: Key) { + if (err) throw err + expect(key3).to.eql(key) + + client.get(key3!, function (err?: ASError, record4?: AerospikeRecord) { + expect(err?.code).to.eql(status.ERR_RECORD_NOT_FOUND) + + client.put(record4?.key!, bins, meta, function (err?: ASError, key5?: Key) { + if (err) throw err + expect(key5!).to.eql(key) + + client.get(key5!, function (err?: ASError, record6?: AerospikeRecord) { + if (err) throw err + expect(record6?.key).to.eql(key) + expect(record6?.bins).to.eql(bins) + expect(record6?.gen).to.eql(1) + + 
client.remove(record6?.key!, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + }) + }) + }) + }) + }) + /* + it('should fail with a parameter error if gen is invalid', function (done) { + const key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const bins = recgen.record({ i: valgen.integer(), s: valgen.string() })() + const meta = { + gen: 'generation1' + } + + client.put(key, bins, meta, (error: any) => { + expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_PARAM) + done() + }) + }) + */ + /* + it('should fail with a parameter error if ttl is invalid', function (done) { + const key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const bins = recgen.record({ i: valgen.integer(), s: valgen.string() })() + const meta = { + ttl: 'time-to-live' + } + + client.put(key, bins, meta, (error: any) => { + expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_PARAM) + done() + }) + }) + */ + + it('should write null for bins with empty list and map', function (done) { + // generators + const kgen = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' }) + const mgen = metagen.constant({ ttl: 1000 }) + const rgen = recgen.record({ + l: valgen.constant([1, 2, 3]), + le: valgen.constant([]), + m: valgen.constant({ a: 1, b: 2 }), + me: valgen.constant({}) + }) + + // values + const key = kgen() + const meta = mgen(key) + const bins = rgen(key, meta) + + // write the record then check + client.put(key, bins, meta, function (err?: ASError, key1?: Key) { + if (err) throw err + expect(key1!).to.eql(key) + + client.get(key1!, function (err?: ASError, record2?: AerospikeRecord) { + if (err) throw err + expect(record2?.key).to.eql(key) + expect(record2?.bins).to.eql(bins) + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + }) + + it('should write a key without set name', function (done) { + const 
noSet: null = null + const key: Key = keygen.string(helper.namespace, noSet, { prefix: 'test/put/' })() + const record: AerospikeBins = { bin1: 123, bin2: 456 } + + client.put(key, record, function (err?: ASError) { + if (err) throw err + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + + it('should write a map with undefined entry and verify the record', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/put/' })() + const record: AerospikeBins = { + list: [1, 2, 3, undefined], + map: { a: 1, b: 2, c: undefined } + } + + client.put(key, record, function (err?: ASError) { + if (err) throw err + + client.get(key, function (err?: ASError, record?: AerospikeRecord) { + if (err) throw err + expect(record?.bins.map).to.eql({ a: 1, b: 2, c: null }) + expect(record?.bins.list).to.eql([1, 2, 3, null]) + + client.remove(key, function (err?: ASError) { + if (err) throw err + done() + }) + }) + }) + }) + + context('exists policy', function () { + context('policy.exists.UPDATE', function () { + it('does not create a key that does not exist yet', function () { + const key: Key = keygen.integer(helper.namespace, helper.set)() + const policy: WritePolicy = new Aerospike.policy.WritePolicy({ + exists: Aerospike.policy.exists.UPDATE + }) + + return client.put(key, { i: 49 }, {}, policy) + .catch((error: any) => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_NOT_FOUND)) + .then(() => client.exists(key)) + .then((exists: any) => expect(exists).to.be.false) + }) + }) + + context('policy.exists.CREATE', function () { + it('does not update a record if it already exists', function () { + const key: any = keygen.integer(helper.namespace, helper.set)() + const policy: any = new Aerospike.policy.WritePolicy({ + exists: Aerospike.policy.exists.CREATE + }) + + return client.put(key, { i: 49 }, {}, policy) + .then(() => client.put(key, { i: 50 }, {}, policy)) + 
.catch((error: any) => expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_EXISTS)) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => expect(record.bins.i).to.equal(49)) + }) + }) + }) + + context('gen policy', function () { + it('updates record if generation matches', function () { + const key = keygen.integer(helper.namespace, helper.set)() + const policy = new Aerospike.WritePolicy({ + gen: Aerospike.policy.gen.EQ + }) + + return client.put(key, { i: 1 }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => expect(record.gen).to.equal(1)) + .then(() => client.put(key, { i: 2 }, { gen: 1 }, policy)) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins).to.eql({ i: 2 }) + expect(record.gen).to.equal(2) + }) + .then(() => client.remove(key)) + }) + + it('does not update record if generation does not match', function () { + const key = keygen.integer(helper.namespace, helper.set)() + const policy = new Aerospike.WritePolicy({ + gen: Aerospike.policy.gen.EQ + }) + + return client.put(key, { i: 1 }) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => expect(record.gen).to.equal(1)) + .then(() => client.put(key, { i: 2 }, { gen: 99 }, policy)) + .catch((err: any) => expect(err.code).to.equal(status.ERR_RECORD_GENERATION)) + .then(() => client.get(key)) + .then((record: AerospikeRecord) => { + expect(record.bins).to.eql({ i: 1 }) + expect(record.gen).to.equal(1) + }) + .then(() => client.remove(key)) + }) + }) +}) diff --git a/ts-test/tests/query.ts b/ts-test/tests/query.ts new file mode 100644 index 000000000..655814fbc --- /dev/null +++ b/ts-test/tests/query.ts @@ -0,0 +1,1172 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Client, Query, Job as J, exp as expModule, cdt, AerospikeError as ASError, GeoJSON as GJ, GeoJSONType, RecordStream, Key as K, filter as filterModule, operations, indexDataType, indexType, QueryOptions, AerospikeRecord, AerospikeBins} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const query: typeof Query = Aerospike.Query +const Job: typeof J = Aerospike.Job + +const exp: typeof expModule = Aerospike.exp +const Context: typeof cdt.Context = Aerospike.cdt.Context + +const AerospikeError: typeof ASError = Aerospike.AerospikeError +const GeoJSON: typeof GJ= Aerospike.GeoJSON +const Key: typeof K = Aerospike.Key +const filter: typeof filterModule = Aerospike.filter +const op: typeof operations = Aerospike.operations + +const NUMERIC: indexDataType = Aerospike.indexDataType.NUMERIC +const STRING: indexDataType = Aerospike.indexDataType.STRING +const GEO2DSPHERE: indexDataType = Aerospike.indexDataType.GEO2DSPHERE +const BLOB: indexDataType = Aerospike.indexDataType.BLOB + +const LIST: indexType = Aerospike.indexType.LIST +const MAPVALUES: indexType = Aerospike.indexType.MAPVALUES +const MAPKEYS: indexType = Aerospike.indexType.MAPKEYS + +const keygen: any = helper.keygen +const metagen: any = helper.metagen +const putgen: any = helper.putgen +let samples: any; + +describe('Queries', function () 
{ + const client: Client = helper.client + + const testSet = 'test/query-' + Math.floor(Math.random() * 100000) + samples = [ + { name: 'int match', i: 5 }, + { name: 'int non-match', i: 500 }, + { name: 'int list match', li: [1, 5, 9] }, + { name: 'int list non-match', li: [500, 501, 502] }, + { name: 'int map match', mi: { a: 1, b: 5, c: 9 } }, + { name: 'int map non-match', mi: { a: 500, b: 501, c: 502 } }, + { name: 'string match', s: 'banana' }, + { name: 'string non-match', s: 'tomato' }, + { name: 'string list match', ls: ['banana', 'blueberry'] }, + { name: 'string list non-match', ls: ['tomato', 'cuccumber'] }, + { name: 'string map match', ms: { a: 'banana', b: 'blueberry' } }, + { name: 'string map non-match', ms: { a: 'tomato', b: 'cuccumber' } }, + { name: 'string mapkeys match', mks: { banana: 1, blueberry: 2 } }, + { name: 'string mapkeys non-match', mks: { tomato: 3, cuccumber: 4 } }, + { name: 'point match', g: new GeoJSON.Point(103.913, 1.308) }, + { name: 'point non-match', g: new GeoJSON.Point(-122.101, 37.421) }, + { name: 'point list match', lg: [new GeoJSON.Point(103.913, 1.308), new GeoJSON.Point(105.913, 3.308)] }, + { name: 'point list non-match', lg: [new GeoJSON.Point(-122.101, 37.421), new GeoJSON.Point(-120.101, 39.421)] }, + { name: 'point map match', mg: { a: new GeoJSON.Point(103.913, 1.308), b: new GeoJSON.Point(105.913, 3.308) } }, + { name: 'point map non-match', mg: { a: new GeoJSON.Point(-122.101, 37.421), b: new GeoJSON.Point(-120.101, 39.421) } }, + { name: 'region match', g: new GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) }, + { name: 'region non-match', g: new GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421]) }, + { name: 'region list match', lg: [new GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308])] }, + { name: 'region list non-match', lg: 
[new GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] }, + { name: 'region map match', mg: { a: new GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } }, + { name: 'region map non-match', mg: [new GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] }, + { name: 'aggregate', value: 10 }, + { name: 'aggregate', value: 20 }, + { name: 'aggregate', value: 30 }, + { name: 'filter', value: 1 }, + { name: 'filter', value: 2 }, + { name: 'filter', value: 3 }, + { name: 'filter', value: 4 } + ] + + const indexes: any = [ + ['qidxName', 'name', STRING], + ['qidxInt', 'i', NUMERIC], + ['qidxIntList', 'li', NUMERIC, LIST], + ['qidxIntMap', 'mi', NUMERIC, MAPVALUES], + ['qidxStr', 's', STRING], + ['qidxStrList', 'ls', STRING, LIST], + ['qidxStrMap', 'ms', STRING, MAPVALUES], + ['qidxStrMapKeys', 'mks', STRING, MAPKEYS], + ['qidxGeo', 'g', GEO2DSPHERE], + ['qidxGeoList', 'lg', GEO2DSPHERE, LIST], + ['qidxGeoMap', 'mg', GEO2DSPHERE, MAPVALUES] + + ] + + let keys: any = [] + + function verifyQueryResults (queryOptions: QueryOptions, matchName: string, done: any) { + const query: Query = client.query(helper.namespace, testSet, queryOptions) + let matches = 0 + const stream: RecordStream = query.foreach() + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => { + expect(record.bins).to.have.property('name', matchName) + matches++ + }) + stream.on('end', function () { + expect(matches).to.equal(1) + done() + }) + } + + before(() => { + const generators: any = { + keygen: keygen.string(helper.namespace, testSet, { prefix: 'test/query/', random: false }), + recgen: () => samples.pop(), + metagen: metagen.constant({ ttl: 300 }) + } + + if (helper.cluster.isVersionInRange('>= 7.0.0')) { + samples.push({ name: 'blob match', blob: Buffer.from('guava') 
}) + samples.push({ name: 'blob non-match', blob: Buffer.from('pumpkin') }) + samples.push({ name: 'blob list match', lblob: [Buffer.from('guava'), Buffer.from('papaya')] }) + samples.push({ name: 'blob list non-match', lblob: [Buffer.from('pumpkin'), Buffer.from('turnip')] }) + samples.push({ name: 'blob map match', mblob: { a: Buffer.from('guava'), b: Buffer.from('papaya') } }) + samples.push({ name: 'blob map non-match', mblob: { a: Buffer.from('pumpkin'), b: Buffer.from('turnip') } }) + samples.push({ name: 'blob mapkeys match', mkblob: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]]) }) + samples.push({ name: 'blob mapkeys non-match', mkblob: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]]) }) + samples.push({ name: 'nested blob match', blob: { nested: Buffer.from('guava') } }) + samples.push({ name: 'nested blob non-match', blob: { nested: Buffer.from('pumpkin') } }) + samples.push({ name: 'nested blob list match', lblob: { nested: [Buffer.from('guava'), Buffer.from('papaya')] } }) + samples.push({ name: 'nested blob list non-match', lblob: { nested: [Buffer.from('pumpkin'), Buffer.from('turnip')] } }) + samples.push({ name: 'nested blob map match', mblob: { nested: { a: Buffer.from('guava'), b: Buffer.from('papaya') } } }) + samples.push({ name: 'nested blob map non-match', mblob: { nested: { a: Buffer.from('pumpkin'), b: Buffer.from('turnip') } } }) + samples.push({ name: 'nested blob mapkeys match', mkblob: { nested: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]]) } }) + samples.push({ name: 'nested blob mapkeys non-match', mkblob: { nested: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]]) } }) + + indexes.push(['qidxBlob', 'blob', BLOB]) + indexes.push(['qidxBlobList', 'lblob', BLOB, LIST]) + indexes.push(['qidxBlobMap', 'mblob', BLOB, MAPVALUES]) + indexes.push(['qidxBlobMapKeys', 'mkblob', BLOB, MAPKEYS]) + indexes.push(['qidxBlobListNested', 'lblob', BLOB, LIST, new 
Context().addMapKey('nested')]) + indexes.push(['qidxBlobMapNested', 'mblob', BLOB, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxBlobMapKeysNested', 'mkblob', BLOB, MAPKEYS, new Context().addMapKey('nested')]) + } + + if (helper.cluster.isVersionInRange('>= 6.1.0')) { + samples.push({ name: 'nested int list match', li: { nested: [1, 5, 9] } }) + samples.push({ name: 'nested int list non-match', li: { nested: [500, 501, 502] } }) + samples.push({ name: 'nested int map match', mi: { nested: { a: 1, b: 5, c: 9 } } }) + samples.push({ name: 'nested int map non-match', mi: { nested: { a: 500, b: 501, c: 502 } } }) + samples.push({ name: 'nested string list match', ls: { nested: ['banana', 'blueberry'] } }) + samples.push({ name: 'nested string list non-match', ls: { nested: ['tomato', 'cuccumber'] } }) + samples.push({ name: 'nested string map match', ms: { nested: { a: 'banana', b: 'blueberry' } } }) + samples.push({ name: 'nested string map non-match', ms: { nested: { a: 'tomato', b: 'cuccumber' } } }) + samples.push({ name: 'nested string mapkeys match', mks: { nested: { banana: 1, blueberry: 2 } } }) + samples.push({ name: 'nested string mapkeys non-match', mks: { nested: { tomato: 3, cuccumber: 4 } } }) + samples.push({ name: 'nested point match', g: { nested: new GeoJSON.Point(103.913, 1.308) } }) + samples.push({ name: 'nested point non-match', g: { nested: new GeoJSON.Point(-122.101, 37.421) } }) + samples.push({ name: 'nested point list match', lg: { nested: [new GeoJSON.Point(103.913, 1.308), new GeoJSON.Point(105.913, 3.308)] } }) + samples.push({ name: 'nested point list non-match', lg: { nested: [new GeoJSON.Point(-122.101, 37.421), new GeoJSON.Point(-120.101, 39.421)] } }) + samples.push({ name: 'nested point map match', mg: { nested: { a: new GeoJSON.Point(103.913, 1.308), b: new GeoJSON.Point(105.913, 3.308) } } }) + samples.push({ name: 'nested point map non-match', mg: { nested: { a: new GeoJSON.Point(-122.101, 37.421), b: new 
GeoJSON.Point(-120.101, 39.421) } } }) + samples.push({ name: 'nested region match', g: { nested: new GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } }) + samples.push({ name: 'nested region non-match', g: { nested: new GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421]) } }) + samples.push({ name: 'nested region list match', lg: { nested: [new GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308])] } }) + samples.push({ name: 'nested region list non-match', lg: { nested: [new GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } }) + samples.push({ name: 'nested region map match', mg: { nested: { a: new GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } } }) + samples.push({ name: 'nested region map non-match', mg: { nested: [new GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } }) + samples.push({ name: 'nested aggregate', nested: { value: 10 } }) + samples.push({ name: 'nested aggregate', nested: { value: 20 } }) + samples.push({ name: 'nested aggregate', nested: { value: 30 } }) + samples.push({ name: 'nested aggregate', nested: { doubleNested: { value: 10 } } }) + samples.push({ name: 'nested aggregate', nested: { doubleNested: { value: 20 } } }) + samples.push({ name: 'nested aggregate', nested: { doubleNested: { value: 30 } } }) + + indexes.push(['qidxNameNested', 'name', STRING, MAPKEYS, new Context().addMapKey('nested')]) + indexes.push(['qidxIntListNested', 'li', NUMERIC, LIST, new Context().addMapKey('nested')]) + indexes.push(['qidxIntMapNested', 'mi', NUMERIC, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxStrListNested', 'ls', STRING, LIST, new 
Context().addMapKey('nested')]) + indexes.push(['qidxStrMapNested', 'ms', STRING, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxStrMapKeysNested', 'mks', STRING, MAPKEYS, new Context().addMapKey('nested')]) + indexes.push(['qidxGeoListNested', 'lg', GEO2DSPHERE, LIST, new Context().addMapKey('nested')]) + indexes.push(['qidxGeoMapNested', 'mg', GEO2DSPHERE, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxAggregateMapNested', 'nested', STRING, MAPKEYS]) + indexes.push(['qidxAggregateMapDoubleNested', 'nested', STRING, MAPKEYS, new Context().addMapKey('doubleNested')]) + } + + const numberOfSamples: any = samples.length + return Promise.all([ + putgen.put(numberOfSamples, generators) + .then((records: AerospikeRecord[]) => { keys = records.map((rec: AerospikeRecord) => rec.key) }) + .then(() => Promise.all(indexes.map((idx: Array) => + helper.index.create(idx[0], testSet, idx[1], idx[2], idx[3], idx[4])))), + helper.udf.register('udf.lua') + ]) + }) + + after(() => helper.udf.remove('udf.lua') + .then(() => Promise.all(indexes.map((idx: Array) => + helper.index.remove(idx[0]))))) + + describe('client.query()', function () { + it('creates a new Query instance and sets up it\'s properties', function () { + const namespace: string = helper.namespace + const set: string = 'demo' + const options: QueryOptions = { + select: ['a', 'b', 'c'], + nobins: false, + filters: [Aerospike.filter.equal('a', 9)] + } + const query: Query = client.query(namespace, set, options) + + expect(query).to.be.instanceof(Query) + expect(query.ns).to.equal(helper.namespace) + expect(query.set).to.equal('demo') + expect(query.selected).to.eql(['a', 'b', 'c']) + expect(query.nobins).to.be.false + expect(query.filters).to.be.instanceof(Array) + expect(query.filters.length).to.equal(1) + }) + + it('creates a query without specifying the set', function () { + const namespace: string = helper.namespace + const query: Query = client.query(namespace, { select: 
['i'] }) + expect(query).to.be.instanceof(Query) + expect(query.ns).to.equal(helper.namespace) + expect(query.set).to.be.null + expect(query.selected).to.eql(['i']) + }) + }) + + describe('query.select()', function () { + it('sets the selected bins from an argument list', function () { + const query: Query = client.query(helper.namespace, helper.set) + query.select('a', 'b', 'c') + expect(query.selected).to.eql(['a', 'b', 'c']) + }) + + it('sets the selected bins from an array', function () { + const query: Query = client.query(helper.namespace, helper.set) + query.select(['a', 'b', 'c']) + expect(query.selected).to.eql(['a', 'b', 'c']) + }) + }) + + describe('query.where()', function () { + it('adds a filter predicate to the query', function () { + const query: Query = client.query(helper.namespace, helper.set) + query.where(Aerospike.filter.equal('a', 9)) + expect(query.filters.length).to.equal(1) + }) + }) + + describe('query.foreach() #slow', function () { + it('Should run a regular primary index query', function (done) { + const query: Query = client.query(helper.namespace, testSet) + const stream = query.foreach() + const results: AerospikeBins[] = [] + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => results.push(record.bins)) + stream.on('end', () => { + expect(results.length).to.be.above(samples.length) + done() + }) + }) + context('expectedDuration', function () { + helper.skipUnlessVersion('>= 7.1.0', this) + + it('Should run a regular primary index query with expectedDuration=LONG', function (done) { + const query: Query = client.query(helper.namespace, testSet) + const stream = query.foreach({ expectedDuration: Aerospike.policy.queryDuration.LONG }) + const results: AerospikeBins[] = [] + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => results.push(record.bins)) + stream.on('end', () => { + expect(results.length).to.be.above(samples.length) 
+ done() + }) + }) + + it('Should run a regular primary index query with expectedDuration=SHORT', function (done) { + const query: Query = client.query(helper.namespace, testSet) + const stream = query.foreach({ expectedDuration: Aerospike.policy.queryDuration.SHORT }) + const results: AerospikeBins[] = [] + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => results.push(record.bins)) + stream.on('end', () => { + expect(results.length).to.be.above(samples.length) + done() + }) + }) + + it('Should run a regular primary index query with expectedDuration=LONG_RELAX_AP', function (done) { + const query: Query = client.query(helper.namespace, testSet) + const stream = query.foreach({ expectedDuration: Aerospike.policy.queryDuration.LONG_RELAX_AP }) + const results: AerospikeBins[] = [] + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => results.push(record.bins)) + stream.on('end', () => { + expect(results.length).to.be.above(samples.length) + done() + }) + }) + }) + + it('Should run a paginated primary index query', async function () { + let recordTotal = 0 + let recordsReceived = 0 + const maxRecs = 8 + const query: Query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs }) + let results: AerospikeRecord[] = [] + while (1) { + results = await query.results() + recordsReceived += results.length + expect(results.length).to.be.below(9) + results = [] + recordTotal += recordsReceived + if (recordsReceived !== maxRecs) { + expect(query.hasNextPage()).to.equal(false) + expect(recordTotal).to.be.above(samples.length) + break + } + recordsReceived = 0 + } + }) + + it('should apply a stream UDF to filter the results', function (done) { + const args: QueryOptions = { + filters: [filter.equal('name', 'filter')] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.setUdf('udf', 'even') + const stream = 
query.foreach() + const results: AerospikeBins[] = [] + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => results.push(record.bins)) + stream.on('end', () => { + expect(results.sort()).to.eql([2, 4]) + done() + }) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should apply a stream UDF to the nested context', function (done) { + const args: QueryOptions = { + filters: [filter.contains('name', 'value', MAPKEYS, new Context().addMapKey('nested'))] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.setUdf('udf', 'even') + const stream: RecordStream = query.foreach() + const results: AerospikeBins[] = [] + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => results.push(record.bins)) + stream.on('end', () => { + expect(results.sort()).to.eql([]) + done() + }) + }) + }) + + describe('query.paginate()', function () { + it('paginates with the correct amount of keys and pages', async function () { + let recordsReceived = 0 + let recordTotal = 0 + let pageTotal = 0 + const lastPage = 3 + const maxRecs = 2 + const query: Query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs, filters: [filter.equal('name', 'filter')] }) + while (1) { + const stream = query.foreach() + stream.on('error', (error) => { throw error }) + stream.on('data', (record) => { + recordsReceived++ + }) + await new Promise((resolve: any) => { + stream.on('end', (queryState) => { + query.queryState = queryState + resolve() + }) + }) + pageTotal += 1 + if (recordsReceived !== maxRecs) { + recordTotal += recordsReceived + expect(query.queryState).to.equal(undefined) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(4) + break + } else { + recordTotal += recordsReceived + recordsReceived = 0 + } + } + }) + + it('Paginates correctly using query.hasNextPage() and 
query.nextPage()', async function () { + let recordsReceived = 0 + let recordTotal = 0 + let pageTotal = 0 + const lastPage = 3 + const maxRecs = 2 + const query: Query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs, filters: [filter.equal('name', 'filter')] }) + while (1) { + const stream = query.foreach() + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => { + recordsReceived++ + }) + await new Promise((resolve: any) => { + stream.on('end', (queryState) => { + query.nextPage(queryState) + resolve() + }) + }) + pageTotal += 1 + if (recordsReceived !== maxRecs) { + recordTotal += recordsReceived + expect(query.hasNextPage()).to.equal(false) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(4) + break + } else { + recordTotal += recordsReceived + recordsReceived = 0 + } + } + }) + + it('Paginates correctly using query.results()', async function () { + let recordTotal = 0 + let recordsReceived = 0 + let pageTotal = 0 + const lastPage = 3 + const maxRecs = 2 + const query: Query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs, filters: [filter.equal('name', 'filter')] }) + let results = [] + while (1) { + results = await query.results() + recordsReceived += results.length + results = [] + + pageTotal += 1 + recordTotal += recordsReceived + if (recordsReceived !== maxRecs) { + expect(query.hasNextPage()).to.equal(false) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(4) + break + } + recordsReceived = 0 + } + }) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('Paginates correctly using query.results() on an index with a cdt context', async function () { + let recordTotal = 0 + let recordsReceived = 0 + let pageTotal = 0 + const lastPage = 1 + const maxRecs = 5 + const query: Query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: 
maxRecs, filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] }) + let results = [] + while (1) { + results = await query.results() + recordsReceived += results.length + results = [] + pageTotal += 1 + recordTotal += recordsReceived + if (recordsReceived !== maxRecs) { + expect(query.hasNextPage()).to.equal(false) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(3) + break + } + recordsReceived = 0 + } + }) + }) + + it('Throw error when query.UDF is set and query.paginate is true', async function () { + const maxRecs = 2 + const query: Query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs, filters: [filter.equal('name', 'filter')] }) + query.setUdf('udf', 'even') + try { + await query.results() + expect(1).to.equal(2) + } catch (error: any) { + expect(error.message).to.equal('Stream UDF cannot be applied using a paginated stream. Please disable pagination or UDF.') + } + }) + }) + + it('returns the key if it was stored on the server', function (done) { + const uniqueKey = 'test/query/record_with_stored_key' + const key = new Aerospike.Key(helper.namespace, testSet, uniqueKey) + const record = { name: uniqueKey } + const meta = { ttl: 300 } + const policy = new Aerospike.WritePolicy({ + key: Aerospike.policy.key.SEND + }) + + client.put(key, record, meta, policy, function (err) { + if (err) throw err + + const query: Query = client.query(helper.namespace, testSet) + query.where(Aerospike.filter.equal('name', uniqueKey)) + const stream = query.foreach() + let count = 0 + stream.on('data', (record: AerospikeRecord) => { + expect(++count).to.equal(1) + expect(record.key).to.be.instanceof(Key) + expect(record.key.key).to.equal(uniqueKey) + }) + stream.on('end', done) + }) + }) + + context('with partitions settings', function () { + helper.skipUnlessVersion('>= 6.0.0', this) + it('returns the key if it was stored on the given partitions', function (done) { + const uniqueKey = 
'test/query/record_with_stored_key' + const key = new Aerospike.Key(helper.namespace, testSet, uniqueKey) + const record = { name: uniqueKey } + const meta = { ttl: 300 } + const policy = new Aerospike.WritePolicy({ + key: Aerospike.policy.key.SEND + }) + + client.put(key, record, meta, policy, function (err) { + if (err) throw err + const query: Query = client.query(helper.namespace, testSet) + query.where(Aerospike.filter.equal('name', uniqueKey)) + query.partitions(0, 4096) + const stream = query.foreach() + let count = 0 + stream.on('data', (record: AerospikeRecord) => { + expect(++count).to.equal(1) + expect(record.key).to.be.instanceof(Key) + expect(record.key.key).to.equal(uniqueKey) + }) + stream.on('end', done) + }) + }) + }) + + it('returns the key matching the expression', function (done) { + const uniqueExpKey = 'test/query/record_with_stored_key' + const key = new Aerospike.Key(helper.namespace, testSet, uniqueExpKey) + const record = { name: uniqueExpKey } + const meta = { ttl: 300 } + const policy = new Aerospike.WritePolicy({ + key: Aerospike.policy.key.SEND + }) + + client.put(key, record, meta, policy, function (err) { + if (err) throw err + const query: Query = client.query(helper.namespace, testSet) + const queryPolicy = { filterExpression: exp.keyExist() } + const stream = query.foreach(queryPolicy) + let count = 0 + stream.on('data', (record: AerospikeRecord) => { + expect(++count).to.equal(1) + expect(record.key).to.be.instanceof(Key) + expect(record.key.key).to.equal(uniqueExpKey) + }) + stream.on('end', done) + }) + }) + + context('with nobins set to true', function () { + helper.skipUnlessVersion('>= 3.15.0', this) + + it('should return only meta data', function (done) { + const query: Query = client.query(helper.namespace, testSet) + const queryPolicy = { filterExpression: exp.eq(exp.binInt('i'), exp.int(5)) } + query.nobins = true + let received: AerospikeRecord; + const stream = query.foreach(queryPolicy) + stream.on('error', (error: 
ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => { + received = record + stream.abort() + }) + stream.on('end', () => { + expect(received.bins).to.be.empty + expect(received.gen).to.be.ok + expect(received.ttl).to.be.ok + done() + }) + }) + + it('should return only meta data', function (done) { + const query: Query = client.query(helper.namespace, testSet) + query.where(Aerospike.filter.equal('i', 5)) + query.nobins = true + let received: AerospikeRecord; + const stream = query.foreach() + stream.on('error', (error: ASError) => { throw error }) + stream.on('data', (record: AerospikeRecord) => { + received = record + stream.abort() + }) + stream.on('end', () => { + expect(received.bins).to.be.empty + expect(received.gen).to.be.ok + expect(received.ttl).to.be.ok + done() + }) + }) + }) + /* + it('should raise client errors asynchronously', function () { + const invalidPolicy = new Aerospike.QueryPolicy({ + timeout: 'not a valid timeout' + }) + + const query: Query = client.query(helper.namespace) + const stream = query.foreach(invalidPolicy) + // if error is raised synchronously we will never reach here + stream.on('error', (error: any) => { + expect(error).to.be.instanceof(AerospikeError).with.property('code', Aerospike.status.ERR_PARAM) + }) + }) + */ + + it('attaches event handlers to the stream', function (done) { + const query: Query = client.query(helper.namespace, testSet) + let dataHandlerCalled = false + const stream = query.foreach(null, + (_record: AerospikeRecord) => { + dataHandlerCalled = true + stream.abort() + }, + (error: Error) => { throw error }, + () => { + expect(dataHandlerCalled).to.be.true + done() + }) + }) + + context('filter predicates', function () { + describe('filter.equal()', function () { + it('should match equal integer values', function (done) { + const args: QueryOptions = { filters: [filter.equal('i', 5)] } + verifyQueryResults(args, 'int match', done) + }) + context('Uses blob Secondary indexes', 
function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('should match equal blob values', function (done) { + const args: QueryOptions = { filters: [filter.equal('blob', Buffer.from('guava'))] } + verifyQueryResults(args, 'blob match', done) + }) + }) + it('should match equal string values', function (done) { + const args: QueryOptions = { filters: [filter.equal('s', 'banana')] } + verifyQueryResults(args, 'string match', done) + }) + + it('throws a type error if the comparison value is of invalid type', function () { + const fn = () => filter.equal('str', { foo: 'bar' }) + expect(fn).to.throw(TypeError) + }) + }) + + describe('filter.range()', function () { + it('should match integers within a range', function (done) { + const args: QueryOptions = { filters: [filter.range('i', 3, 7)] } + verifyQueryResults(args, 'int match', done) + }) + + it('should match integers in a list within a range', function (done) { + const args: QueryOptions = { filters: [filter.range('li', 3, 7, LIST)] } + verifyQueryResults(args, 'int list match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match integers in a list within a range in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.range('li', 3, 7, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int list match', done) + }) + }) + + it('should match integers in a map within a range', function (done) { + const args: QueryOptions = { filters: [filter.range('mi', 3, 7, MAPVALUES)] } + verifyQueryResults(args, 'int map match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match integers in a map within a range in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.range('mi', 3, 7, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int map match', 
done) + }) + }) + }) + + describe('filter.contains()', function () { + it('should match lists containing an integer', function (done) { + const args: QueryOptions = { filters: [filter.contains('li', 5, LIST)] } + verifyQueryResults(args, 'int list match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match lists containing an integer in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('li', 5, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int list match', done) + }) + }) + + it('should match maps containing an integer value', function (done) { + const args: QueryOptions = { filters: [filter.contains('mi', 5, MAPVALUES)] } + verifyQueryResults(args, 'int map match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match maps containing an integer value in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('mi', 5, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int map match', done) + }) + }) + + it('should match lists containing a string', function (done) { + const args: QueryOptions = { filters: [filter.contains('ls', 'banana', LIST)] } + verifyQueryResults(args, 'string list match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match lists containing a string in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('ls', 'banana', LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested string list match', done) + }) + }) + + it('should match maps containing a string value', function (done) { + const args: QueryOptions = { filters: [filter.contains('ms', 'banana', MAPVALUES)] } + verifyQueryResults(args, 'string map match', 
done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match maps containing a string value in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('ms', 'banana', MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested string map match', done) + }) + }) + + it('should match maps containing a string key', function (done) { + const args: QueryOptions = { filters: [filter.contains('mks', 'banana', MAPKEYS)] } + verifyQueryResults(args, 'string mapkeys match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match maps containing a string key in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('mks', 'banana', MAPKEYS, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested string mapkeys match', done) + }) + }) + + context('Uses blob Secondary indexes', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('should match lists containing a blob', function (done) { + const args: QueryOptions = { filters: [filter.contains('lblob', Buffer.from('guava'), LIST)] } + verifyQueryResults(args, 'blob list match', done) + }) + + it('should match lists containing a blob in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('lblob', Buffer.from('guava'), LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested blob list match', done) + }) + + it('should match maps containing a blob value', function (done) { + const args: QueryOptions = { filters: [filter.contains('mblob', Buffer.from('guava'), MAPVALUES)] } + verifyQueryResults(args, 'blob map match', done) + }) + + it('should match maps containing a blob value in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('mblob', Buffer.from('guava'), 
MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested blob map match', done) + }) + + it('should match maps containing a blob key', function (done) { + const args: QueryOptions = { filters: [filter.contains('mkblob', Buffer.from('guava'), MAPKEYS)] } + verifyQueryResults(args, 'blob mapkeys match', done) + }) + + it('should match maps containing a blob key in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.contains('mkblob', Buffer.from('guava'), MAPKEYS, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested blob mapkeys match', done) + }) + }) + it('throws a type error if the comparison value is of invalid type', function () { + const fn = () => filter.contains('list', { foo: 'bar' }, LIST) + expect(fn).to.throw(TypeError) + }) + }) + + describe('filter.geoWithinGeoJSONRegion()', function () { + it('should match locations within a GeoJSON region', function (done) { + const region: GJ = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args: QueryOptions = { filters: [filter.geoWithinGeoJSONRegion('g', region)] } + verifyQueryResults(args, 'point match', done) + }) + + it('should match locations in a list within a GeoJSON region', function (done) { + const region: GJ = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args: QueryOptions = { filters: [filter.geoWithinGeoJSONRegion('lg', region, LIST)] } + verifyQueryResults(args, 'point list match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a list within a GeoJSON region in a nested context', function (done) { + const region: GJ = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args: QueryOptions = { filters: 
[filter.geoWithinGeoJSONRegion('lg', region, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point list match', done) + }) + }) + + it('should match locations in a map within a GeoJSON region', function (done) { + const region: GJ = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args: QueryOptions = { filters: [filter.geoWithinGeoJSONRegion('mg', region, MAPVALUES)] } + verifyQueryResults(args, 'point map match', done) + }) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a map within a GeoJSON region in a nested context', function (done) { + const region: GJ = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args: QueryOptions = { filters: [filter.geoWithinGeoJSONRegion('mg', region, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point map match', done) + }) + }) + + it('accepts a plain object as GeoJSON', function (done) { + const region: GeoJSONType = { type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] } + const args: QueryOptions = { filters: [filter.geoWithinGeoJSONRegion('g', region)] } + verifyQueryResults(args, 'point match', done) + }) + }) + + describe('filter.geoWithinRadius()', function () { + it('should match locations within a radius from another location', function (done) { + const args: QueryOptions = { filters: [filter.geoWithinRadius('g', 103.9135, 1.3085, 15000)] } + verifyQueryResults(args, 'point match', done) + }) + + it('should match locations in a list within a radius from another location', function (done) { + const args: QueryOptions = { filters: [filter.geoWithinRadius('lg', 103.9135, 1.3085, 15000, LIST)] } + verifyQueryResults(args, 'point list match', done) + }) + + describe('index with cdt context', function () 
{ + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a list within a radius from another location in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.geoWithinRadius('lg', 103.9135, 1.3085, 15000, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point list match', done) + }) + }) + + it('should match locations in a map within a radius from another location', function (done) { + const args: QueryOptions = { filters: [filter.geoWithinRadius('mg', 103.9135, 1.3085, 15000, MAPVALUES)] } + verifyQueryResults(args, 'point map match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a map within a radius from another location in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.geoWithinRadius('mg', 103.9135, 1.3085, 15000, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point map match', done) + }) + }) + }) + + describe('filter.geoContainsGeoJSONPoint()', function () { + it('should match regions that contain a GeoJSON point', function (done) { + const point: GJ = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + const args: QueryOptions = { filters: [filter.geoContainsGeoJSONPoint('g', point)] } + verifyQueryResults(args, 'region match', done) + }) + + it('should match regions in a list that contain a GeoJSON point', function (done) { + const point: GJ = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + const args: QueryOptions = { filters: [filter.geoContainsGeoJSONPoint('lg', point, LIST)] } + verifyQueryResults(args, 'region list match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a list that contain a GeoJSON point in a nested context', function (done) { + const point: GJ = new GeoJSON({ type: 
'Point', coordinates: [103.913, 1.308] }) + const args: QueryOptions = { filters: [filter.geoContainsGeoJSONPoint('lg', point, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region list match', done) + }) + }) + + it('should match regions in a map that contain a GeoJSON point', function (done) { + const point: GJ = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + const args: QueryOptions = { filters: [filter.geoContainsGeoJSONPoint('mg', point, MAPVALUES)] } + verifyQueryResults(args, 'region map match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a map that contain a GeoJSON point in a nested context', function (done) { + const point: GJ = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + const args: QueryOptions = { filters: [filter.geoContainsGeoJSONPoint('mg', point, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region map match', done) + }) + }) + + it('accepts a plain object as GeoJSON', function (done) { + const point: GeoJSONType = { type: 'Point', coordinates: [103.913, 1.308] } + const args: QueryOptions = { filters: [filter.geoContainsGeoJSONPoint('g', point)] } + verifyQueryResults(args, 'region match', done) + }) + }) + + describe('filter.geoContainsPoint()', function () { + it('should match regions that contain a lng/lat coordinate pair', function (done) { + const args: QueryOptions = { filters: [filter.geoContainsPoint('g', 103.913, 1.308)] } + verifyQueryResults(args, 'region match', done) + }) + + it('should match regions in a list that contain a lng/lat coordinate pair', function (done) { + const args: QueryOptions = { filters: [filter.geoContainsPoint('lg', 103.913, 1.308, LIST)] } + verifyQueryResults(args, 'region list match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match 
regions in a list that contain a lng/lat coordinate pair in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.geoContainsPoint('lg', 103.913, 1.308, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region list match', done) + }) + }) + + it('should match regions in a map that contain a lng/lat coordinate pair', function (done) { + const args: QueryOptions = { filters: [filter.geoContainsPoint('mg', 103.913, 1.308, MAPVALUES)] } + verifyQueryResults(args, 'region map match', done) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a map that contain a lng/lat coordinate pair in a nested context', function (done) { + const args: QueryOptions = { filters: [filter.geoContainsPoint('mg', 103.913, 1.308, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region map match', done) + }) + }) + }) + }) + }) + + describe('query.results()', function () { + it('returns a Promise that resolves into the query results', function () { + const query: Query = client.query(helper.namespace, testSet) + query.where(filter.equal('i', 5)) + + return query.results().then(records => { + expect(records.length).to.eq(1) + expect(records[0].bins.name).to.eq('int match') + }) + }) + + context('with QueryPolicy', function () { + context('with deserialize: false', function () { + const policy = new Aerospike.QueryPolicy({ + deserialize: false + }) + + it('returns lists and maps as byte buffers', function () { + const query: Query = client.query(helper.namespace, testSet) + query.where(filter.equal('name', 'int list match')) + + return query.results(policy) + .then(records => { + expect(records.length).to.eq(1) + expect(records[0].bins.li).to.eql(Buffer.from([0x93, 0x01, 0x05, 0x09])) + }) + }) + }) + }) + }) + + describe('query.apply()', function () { + it('should apply a user defined function and aggregate the 
results', function (done) { + const args: QueryOptions = { + filters: [filter.equal('name', 'aggregate')] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.apply('udf', 'count', function (error, result) { + if (error) throw error + expect(result).to.equal(3) + done() + }) + }) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should apply a user defined function and aggregate the results from a map', function (done) { + const args: QueryOptions = { + filters: [filter.contains('nested', 'value', MAPKEYS)] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.apply('udf', 'count', function (error, result) { + if (error) throw error + expect(result).to.equal(3) + done() + }) + }) + }) + + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should apply a user defined function and aggregate the results from a nested map', function (done) { + const args: QueryOptions = { + filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.apply('udf', 'count', function (error, result) { + if (error) throw error + expect(result).to.equal(3) + done() + }) + }) + }) + + it('should apply a user defined function with arguments and aggregate the results', function (done) { + const args: QueryOptions = { + filters: [filter.equal('name', 'aggregate')] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.apply('udf', 'countGreaterThan', ['value', 15], function (error, result) { + if (error) throw error + expect(result).to.equal(2) + done() + }) + }) + + it('returns a Promise that resolves to the result of the aggregation', function () { + const args: QueryOptions = { + filters: [filter.equal('name', 'aggregate')] + } + const query: Query = client.query(helper.namespace, testSet, args) + 
return query.apply('udf', 'count') + .then(result => { + expect(result).to.equal(3) + }) + }) + }) + + describe('query.background()', function () { + it('should run a background query and return a job', function (done) { + const args: QueryOptions = { + filters: [filter.equal('name', 'aggregate')] + } + const query: Query = client.query(helper.namespace, testSet, args) + query.background('udf', 'noop', function (error, job) { + if (error) throw error + expect(job).to.be.instanceof(Job) + done() + }) + }) + + it('returns a Promise that resolves to a Job', function () { + const args: QueryOptions = { + filters: [filter.equal('name', 'aggregate')] + } + const query: Query = client.query(helper.namespace, testSet, args) + return query.background('udf', 'noop') + .then(job => { + expect(job).to.be.instanceof(Job) + }) + }) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('returns a Promise that resolves to a Job with a filter containing a CDT context', function () { + const args: QueryOptions = { + filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] + } + const query: Query = client.query(helper.namespace, testSet, args) + return query.background('udf', 'noop') + .then(job => { + expect(job).to.be.instanceof(Job) + }) + }) + }) + }) + + describe('query.operate()', function () { + helper.skipUnlessVersion('>= 4.7.0', this) + + it('should perform a background query that executes the operations #slow', async function () { + const query: Query = client.query(helper.namespace, testSet) + const ops = [op.write('backgroundOps', 4)] + const job = await query.operate(ops) + await job.waitUntilDone() + + const key = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + expect(record.bins.backgroundOps).to.equal(4) + }) + + it('should set TTL to the specified value #slow', async function () { + const query: Query = client.query(helper.namespace, testSet) 
+ query.ttl = 3600 + const ops = [op.incr('backgroundOps', 1)] + const job = await query.operate(ops) + await job.waitUntilDone() + + const key = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + expect(record.ttl).to.equal(3599) + }) + + it('should set TTL to the specified value using query options #slow', async function () { + const query: Query = client.query(helper.namespace, testSet, { ttl: 7200 }) + const ops = [op.incr('backgroundOps', 1)] + const job = await query.operate(ops) + await job.waitUntilDone() + + const key = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + expect(record.ttl).to.equal(7199) + }) + }) + + describe('stream.abort()', function () { + it('should stop the query when the stream is aborted', function (done) { + const query: Query = client.query(helper.namespace, testSet) + const stream = query.foreach() + let recordsReceived = 0 + stream.on('data', () => { + recordsReceived++ + if (recordsReceived === 5) { + stream.abort() + } + }) + stream.on('end', () => { + expect(recordsReceived).to.equal(5) + done() + }) + }) + }) + /* + context('legacy scan interface', function () { + ;['UDF', 'concurrent', 'percentage', 'priority'].forEach(function (key) { + it('should throw an exception if the query options contain key "' + key + '"', function () { + const args: QueryOptions = {} + args[key] = 'foo' + expect(() => client.query(helper.namespace, testSet, args)).to.throw('Invalid query arguments') + }) + }) + }) + */ +}) diff --git a/ts-test/tests/remove.ts b/ts-test/tests/remove.ts new file mode 100644 index 000000000..acd8375c8 --- /dev/null +++ b/ts-test/tests/remove.ts @@ -0,0 +1,84 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, context, it */ + +import Aerospike, { status as statusModule, AerospikeError as ASError, Key, Client as Cli, RemovePolicy } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen = helper.keygen + +const status: typeof statusModule = Aerospike.status +const AerospikeError: typeof ASError = Aerospike.AerospikeError + +describe('client.remove()', function () { + const client: Cli = helper.client + + it('removes an existing record', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/remove/' })() + + return client.put(key, { str: 'abcde' }) + .then(() => client.remove(key)) + .then(() => client.exists(key)) + .then(result => expect(result).to.be.false) + }) + + it('returns an error when trying to remove a non-existing key', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/remove/' })() + + return client.remove(key) + .catch(error => + expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_NOT_FOUND)) + }) + + context('with generation policy value', function () { + it('should remove the record if the generation matches', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/remove/' })() + const policy: RemovePolicy = new Aerospike.RemovePolicy({ + gen: Aerospike.policy.gen.EQ, + generation: 1 + }) + + return 
client.put(key, { str: 'abcde' }) + .then(() => { + return client.remove(key, policy) + }) + .then(() => client.exists(key)) + .then((result: boolean) => expect(result).to.be.false) + }) + + it('should not remove the record if the generation does not match', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/remove/' })() + const policy: RemovePolicy = new Aerospike.RemovePolicy({ + gen: Aerospike.policy.gen.EQ, + generation: 1 + }) + + return client.put(key, { str: 'abcde' }) + .then(() => { + return client.remove(key, policy) + .catch(error => + expect(error).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_GENERATION)) + }) + .then(() => client.exists(key)) + .then((result: boolean) => expect(result).to.be.false) + }) + }) +}) diff --git a/ts-test/tests/remove_bin.ts b/ts-test/tests/remove_bin.ts new file mode 100644 index 000000000..541024953 --- /dev/null +++ b/ts-test/tests/remove_bin.ts @@ -0,0 +1,48 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' +/* global describe, context, it */ + +import Aerospike, { AerospikeError as ASError, status as statusModule, Client as Cli, Key } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen = helper.keygen + +const status: typeof statusModule = Aerospike.status +const AerospikeError: typeof ASError = Aerospike.AerospikeError + +describe('client.put(null bin)', function () { + const client: Cli = helper.client + + context('with simple put null value', function () { + it('delete bin using null put', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/remove_bin/' })() + + return client.put(key, { str: 'abcde' }) + .then(() => { + client.put(key, { str: null }) + .then(() => { + client.get(key, function (err?: ASError) { + expect(err!).to.be.instanceof(AerospikeError).with.property('code', status.ERR_RECORD_NOT_FOUND) + }) + }) + }) + }) + }) +}) diff --git a/ts-test/tests/scan.ts b/ts-test/tests/scan.ts new file mode 100644 index 000000000..eaca8df55 --- /dev/null +++ b/ts-test/tests/scan.ts @@ -0,0 +1,461 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, before, after, context */ +/* eslint-disable no-unused-expressions */ + +import Aerospike, { Scan as ScanType, Job as J, Key as K, operations, AerospikeRecord } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const Scan: typeof ScanType = Aerospike.Scan +const Job: typeof J = Aerospike.Job + +const Key: typeof K = Aerospike.Key +const op: typeof operations = Aerospike.operations + +const keygen: any = helper.keygen +const metagen: any = helper.metagen +const putgen: any = helper.putgen +const recgen: any = helper.recgen +const valgen: any = helper.valgen + +context('Scans', function () { + const client = helper.client + const testSet = 'test/scan-' + Math.floor(Math.random() * 100000) + const numberOfRecords = 100 + let keys: K[] = [] + + before(() => helper.udf.register('udf.lua') + .then(() => { + const config = { + keygen: keygen.string(helper.namespace, testSet, { prefix: 'test/scan/', random: false }), + recgen: recgen.record({ i: valgen.integer(), s: valgen.string() }), + metagen: metagen.constant({ ttl: 300 }), + policy: new Aerospike.WritePolicy({ + totalTimeout: 1000, + key: Aerospike.policy.key.SEND, + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + } + return putgen.put(numberOfRecords, config) + .then((records: AerospikeRecord[]) => { keys = records.map((rec: any) => rec.key) }) + })) + + after(() => helper.udf.remove('udf.lua')) + + describe('client.scan()', function () { + it('creates a new Scan instance and sets up it\'s properties', function () { + const namespace = helper.namespace + const set = 'demo' + const options = { + concurrent: true, + select: ['a', 'b', 'c'], + nobins: false + } + const scan: ScanType = client.scan(namespace, set, options) + + expect(scan).to.be.instanceof(Scan) + expect(scan.ns).to.equal(helper.namespace) + 
expect(scan.set).to.equal('demo') + expect(scan.concurrent).to.be.true + expect(scan.selected).to.eql(['a', 'b', 'c']) + expect(scan.nobins).to.be.false + }) + + it('creates a scan without specifying the set', function () { + const namespace: string = helper.namespace + const scan: ScanType = client.scan(namespace, { select: ['i'] }) + expect(scan).to.be.instanceof(Scan) + expect(scan.ns).to.equal(helper.namespace) + expect(scan.set).to.be.null + expect(scan.selected).to.eql(['i']) + }) + }) + + describe('scan.select()', function () { + it('sets the selected bins from an argument list', function () { + const scan: ScanType = client.scan(helper.namespace, helper.namespace) + scan.select('a', 'b', 'c') + expect(scan.selected).to.eql(['a', 'b', 'c']) + }) + + it('sets the selected bins from an array', function () { + const scan: ScanType = client.scan(helper.namespace, helper.namespace) + scan.select(['a', 'b', 'c']) + expect(scan.selected).to.eql(['a', 'b', 'c']) + }) + }) + + describe('scan.foreach() #slow', function () { + it('retrieves all records in the set', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace, testSet) + let recordsReceived = 0 + const stream = scan.foreach() + stream.on('data', () => recordsReceived++) + stream.on('end', () => { + expect(recordsReceived).to.equal(numberOfRecords) + done() + }) + }) + + describe('scan.paginate', function () { + it('Paginates with the correct amount of keys and pages', async function () { + let recordsReceived = 0 + let recordTotal = 0 + let pageTotal = 0 + const lastPage = 11 + const maxRecs = 10 + const scan: ScanType = client.scan(helper.namespace, testSet, { paginate: true }) + while (1) { + const stream = scan.foreach({ maxRecords: maxRecs }) + stream.on('error', (error: any) => { throw error }) + stream.on('data', (record: any) => { + recordsReceived++ + }) + await new Promise((resolve: any) => { + stream.on('end', (scanState: any) => { + 
scan.scanState = scanState + resolve() + }) + }) + pageTotal += 1 + if (recordsReceived !== maxRecs) { + recordTotal += recordsReceived + expect(scan.scanState).to.equal(undefined) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(numberOfRecords) + break + } else { + recordTotal += recordsReceived + recordsReceived = 0 + } + } + }) + + it('Paginates correctly using scan.hasNextPage() and scan.nextPage()', async function () { + let recordsReceived = 0 + let recordTotal = 0 + let pageTotal = 0 + const lastPage = 11 + const maxRecs = 10 + const scan: ScanType = client.scan(helper.namespace, testSet, { paginate: true }) + while (1) { + const stream = scan.foreach({ maxRecords: maxRecs }) + stream.on('error', (error: any) => { throw error }) + stream.on('data', (record: any) => { + recordsReceived++ + }) + await new Promise((resolve: any) => { + stream.on('end', (scanState: any) => { + scan.nextPage(scanState) + resolve() + }) + }) + pageTotal += 1 + if (recordsReceived !== maxRecs) { + recordTotal += recordsReceived + expect(scan.hasNextPage()).to.equal(false) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(numberOfRecords) + break + } else { + recordTotal += recordsReceived + recordsReceived = 0 + } + } + }) + + it('Paginates correctly using scan.results()', async function () { + let recordsReceived = 0 + let recordTotal = 0 + let pageTotal = 0 + const lastPage = 11 + const maxRecs = 10 + const scan: ScanType = client.scan(helper.namespace, testSet, { paginate: true }) + while (1) { + const stream = scan.foreach({ maxRecords: maxRecs }) + stream.on('error', (error: any) => { throw error }) + stream.on('data', (record: any) => { + recordsReceived++ + }) + await new Promise((resolve: any) => { + stream.on('end', (scanState: number[]) => { + scan.nextPage(scanState) + resolve() + }) + }) + pageTotal += 1 + if (recordsReceived !== maxRecs) { + recordTotal += recordsReceived + expect(scan.hasNextPage()).to.equal(false) + 
expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(numberOfRecords) + break + } else { + recordTotal += recordsReceived + recordsReceived = 0 + } + } + }) + }) + + it('retrieves all records from the given partitions', function (done) { + const scan: ScanType = client.scan(helper.namespace, testSet) + let recordsReceived = 0 + scan.partitions(0, 4096) + const stream = scan.foreach() + stream.on('data', () => recordsReceived++) + stream.on('end', () => { + expect(recordsReceived).to.equal(numberOfRecords) + done() + }) + }) + + it('returns the key if it is stored on the server', function (done) { + this.timeout(10000) // 10 second timeout + // requires { key: Aerospike.policy.key.SEND } when creating the record + const scan: ScanType = client.scan(helper.namespace, testSet) + const stream = scan.foreach() + stream.on('data', record => { + expect(record.key).to.be.instanceof(Key) + expect(record.key.key).to.not.be.empty + stream.abort() + }) + stream.on('end', done) + }) + + it('attaches event handlers to the stream', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace, testSet) + let dataHandlerCalled = false + const stream = scan.foreach(null, + (_record: any) => { + dataHandlerCalled = true + stream.abort() + }, + (error: any) => { throw error }, + () => { + expect(dataHandlerCalled).to.be.true + done() + }) + }) + + it('sets a scan policy', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace, testSet) + const policy = new Aerospike.ScanPolicy({ + totalTimeout: 10000, + socketTimeout: 10000, + durableDelete: true, + recordsPerSecond: 50, + maxRecords: 5000 + }) + + const stream = scan.foreach(policy) + stream.on('data', () => stream.abort()) + stream.on('error', (error: any) => { + if (error.code === Aerospike.status.ERR_TIMEOUT) { + // ignore errors caused by cluster change events + } else { + throw error + } + }) + 
stream.on('end', done) + }) + + context('with nobins set to true', function () { + it('should return only meta data', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace, testSet, { nobins: true }) + const stream = scan.foreach() + stream.on('data', record => { + expect(record.bins).to.be.empty + expect(record.gen).to.be.ok + expect(record.ttl).to.be.ok + stream.abort() + }) + stream.on('end', done) + }) + }) + + context('with bin selection', function () { + it('should return only selected bins', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace, testSet) + scan.select('i') + const stream = scan.foreach() + stream.on('data', record => { + expect(record.bins).to.have.all.keys('i') + stream.abort() + }) + stream.on('end', done) + }) + }) + + context('with max records limit', function () { + helper.skipUnlessVersion('>= 4.9.0', this) + + it('returns at most X number of records', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace, testSet, { nobins: true }) + + const maxRecords = 33 + const stream = scan.foreach({ maxRecords }) + let recordsReceived = 0 + stream.on('data', () => recordsReceived++) + stream.on('end', () => { + // The actual number returned may be less than maxRecords if node + // record counts are small and unbalanced across nodes. 
+ expect(recordsReceived).to.be.at.most(maxRecords) + done() + }) + }) + }) + + context('without set', function () { + it('executes a scan without set', function (done) { + this.timeout(10000) // 10 second timeout + const scan: ScanType = client.scan(helper.namespace) + let recordsReceived = 0 + const stream = scan.foreach() + stream.on('error', (error: any) => { throw error }) + stream.on('data', () => { + recordsReceived++ + stream.abort() + }) + stream.on('end', () => { + expect(recordsReceived).to.equal(1) + done() + }) + }) + }) + }) + + describe('scan.background()', function () { + it('applies a UDF to every record', function (done) { + const token = valgen.string({ length: { min: 10, max: 10 } })() + const backgroundScan = client.scan(helper.namespace, testSet) + backgroundScan.background('udf', 'updateRecord', ['x', token], function (err: any, job: any) { + if (err) throw err + job.waitUntilDone(10, function (err: any) { + if (err) throw err + const validationScan = client.scan(helper.namespace, testSet) + const stream = validationScan.foreach() + stream.on('error', (error: any) => { throw error }) + stream.on('data', (record: any) => expect(record.bins.x).to.equal(token)) + stream.on('end', done) + }) + }) + }) + + it('returns a Promise that resolves to a Job', function () { + const backgroundScan = client.scan(helper.namespace, testSet) + return backgroundScan.background('udf', 'noop') + .then(job => { + expect(job).to.be.instanceof(Job) + }) + }) + }) + + describe('scan.operate()', function () { + helper.skipUnlessVersion('>= 4.7.0', this) + + it('should perform a background scan that executes the operations #slow', async function () { + const scan: ScanType = client.scan(helper.namespace, testSet) + const ops = [op.write('backgroundOps', 1)] + const job = await scan.operate(ops) + await job.waitUntilDone() + + const key: any = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + 
expect(record.bins.backgroundOps).to.equal(1) + }) + + it('should set TTL to the specified value #slow', async function () { + const scan: ScanType = client.scan(helper.namespace, testSet) + scan.ttl = 10800 + const ops = [op.incr('backgroundOps', 1)] + const job = await scan.operate(ops) + await job.waitUntilDone() + + const key: any = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + expect(record.ttl).to.equal(10799) + }) + + it('should set TTL to the specified value with scan options #slow', async function () { + const scan: ScanType = client.scan(helper.namespace, testSet, { ttl: 14400 }) + const ops = [op.incr('backgroundOps', 1)] + const job = await scan.operate(ops) + await job.waitUntilDone() + + const key: any = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + expect(record.ttl).to.equal(14399) + }) + + it('should perform a background scan that executes the touch operation #slow', async function () { + const ttl = 123 + const scan: ScanType = client.scan(helper.namespace, testSet) + const job = await scan.operate([Aerospike.operations.touch(ttl)]) + await job.waitUntilDone() + + const key: any = keys[Math.floor(Math.random() * keys.length)] + const record = await client.get(key) + console.log('After scan-op TTL : %d Key TTL: %d', ttl, record.ttl) + expect(record.ttl).to.equal(ttl - 1) + }) + }) + + describe('stream.abort()', function () { + it('should stop the scan when the stream is aborted', function (done) { + const scan: ScanType = client.scan(helper.namespace, testSet) + const stream = scan.foreach() + let recordsReceived = 0 + stream.on('data', () => { + recordsReceived++ + if (recordsReceived === 5) { + stream.abort() + } + }) + stream.on('end', () => { + expect(recordsReceived).to.equal(5) + done() + }) + }) + }) + + describe('job.info()', function () { + it('returns the scan status and progress', function (done) { + const scan: ScanType = client.scan(helper.namespace, 
testSet) + scan.background('udf', 'noop', function (error: any, job: any) { + if (error) throw error + job.info(function (error: any, info: any) { + if (error) throw error + expect(info.status).to.be.within(Aerospike.jobStatus.INPROGRESS, Aerospike.jobStatus.COMPLETED) + expect(info.recordsRead).to.be.within(0, numberOfRecords) + expect(info.progressPct).to.be.within(0, 100) + done() + }) + }) + }) + }) +}) diff --git a/ts-test/tests/select.ts b/ts-test/tests/select.ts new file mode 100644 index 000000000..e1f049486 --- /dev/null +++ b/ts-test/tests/select.ts @@ -0,0 +1,125 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ + +import Aerospike, { status as statusModule, Client as Cli, Key, RecordMetadata, AerospikeBins, AerospikeError, AerospikeRecord, ReadPolicy} from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const keygen: any = helper.keygen +const recgen: any = helper.recgen +const valgen: any = helper.valgen + +const status: typeof statusModule = Aerospike.status + +describe('client.select()', function () { + const client: Cli = helper.client + + it('should read the record', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/select/' })() + const meta: RecordMetadata = { ttl: 1000 } + const bins: AerospikeBins = recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() })() + const selected: string[] = ['i', 's'] + + client.put(key, bins, meta, function (err?: AerospikeError) { + if (err) throw err + + client.select(key, selected, function (err?: AerospikeError, record?: AerospikeRecord) { + if (err) throw err + expect(record?.bins).to.have.all.keys(selected) + + for (const bin in selected) { + expect(record?.bins[bin]).to.equal(bins[bin]) + } + + client.remove(key, function (err?: AerospikeError) { + if (err) throw err + done() + }) + }) + }) + }) + + it('should fail - when a select is called without key', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/select/' })() + const meta: RecordMetadata = { ttl: 1000 } + const bins: AerospikeBins = recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() })() + const selected: string[] = ['i', 's'] + + client.put(key, bins, meta, function (err: any) { + if (err) throw err + + client.select({ ns: helper.namespace, set: helper.set }, selected, function (err: any) { + expect(err.code).to.equal(status.ERR_PARAM) + + 
client.remove(key, function (err: any) { + if (err) throw err + done() + }) + }) + }) + }) + + it('should not find the record', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/select/not_found/' })() + + client.select(key, ['i'], function (err: any, record: any) { + expect(err.code).to.equal(status.ERR_RECORD_NOT_FOUND) + done() + }) + }) + + it('should read the record w/ a key send policy', function (done) { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/select/' })() + const meta: RecordMetadata = { ttl: 1000 } + const bins: AerospikeBins = recgen.record({ i: valgen.integer(), s: valgen.string(), b: valgen.bytes() })() + const selected: string[] = ['i', 's'] + const policy: ReadPolicy = new Aerospike.ReadPolicy({ + key: Aerospike.policy.key.SEND + }) + + client.put(key, bins, meta, function (err: any) { + if (err) throw err + + client.select(key, selected, policy, function (err: any, record: any) { + if (err) throw err + expect(record.bins).to.have.all.keys(selected) + + for (const bin in selected) { + expect(record.bins[bin]).to.equal(bins[bin]) + } + + client.remove(key, function (err: any) { + if (err) throw err + done() + }) + }) + }) + }) + + it('should return a Promise that resolves to a Record', function () { + const key: Key = keygen.string(helper.namespace, helper.set, { prefix: 'test/select/' })() + + return client.put(key, { i: 42, s: 'abc', f: 3.1416 }) + .then(() => client.select(key, ['i', 'f'])) + .then((record: any) => expect(record.bins).to.eql({ i: 42, f: 3.1416 })) + .then(() => client.remove(key)) + }) +}) diff --git a/ts-test/tests/stats.ts b/ts-test/tests/stats.ts new file mode 100644 index 000000000..f319e7a88 --- /dev/null +++ b/ts-test/tests/stats.ts @@ -0,0 +1,57 @@ +// ***************************************************************************** +// Copyright 2018-2023 Aerospike, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +/* global expect */ +/* eslint-disable no-unused-expressions */ + + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +const client = helper.client + +describe('Client#stats', function () { + before(function (done) { + // Send an async command to each node ensure we have at least 1 async + // connection open. At least 1 sync connection has been opened to send some + // info commands. 
+ client.scan(helper.namespace, 'noSuchSet').foreach().on('end', done) + }) + + it('returns command queue stats', function () { + const stats = client.stats() + expect(stats.commands).to.not.be.empty + expect(stats.commands.inFlight).to.be.at.least(0) + expect(stats.commands.queued).to.be.at.least(0) + }) + + it('returns cluster node stats', function () { + const stats = client.stats() + expect(stats.nodes).to.be.an('array').that.is.not.empty + + const node: any = stats.nodes.pop() + expect(node.name).to.be.a('string').of.length(15) + for (const connStats of [node.syncConnections, node.asyncConnections]) { + expect(connStats.inPool).to.be.at.least(1) + expect(connStats.inUse).to.be.at.least(0) + expect(connStats.opened).to.be.at.least(1) + expect(connStats.closed).to.be.at.least(0) + } + }) +}) diff --git a/ts-test/tests/stress/perfdata.js b/ts-test/tests/stress/perfdata.js new file mode 100644 index 000000000..0e4bf934f --- /dev/null +++ b/ts-test/tests/stress/perfdata.js @@ -0,0 +1,92 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +const { format } = require('util') + +const helper = require('../test_helper') + +const keygen = helper.keygen +const metagen = helper.metagen +const putgen = helper.putgen +const recgen = helper.recgen +const valgen = helper.valgen + +// Creates a new timer interval and passes the elapsed time in milliseconds +// into each invocation of the interval callback. +function interval (duration, callback) { + const obj = {} + obj.startTime = process.hrtime() + obj.elapsed = function () { + const diff = process.hrtime(obj.startTime) + return Math.round(diff[0] * 1000 + (diff[1] / 1e6)) + } + obj.call = function () { callback(obj.elapsed()) } + obj.timer = setInterval(obj.call, duration).unref() + obj.clear = function () { clearInterval(obj.timer) } + return obj +} + +// Generates records with specific record size +function generate (ns, set, numberOfRecords, recordSize, done) { + const numBinsPerRecord = recordSize[0] + const sizePerBin = recordSize[1] + const bins = { id: valgen.integer({ random: false, min: 0 }) } + for (let i = 0; i < numBinsPerRecord; i++) { + bins['b' + i] = valgen.bytes({ length: { min: sizePerBin, max: sizePerBin } }) + } + const generators = { + keygen: keygen.string(ns, set, { length: { min: 20, max: 20 } }), + recgen: recgen.record(bins), + metagen: metagen.constant({}), + throttle: { + limit: 5000, + interval: 1000 + } + } + let keysCreated = 0 + const uniqueKeys = new Set() + const timer = interval(10 * 1000, function (ms) { + const throughput = Math.round(1000 * keysCreated / ms) + console.info('%s ms: %d records created (%d records / second) - %s', ms, keysCreated, throughput, memoryUsage()) + }) + putgen.put(numberOfRecords, generators, function (key) { + if (key) { + keysCreated++ + uniqueKeys.add(key.key) + } else { + timer.call() + timer.clear() + done(uniqueKeys.size) // actual number of records might be slightly less due to duplicate 
keys + } + }) +} + +const MEGA = 1024 * 1024 // bytes in a MB +function memoryUsage () { + const memUsage = process.memoryUsage() + const rss = Math.round(memUsage.rss / MEGA) + const heapUsed = Math.round(memUsage.heapUsed / MEGA) + const heapTotal = Math.round(memUsage.heapTotal / MEGA) + return format('mem: %d MB, heap: %d / %d MB', rss, heapUsed, heapTotal) +} + +module.exports = { + interval, + generate, + memoryUsage +} diff --git a/ts-test/tests/stress/query.js b/ts-test/tests/stress/query.js new file mode 100644 index 000000000..aaac2a8b9 --- /dev/null +++ b/ts-test/tests/stress/query.js @@ -0,0 +1,126 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, before */ + +const Aerospike = require('../../lib/aerospike') +const helper = require('../test_helper') +const perfdata = require('./perfdata') + +const fs = require('fs') + +describe('client.query()', function () { + this.enableTimeouts(false) + const client = helper.client + let testSet = 'test/queryperf' + const idxKey = new Aerospike.Key(helper.namespace, helper.set, 'queryPerfData') + const recordSize = [8, 128] // 8 x 128 bytes ≈ 1 kb / record + let numberOfRecords = 1e6 // 1 Mio. 
records at 1 kb ≈ 1 GB total data size + + // Execute query using given onData handler to process each scanned record + function executeQuery (onData, done) { + const query = client.query(helper.namespace, testSet) + query.where(Aerospike.filter.range('id', 0, numberOfRecords)) + const stream = query.foreach() + + let received = 0 + const timer = perfdata.interval(10000, function (ms) { + const throughput = Math.round(1000 * received / ms) + console.log('%d ms: %d records received (%d rps; %s)', + ms, received, throughput, perfdata.memoryUsage()) + }) + + stream.on('error', function (err) { throw err }) + stream.on('data', function (record) { received++ }) + stream.on('end', function () { + timer.call() + timer.clear() + expect(received).to.be(numberOfRecords) + done() + }) + stream.on('data', onData) + } + + // Create test data + before(function (done) { + client.get(idxKey, function (err, record) { + if (err && err.code !== Aerospike.status.AEROSPIKE_ERR_RECORD_NOT_FOUND) { + throw err + } else if (err) { + // perf test data does not yet exist - generate it + console.info('generating %d records as performance test data in set %s', numberOfRecords, testSet) + console.time('generating performance test data') + perfdata.generate(helper.namespace, testSet, numberOfRecords, recordSize, function (recordsGenerated) { + console.timeEnd('generating performance test data') + numberOfRecords = recordsGenerated // might be slightly less due to duplicate keys + const index = { + ns: helper.namespace, + set: testSet, + bin: 'id', + index: 'queryPerfIndex', + datatype: Aerospike.indexDataType.NUMERIC + } + console.info('generating secondary index (SI) on performance data') + console.time('creating SI') + client.createIndex(index, function (err, job) { + if (err) throw err + setTimeout(function () { + job.waitUntilDone(function () { + console.timeEnd('creating SI') + client.put(idxKey, { norec: numberOfRecords, set: testSet }, done) + }) + }, 5000) + }) + }) + } else { + // perf
test data already exists + numberOfRecords = record.bins.norec + testSet = record.bins.set + console.info('using performance test data from set %s (%d records)', testSet, numberOfRecords) + done() + } + }) + }) + + // Test definitions + it('queries ' + numberOfRecords + ' records with noop', function (done) { + const noop = function () {} + executeQuery(noop, done) + }) + + it('queries ' + numberOfRecords + ' records with busy loop', function (done) { + const busy = function () { + // busy loop + for (let x = 0; x < 1e5; x++) {} // eslint-disable-line + } + executeQuery(busy, done) + }) + + it('queries ' + numberOfRecords + ' records with file IO', function (done) { + const file = 'query-stress-test.log' + const stream = fs.createWriteStream(file) + stream.on('error', function (err) { throw err }) + const fileAppend = function (record) { + stream.write(JSON.stringify(record) + '\n') + } + executeQuery(fileAppend, function () { + stream.end() + fs.unlink(file, done) + }) + }) +}) diff --git a/ts-test/tests/stress/scan.js b/ts-test/tests/stress/scan.js new file mode 100644 index 000000000..9a18fc182 --- /dev/null +++ b/ts-test/tests/stress/scan.js @@ -0,0 +1,152 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +/* global expect, describe, it, before */ + +const Aerospike = require('../../lib/aerospike') +const helper = require('../test_helper') +const perfdata = require('./perfdata') + +const fs = require('fs') + +const mega = 1024 * 1024 // bytes in a MB + +describe('client.scan()', function () { + this.enableTimeouts(false) + const client = helper.client + let testSet = 'test/scanperf' + const idxKey = new Aerospike.Key(helper.namespace, helper.set, 'scanPerfData') + const recordSize = [8, 128] // 8 x 128 bytes ≈ 1 kb / record + let numberOfRecords = 1e6 // 1 Mio. records at 1 kb ≈ 1 GB total data size + const webWorkerThreads = 10 // number of WebWorker threads to use + const reportingInterval = 10000 // report progress every 10 seconds + + // Execute scan using given onData handler to process each scanned record + function executeScan (onData, done) { + const scan = client.scan(helper.namespace, testSet) + scan.concurrent = true + const stream = scan.foreach() + + let received = 0 + const timer = perfdata.interval(reportingInterval, function (ms) { + const throughput = Math.round(1000 * received / ms) + console.log('%d ms: %d records received (%d rps; %s)', + ms, received, throughput, perfdata.memUsage()) + }) + + stream.on('error', function (err) { throw err }) + stream.on('data', function (record) { received++ }) + stream.on('end', function () { + timer.call() + timer.clear() + expect(received).to.be(numberOfRecords) + done() + }) + stream.on('data', onData) + } + + // Create test data + before(function (done) { + client.get(idxKey, function (err, record) { + if (err && err.code !== Aerospike.status.AEROSPIKE_ERR_RECORD_NOT_FOUND) { + throw err + } else if (err) { + // perf test data does not yet exist - generate it + console.info('generating %d records as performance test data in set %s', numberOfRecords, testSet) + console.time('generating performance test data') + 
perfdata.generate(helper.namespace, testSet, numberOfRecords, recordSize, function (recordsGenerated) { + console.timeEnd('generating performance test data') + numberOfRecords = recordsGenerated // might be slightly less due to duplicate keys + client.put(idxKey, { norec: numberOfRecords, set: testSet }, done) + }) + } else { + // perf test data already exists + numberOfRecords = record.norec + testSet = record.set + console.info('using performance test data from set %s (%d records)', testSet, numberOfRecords) + done() + } + }) + }) + + // Test definitions + it('scans ' + numberOfRecords + ' records with noop', function (done) { + const noop = function () {} + executeScan(noop, done) + }) + + it('scans ' + numberOfRecords + ' records with busy loop', function (done) { + const busy = function () { + // busy loop + for (let x = 0; x < 1e5; x++) {} // eslint-disable-line + } + executeScan(busy, done) + }) + + it('scans ' + numberOfRecords + ' records with busy loop in WebWorker', function (done) { + let Worker + try { + Worker = require('webworker-threads') + } catch (err) { + console.error('gem install webworker-threads to run this test!') + this.skip('gem install webworker-threads to run this test!') + return + } + function doWork () { + // busy loop + for (let x = 0; x < 1e5; x++) {} // eslint-disable-line + } + const threadPool = Worker.createPool(webWorkerThreads).all.eval(doWork) + console.log('created WebWorker pool with %s threads', webWorkerThreads) + let processed = 0 + const timer = perfdata.interval(reportingInterval, function (ms) { + const throughput = Math.round(1000 * processed / ms) + const memUsage = process.memoryUsage() + const rss = Math.round(memUsage.rss / mega) + const heapUsed = Math.round(memUsage.heapUsed / mega) + const heapTotal = Math.round(memUsage.heapTotal / mega) + console.log('%d ms: %d records processed (%d rps; mem: %d MB, heap: %d / %d MB)', + ms, processed, throughput, rss, heapUsed, heapTotal) + }) + const worker = function
(record, meta, key) { + threadPool.any.eval('doWork()', function (err) { + if (err) throw err + if (++processed === numberOfRecords) { + timer.call() + timer.clear() + threadPool.destroy() + done() + } + }) + } + executeScan(worker, function () {}) + }) + + it('scans ' + numberOfRecords + ' records with file IO', function (done) { + const file = 'scan-stress-test.log' + const stream = fs.createWriteStream(file) + stream.on('error', function (err) { throw err }) + const fileAppend = function (record) { + stream.write(JSON.stringify(record) + '\n') + } + executeScan(fileAppend, function () { + stream.end() + fs.unlink(file, done) + }) + }) +}) diff --git a/ts-test/tests/test_helper.ts b/ts-test/tests/test_helper.ts new file mode 100644 index 000000000..e5a44f7f6 --- /dev/null +++ b/ts-test/tests/test_helper.ts @@ -0,0 +1,265 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// **************************************************************************** + +'use strict' + +import Aerospike, {Client, Config, Job, IndexJob, indexDataType, indexType, cdt, InfoAllResponse} from 'aerospike'; + +import options from './util/options'; +import * as semver from 'semver'; +import { SemVer } from 'semver'; +import * as path from 'path'; +import { runInNewProcessFn } from './util/run_in_new_process'; +import { Suite } from 'mocha'; + +import * as chai from 'chai'; +const expect: any = chai.expect; +(global as any).expect = expect; + +export {options} +export const namespace = options.namespace +export const set = options.set + +import * as keygen from './generators/key'; +import * as metagen from './generators/metadata'; +import * as recgen from './generators/record'; +import * as valgen from './generators/value'; +import * as putgen from './generators/put'; +import * as util from './util'; + +export { keygen, metagen, recgen, valgen, putgen, util }; + +const config: Config = options.getConfig() +const client: Client = Aerospike.client(config) +export {client, config} + +Aerospike.setDefaultLogging(config.log ?? 
{}) + + + + + class UDFHelper { + private client: Client; + constructor(client: Client) { + this.client = client; + } + + register(filename: string) { + const script = path.join(__dirname, filename); + return this.client.udfRegister(script) + .then((job: Job) => job.wait(50)); + } + + remove(filename: string) { + return this.client.udfRemove(filename) + .then((job: Job) => job.wait(50)) + .catch((error: any) => { + if (error.code !== Aerospike.status.ERR_UDF) { + return Promise.reject(error); + } + }); + } + } + + class IndexHelper { + private client: Client; + constructor(client: Client) { + this.client = client; + } + + create(indexName: string, setName: string, binName: string, dataType: indexDataType, indexType: indexType, context?: cdt.Context) { + const index = { + ns: options.namespace, + set: setName, + bin: binName, + index: indexName, + type: indexType || Aerospike.indexType.DEFAULT, + datatype: dataType, + context + }; + return this.client.createIndex(index) + .then((job: IndexJob) => job.wait(10)) + .catch((error: any) => { + if (error.code === Aerospike.status.ERR_INDEX_FOUND) { + // ignore - index already exists + } else { + return Promise.reject(error); + } + }); + } + + remove(indexName: string) { + return this.client.indexRemove(options.namespace, indexName) + .catch((error: any) => { + if (error.code === Aerospike.status.ERR_INDEX_NOT_FOUND) { + // ignore - index does not exist + } else { + return Promise.reject(error); + } + }); + } + } + + + class ServerInfoHelper { + private features: Set; + private edition: string; + private build: string; + private namespaceInfo: { [key: string]: any }; + private cluster: any[]; + private client: Client; + constructor(client: Client) { + this.features = new Set(); + this.edition = 'community'; + this.build = ''; + this.namespaceInfo = {}; + this.cluster = []; + this.client = client; + } + + hasFeature(feature: string) { + return this.features.has(feature); + } + + isEnterprise() { + return 
this.edition.match('Enterprise'); + } + + isVersionInRange(versionRange: string) { + const version: string = process.env.AEROSPIKE_VERSION_OVERRIDE || this.build; + const semverVersion: SemVer | null = semver.coerce(version); // truncate a build number like "4.3.0.2-28-gdd9f506" to just "4.3.0" + return semver.satisfies(semverVersion!, versionRange); + } + + supportsTtl() { + const { config } = this.namespaceInfo; + return config['nsup-period'] > 0 || config['allow-ttl-without-nsup'] === 'true'; + } + + fetchInfo() { + return this.client.infoAll('build\nedition\nfeatures') + .then((results: InfoAllResponse[]) => { + results.forEach((response: InfoAllResponse) => { + const info = Aerospike.info.parse(response.info); + this.edition = info.edition; + this.build = info.build; + const features = info.features; + if (Array.isArray(features)) { + features.forEach(feature => this.features.add(feature)); + } + }); + }); + } + + fetchNamespaceInfo(ns: string) { + const nsKey = `namespace/${ns}`; + const cfgKey = `get-config:context=namespace;id=${ns}`; + return this.client.infoAny([nsKey, cfgKey].join('\n')) + .then((results: string) => { + const info = Aerospike.info.parse(results); + this.namespaceInfo = { + info: info[nsKey], + config: info[cfgKey], + }; + }); + } + + randomNode() { + const nodes = this.client.getNodes(); + const i = Math.floor(Math.random() * nodes.length); + return nodes[i]; + } + } + + + const udfHelper = new UDFHelper(client) + const indexHelper = new IndexHelper(client) + const serverInfoHelper = new ServerInfoHelper(client) + + export const udf = udfHelper + export const index = indexHelper + export const cluster = serverInfoHelper + + export function runInNewProcess(fn: Function, data: any) { + if (data === undefined) { + data = null + } + const env = { + NODE_PATH: path.join(process.cwd(), 'node_modules') + } + return runInNewProcessFn(fn, env, data) + } + + export function skip(this: any, ctx: Suite, message: string) { + ctx.beforeEach(function 
(this: any) { + this.skip(message) + }) + } + + export function skipIf (this: any, ctx: Suite, condition: any, message: string) { + ctx.beforeEach(function (this: any) { + let skip = condition + if (typeof condition === 'function') { + skip = condition() + } + if (skip) { + this.skip(message) + } + }) + } + + export function skipUnless (ctx: Suite, condition: any, message: string) { + if (typeof condition === 'function') { + skipIf(ctx, () => !condition(), message) + } else { + skipIf(ctx, !condition, message) + } + } + + export function skipUnlessSupportsFeature (this: any, feature: string, ctx: Suite) { + skipUnless(ctx, () => this.cluster.hasFeature(feature), `requires server feature "${feature}"`) + } + + export function skipUnlessEnterprise(this: any, ctx: Suite) { + skipUnless(ctx, () => this.cluster.isEnterprise(), 'requires enterprise edition') + } + + export function skipUnlessVersion(this: any, versionRange: any, ctx: Suite) { + skipUnless(ctx, () => this.cluster.isVersionInRange(versionRange), `cluster version does not meet requirements: "${versionRange}"`) + } + + export function skipUnlessSupportsTtl(this: any, ctx: Suite) { + skipUnless(ctx, () => this.cluster.supportsTtl(), 'test namespace does not support record TTLs') + } + + if (process.env.GLOBAL_CLIENT !== 'false') { + /* global before */ + before(() => client.connect() + .then(() => serverInfoHelper.fetchInfo()) + .then(() => serverInfoHelper.fetchNamespaceInfo(options.namespace)) + .catch((error: any) => { + console.error('ERROR:', error) + console.error('CONFIG:', client.config) + throw error + }) + ) + + /* global after */ + after(function (done) { + client.close() + done() + }) + } diff --git a/ts-test/tests/truncate.ts b/ts-test/tests/truncate.ts new file mode 100644 index 000000000..723064d51 --- /dev/null +++ b/ts-test/tests/truncate.ts @@ -0,0 +1,109 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* eslint-env mocha */ +import Aerospike, { Client, BatchResult, AerospikeRecord, Key } from 'aerospike'; + +import * as helper from './test_helper' + +const { sleep } = helper.util + +const setgen = helper.valgen.string({ + prefix: 'test/trunc/', + random: true, + length: { min: 6, max: 6 } +}) + +const keygen = helper.keygen +const metagen = helper.metagen +const recgen = helper.recgen +const putgen = helper.putgen + +describe('client.truncate() #slow', function () { + helper.skipUnlessVersion('>= 3.12.0', this) + + const client: Client = helper.client + + // Generates a number of records; the callback function is called with a list + // of the record keys. + function genRecords (kgen: () => Key, noRecords: any) { + const generators = { + keygen: kgen, + recgen: recgen.constant({ a: 'foo', b: 'bar' }), + metagen: metagen.constant({ ttl: 300 }) + } + return putgen.put(noRecords, generators, {}) + } + + // Checks to verify that records that are supposed to have been truncated + // are gone and that records that are supposed to remain still exist. If some + // truncated records still exist it will try again every pollInt ms. 
+ async function checkRecords (truncated: any, remaining: any, pollInt: number): Promise { + const results: any = await client.batchRead((truncated || []).concat(remaining || [])); + for (const result of results) { + const expectExist = !!remaining.find((record: AerospikeRecord) => record.key.equals(result.record.key)) + switch (result.status) { + case Aerospike.status.OK: + if (!expectExist) { + await sleep(pollInt) + return checkRecords(truncated, remaining, pollInt) + } + break + case Aerospike.status.ERR_RECORD_NOT_FOUND: + if (expectExist) throw new Error("Truncate removed record it wasn't supposed to: " + result.record.key) + break + default: + throw new Error('Unexpected batchRead status code: ' + result.status) + } + } + } + + it('deletes all records in the set', async function () { + const ns: string = helper.namespace + const set: string = setgen() + const noRecords: number = 5 + const pollIntMs: number = 10 // Poll interval in ms to check whether records have been removed + + const kgen: () => Key = keygen.string(ns, set, { prefix: 'test/trunc/', random: false }) + const records: AerospikeRecord[] = await genRecords(kgen, noRecords) + await sleep(5) + await client.truncate(ns, set, 0) + await checkRecords(records, [], pollIntMs) + }) + + it('deletes all records with an older update timestamp', async function () { + this.timeout(15000) + const ns = helper.namespace + const set = setgen() + const noRecordsToDelete = 5 + const noRecordsToRemain = 2 + const pollIntMs = 100 // Poll interval in ms to check whether records have been removed + const allowanceMs = 5000 // Test will fail if client and server clocks differ by more than this many ms! 
+ + let kgen = keygen.string(ns, set, { prefix: 'test/trunc/del/', random: false }) + const batchToDelete = await genRecords(kgen, noRecordsToDelete) + await sleep(allowanceMs) + const timeNanos = Date.now() * 1000000 + await sleep(allowanceMs) + kgen = keygen.string(ns, set, { prefix: 'test/trunc/rem/', random: false }) + const batchToRemain = await genRecords(kgen, noRecordsToRemain) + await sleep(5) + await client.truncate(ns, set, timeNanos) + await checkRecords(batchToDelete, batchToRemain, pollIntMs) + }) +}) diff --git a/ts-test/tests/types/generators.types.ts b/ts-test/tests/types/generators.types.ts new file mode 100644 index 000000000..22366fcd2 --- /dev/null +++ b/ts-test/tests/types/generators.types.ts @@ -0,0 +1,70 @@ +// types/arrayDefaults.ts + +export type StringDefaults = { + random: boolean; + length: { + min: number; + max: number; + }; + prefix: string; + suffix: string; + charset: string; +}; + +export type BytesDefaults = { + length: { + min: number; + max: number; + }; + byte: { + min: number; + max: number; + }; +}; + +export type IntegerDefaults = { + random: boolean; + min: number; + max: number; +}; + +export type DoubleDefaults = { + random: boolean; + min: number; + max: number; + step: number; +}; + +export type GeneratorFunction = () => number | string | Uint8Array; + +export type ArrayDefaults = { + values: GeneratorFunction[]; +}; + +export interface Options { + help: boolean; + host: string | null; + port: number | null; + totalTimeout: number; + log: number; + log_file: number; + namespace: string; + set: string; + user: string | null; + password: string | null; + clusterName?: string; + cafile?: string; + keyfile?: string; + keyfilePassword?: string; + certfile?: string; + auth?: string; +} + +export type Defaults = StringDefaults | BytesDefaults | IntegerDefaults | DoubleDefaults | ArrayDefaults; + +export interface TypeOptions { + defaults: Defaults +} + +export interface ExtendedOptions extends Options, TypeOptions {} + diff 
--git a/ts-test/tests/udf.lua b/ts-test/tests/udf.lua new file mode 100644 index 000000000..e28a504ed --- /dev/null +++ b/ts-test/tests/udf.lua @@ -0,0 +1,61 @@ +function withArguments(rec, value) + return value +end + +function withoutArguments(rec) + return 1 +end + +function noop(rec) +end + + +function createRecord(rec, binName, binValue) + rec[binName] = binValue + if (aerospike:exists(rec)) then + status = aerospike:update(rec) + else + status = aerospike:create(rec) + end + return status +end + +function updateRecord(rec, binName, binValue) + rec[binName] = binValue + aerospike:update(rec) + return rec +end + +function count(stream) + local function mapper(rec) + return 1 + end + local function reducer(v1, v2) + return v1 + v2 + end + return stream : map(mapper) : reduce(reducer) +end + +function countGreaterThan(stream, binName, value) + local function mapper(rec) + if rec[binName] > value then + return 1 + else + return 0 + end + end + local function reducer(v1, v2) + return v1 + v2 + end + return stream : map(mapper) : reduce(reducer) +end + +function even(stream, bin) + local function filt(rec) + return rec.value % 2 == 0 + end + local function mapper(rec) + return rec.value + end + return stream : filter(filt) : map(mapper) +end diff --git a/ts-test/tests/udf.ts b/ts-test/tests/udf.ts new file mode 100644 index 000000000..2c018eacf --- /dev/null +++ b/ts-test/tests/udf.ts @@ -0,0 +1,140 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global context, it, expect */ + +const path = require('path') + +import Aerospike, { AerospikeError, Job } from 'aerospike'; + +import { expect } from 'chai'; +import * as helper from './test_helper'; + +context('registering/unregistering UDF modules', function () { + const client = helper.client + const module = 'udf.lua' + const filename = path.join(__dirname, module) + + it('should register and then remove a module', function (done) { + client.udfRegister(filename, function (err?: AerospikeError, registerJob?: Job) { + if (err) throw err + registerJob?.wait(10, function (err?: AerospikeError) { + if (err) throw err + client.udfRemove(module, function (err?: AerospikeError, removeJob?: Job) { + if (err) throw err + removeJob?.wait(10, done) + }) + }) + }) + }) + + it('should register a module as Lua language', function (done) { + client.udfRegister(filename, Aerospike.language.LUA, function (err?: AerospikeError, registerJob?: Job) { + if (err) throw err + registerJob?.wait(10, function (err?: AerospikeError) { + if (err) throw err + client.udfRemove(module, function (err?: AerospikeError, removeJob?: Job) { + if (err) throw err + removeJob?.wait(10, done) + }) + }) + }) + }) + + it('should register a module with an info policy', function (done) { + const policy = new Aerospike.InfoPolicy({ + timeout: 1000, + sendAsIs: true, + checkBounds: false + }) + + client.udfRegister(filename, policy, function (err?: AerospikeError, registerJob?: Job) { + 
if (err) throw err + registerJob?.wait(10, function (err?: AerospikeError) { + if (err) throw err + client.udfRemove(module, function (err?: AerospikeError, removeJob?: Job) { + if (err) throw err + removeJob?.wait(10, done) + }) + }) + }) + }) + + it('should register a module as Lua language with an info policy', function (done) { + const policy = new Aerospike.InfoPolicy({ + timeout: 1000, + sendAsIs: true, + checkBounds: false + }) + + client.udfRegister(filename, Aerospike.language.LUA, policy, function (err?: AerospikeError, registerJob?: Job) { + if (err) throw err + registerJob?.wait(10, function (err?: AerospikeError) { + if (err) throw err + client.udfRemove(module, function (err?: AerospikeError, removeJob?: Job) { + if (err) throw err + removeJob?.wait(10, done) + }) + }) + }) + }) + + it('returns a Promise if no callback function is passed', function () { + return client.udfRegister(filename) + .then(job => job.wait(10)) + .then(() => client.udfRemove(module)) + .then(job => job.wait(10)) + }) + + context('error handling', function () { + it('should fail to register an non-existent module', function (done) { + client.udfRegister('no-such-udf.lua', function (err?: AerospikeError) { + expect(err?.code).to.equal(Aerospike.status.ERR_CLIENT) + done() + }) + }) + + it('should fail to register module with invalid language', function (done) { + client.udfRegister(filename, -99, function (err?: AerospikeError) { + expect(err?.code).to.equal(Aerospike.status.ERR_PARAM) + done() + }) + }) + + context('removing a non-existent module', function () { + context('server version 4.5.1 and later', function () { + helper.skipUnlessVersion('>= 4.5.1', this) + + it('should not fail when removing a non-existent module', function () { + return client.udfRemove('no-such-udf.lua').then((job: Job) => job.waitUntilDone()) + }) + }) + + context('server version 4.5.0 and earlier', function () { + helper.skipUnlessVersion('< 4.5.1', this) + + it('should return an error when 
removing a non-existent module', function (done) { + client.udfRemove('no-such-udf.lua', function (error?: AerospikeError) { + expect(error).to.exist.and.have.property('code', Aerospike.status.ERR_UDF) + done() + }) + }) + }) + }) + }) +}) diff --git a/ts-test/tests/util/index.ts b/ts-test/tests/util/index.ts new file mode 100644 index 000000000..3ff7d6ecd --- /dev/null +++ b/ts-test/tests/util/index.ts @@ -0,0 +1,4 @@ +export * from './options'; +export * from './run_in_new_process'; +export * from './sleep' +export * from './statefulAsyncTest' \ No newline at end of file diff --git a/ts-test/tests/util/options.ts b/ts-test/tests/util/options.ts new file mode 100644 index 000000000..49865df16 --- /dev/null +++ b/ts-test/tests/util/options.ts @@ -0,0 +1,207 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +import * as Aerospike from 'aerospike'; + +import { ConfigOptions, Host, TLSInfo, BasePolicyOptions } from 'aerospike'; + +import yargs, {Argv} from 'yargs'; +import { hideBin } from 'yargs/helpers'; + + +import * as fs from 'fs'; // semver is likely the default export, but it may have named exports as well +import * as path from 'path'; // semver is likely the default export, but it may have named exports as well + +// ***************************************************************************** +// Options parsing +// ***************************************************************************** + +const parser: yargs.Argv = yargs(hideBin(process.argv)) + .usage('$0 [options]') + .options({ + help: { + type: 'boolean', + describe: 'Display this message.' + }, + host: { + alias: 'h', + type: 'string', + default: null, + describe: 'Aerospike database address.' + }, + port: { + alias: 'p', + type: 'number', + default: null, + describe: 'Aerospike database port.' + }, + totalTimeout: { + alias: 't', + type: 'number', + default: 1000, + describe: 'Timeout in milliseconds.' + }, + log: { + alias: 'l', + type: 'number', + default: Aerospike.log.WARN, + describe: 'Log level [0-5]' + }, + log_file: { + alias: 'f', + type: 'number', + default: fs.openSync('test.log', 'a'), + describe: 'Log file to redirect the log messages' + }, + namespace: { + alias: 'n', + type: 'string', + default: 'test', + describe: 'Namespace for the keys.' + }, + set: { + alias: 's', + type: 'string', + default: 'demo', + describe: 'Set for the keys.' 
+ }, + user: { + alias: 'U', + type: 'string', + default: null, + describe: 'Username to connect to a secure cluster' + }, + password: { + alias: 'P', + type: 'string', + default: null, + describe: 'Password to connect to a secure cluster' + }, + clusterName: { + type: 'string', + describe: 'Name of the cluster to join' + }, + cafile: { + type: 'string', + describe: 'Path to a trusted CA certificate file' + }, + keyfile: { + type: 'string', + describe: 'Path to the client\'s key for mutual auth' + }, + keyfilePassword: { + type: 'string', + describe: 'Decryption password for the client\'s key file' + }, + certfile: { + type: 'string', + describe: 'Path to the client\'s certificate chain file for mutual auth' + }, + auth: { + type: 'number', + describe: 'Specify client authentication mode' + } + }); + + +let options: any +if (process.env.OPTIONS) { + const rawOptions: string[] = process.env.OPTIONS.trim().split(' ') + options = parser.parse(rawOptions) +} else { + options = parser.argv +} + +if (options.help === true) { + parser.showHelp() + process.exit(0) +} + +// enable debug stacktraces +process.env.AEROSPIKE_DEBUG_STACKTRACES = process.env.AEROSPIKE_DEBUG_STACKTRACES || 'true' + +function testDir (): string { + return path.resolve( __dirname , '..'); +} + +options.getConfig = function (): ConfigOptions { + const defaultPolicy: BasePolicyOptions = { + totalTimeout: options.totalTimeout, + maxRetries: 6 + } + const config = { + log: { + level: options.log, + file: options.log_file + }, + policies: { + apply: defaultPolicy, + batch: defaultPolicy, + info: defaultPolicy, + operate: defaultPolicy, + query: defaultPolicy, + read: defaultPolicy, + remove: defaultPolicy, + scan: defaultPolicy, + write: defaultPolicy + }, + modlua: { + userPath: testDir() + } + } as ConfigOptions; + + if (options.host !== null) { + const host = { + addr: options.host, + port: options.port || 3000, + } as Host; + config.hosts = [host] + } else if (process.env.AEROSPIKE_HOSTS) { + 
config.hosts = process.env.AEROSPIKE_HOSTS + } + + if (options.user !== null) { + config.user = options.user + } + if (options.password !== null) { + config.password = options.password + } + + if (options.clusterName) { + config.clusterName = options.clusterName + } + + if (options.cafile) { + config.tls = { + enable: true, + cafile: options.cafile, + certfile: options.certfile, + keyfile: options.keyfile, + keyfilePassword: options.keyfilePassword + } as TLSInfo; + } + + if (options.auth) { + config.authMode = options.auth + } + // Disable maxErrorRate + config.maxErrorRate = 0 + return config +} + +export default options; \ No newline at end of file diff --git a/ts-test/tests/util/run_in_new_process.ts b/ts-test/tests/util/run_in_new_process.ts new file mode 100644 index 000000000..27cf79719 --- /dev/null +++ b/ts-test/tests/util/run_in_new_process.ts @@ -0,0 +1,70 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ***************************************************************************** + +'use strict' + +import * as childProcess from 'child_process'; +import { ChildProcess } from 'child_process'; +import tmp from 'tmp'; +import fs from 'fs'; + +function generateTestSource (fn: Function, data: any) { + return ` + 'use strict' + const Aerospike = require(process.cwd()) + const fn = ${fn.toString()} + const data = JSON.parse(\`${JSON.stringify(data)}\`) + const report = (result) => new Promise((resolve) => process.send(result, resolve)) + + ;(async () => { + try { + const result = await fn(Aerospike, data) + await report({ result }) + } catch (error) { + await report({ error }) + } + process.exit() + })() +` +} + +function createTempFile(fn: Function, data: any): string { + const source: string = generateTestSource(fn, data) + const temp: tmp.FileResult = tmp.fileSync({ postfix: '.js' }) + fs.writeSync(temp.fd, source) + return temp.name +} + +function forkAndRun(fn: Function, env: NodeJS.ProcessEnv, data: any): ChildProcess { + const temp: string = createTempFile(fn, data) + return childProcess.fork(temp, { env }) +} + +export function runInNewProcessFn(fn: Function, env: NodeJS.ProcessEnv, data: any): Promise { + return new Promise((resolve, reject) => { + const child: ChildProcess = forkAndRun(fn, env, data) + child.on('message', (message: { error?: string, result?: T }) => { + child.disconnect() + if (message.error) { + reject(new Error(message.error)) + } else { + resolve(message.result as T) + } + }); + child.on('error', (error: Error) => + console.error('Error for PID %s: %s', child.pid, error.message)) + }); +} diff --git a/ts-test/tests/util/sleep.ts b/ts-test/tests/util/sleep.ts new file mode 100644 index 000000000..7e06f699a --- /dev/null +++ b/ts-test/tests/util/sleep.ts @@ -0,0 +1,3 @@ +export function sleep (ms: number) { + return new Promise((resolve) => setTimeout(resolve, ms)) +} diff --git a/ts-test/tests/util/statefulAsyncTest.ts 
b/ts-test/tests/util/statefulAsyncTest.ts new file mode 100644 index 000000000..5ec21d5c4 --- /dev/null +++ b/ts-test/tests/util/statefulAsyncTest.ts @@ -0,0 +1,109 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect */ +import Aerospike from 'aerospike'; + +const AerospikeError = Aerospike.AerospikeError +import * as helper from '../test_helper'; +import { expect } from 'chai'; + +class State { + [key: string]: any; + + private _expectError: boolean = false; + private error: any = null; + set (name: any, promise: any) { + if (this._expectError) { + return promise.catch((error: any) => { + this.error = error + return this + }) + } else { + return promise.then((value: any) => { + this[name] = value + return this + }) + } + } + + setExpectError () { + this._expectError = true + return this + } +} + +export function initState () { + return Promise.resolve(new State()) +} +export function expectError(){ + return (state: any) => state.setExpectError() +} + +export function createRecord(bins: any) { + return (state: any) => { + const key = helper.keygen.string(helper.namespace, helper.set, {})() + const meta = { ttl: 600 } + const policy = new Aerospike.WritePolicy({ + exists: Aerospike.policy.exists.CREATE_OR_REPLACE + }) + 
return state.set('key', helper.client.put(key, bins, meta, policy)) + } +} +export function operate(ops: any) { + return (state: any) => { + return state.set('result', helper.client.operate(state.key, Array.isArray(ops) ? ops : [ops])) + } +} + +export function assertResultEql(expected: any) { + return (state: any) => { + expect(state.result.bins).to.eql(expected, 'result of operation does not match expectation') + return state + } +} + +export function assertResultSatisfy(matcher: any) { + return (state: any) => { + expect(state.result.bins).to.satisfy(matcher, 'result of operation does not satisfy expectation') + return state + } +} + +export function assertRecordEql(expected: any) { + return (state: any) => { + return helper.client.get(state.key).then((record) => + expect(record.bins).to.eql(expected, 'after operation, record bins do not match expectations') + ).then(() => state) + } +} + +export function assertError(code: any) { + return (state: any) => { + expect(state.error, `expected operation to raise exception with error code ${code}`) + .to.be.instanceof(AerospikeError) + .with.property('code', code) + return state + } +} + +export function cleanup (){ + return (state: any) =>{ + helper.client.remove(state.key) + } +} diff --git a/ts-test/tests/utils.js b/ts-test/tests/utils.js new file mode 100644 index 000000000..7e8e36000 --- /dev/null +++ b/ts-test/tests/utils.js @@ -0,0 +1,90 @@ +// ***************************************************************************** +// Copyright 2013-2023 Aerospike, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ***************************************************************************** + +'use strict' + +/* global expect, describe, it */ + +require('./test_helper') +const utils = require('../lib/utils') + +describe('utils.parseHostString() #noserver', function () { + it('parses a domain name', function () { + const host = utils.parseHostString('aero.local') + expect(host).to.eql({ addr: 'aero.local', port: 3000 }) + }) + + it('parses a domain name with port', function () { + const host = utils.parseHostString('aero.local:3333') + expect(host).to.eql({ addr: 'aero.local', port: 3333 }) + }) + + it('parses a domain name with TLS name and port', function () { + const host = utils.parseHostString('aero.local:aero.tls:3333') + expect(host).to.eql({ addr: 'aero.local', tlsname: 'aero.tls', port: 3333 }) + }) + + it('parses a domain name with TLS name', function () { + const host = utils.parseHostString('aero.local:aero.tls') + expect(host).to.eql({ addr: 'aero.local', tlsname: 'aero.tls', port: 3000 }) + }) + + it('parses an IPv4 address', function () { + const host = utils.parseHostString('192.168.33.10') + expect(host).to.eql({ addr: '192.168.33.10', port: 3000 }) + }) + + it('parses an IPv4 address with port', function () { + const host = utils.parseHostString('192.168.33.10:3333') + expect(host).to.eql({ addr: '192.168.33.10', port: 3333 }) + }) + + it('parses an IPv4 address with TLS name and port', function () { + const host = utils.parseHostString('192.168.33.10:aero.tls:3333') + expect(host).to.eql({ addr: '192.168.33.10', tlsname: 'aero.tls', port: 3333 }) + }) + + it('parses an IPv4 address with TLS name', function () { + const host = utils.parseHostString('192.168.33.10:aero.tls') + expect(host).to.eql({ addr: '192.168.33.10', tlsname: 'aero.tls', port: 3000 }) + }) + + it('parses an IPv6 address', function () { + const host = 
utils.parseHostString('[fde4:8dba:82e1::c4]') + expect(host).to.eql({ addr: 'fde4:8dba:82e1::c4', port: 3000 }) + }) + + it('parses an IPv6 address with port', function () { + const host = utils.parseHostString('[fde4:8dba:82e1::c4]:3333') + expect(host).to.eql({ addr: 'fde4:8dba:82e1::c4', port: 3333 }) + }) + + it('parses an IPv6 address with TLS name and port', function () { + const host = utils.parseHostString('[fde4:8dba:82e1::c4]:aero.tls:3333') + expect(host).to.eql({ addr: 'fde4:8dba:82e1::c4', tlsname: 'aero.tls', port: 3333 }) + }) + + it('parses an IPv6 address with TLS name', function () { + const host = utils.parseHostString('[fde4:8dba:82e1::c4]:aero.tls') + expect(host).to.eql({ addr: 'fde4:8dba:82e1::c4', tlsname: 'aero.tls', port: 3000 }) + }) + + it('throws an error if it cannot parse the string', function () { + expect(function () { + utils.parseHostString('not a valid host') + }).to.throw('Invalid host address') + }) +}) diff --git a/ts-test/tsconfig.json b/ts-test/tsconfig.json new file mode 100644 index 000000000..7af498bee --- /dev/null +++ b/ts-test/tsconfig.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "target": "esnext", + "module": "commonjs", + "outDir": "./dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["tests/**/*.ts", "tests/*.ts", "tests/util/*.ts"] +} \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json index 9cb2ada86..17c194591 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,13 +1,13 @@ { - "include": ["lib/*", "lib/commands/*", "lib/policies/*"], + "include": ["lib/*", "lib/commands/*", "lib/policies/*", "typings/index.d.ts"], "compilerOptions": { /* Visit https://aka.ms/tsconfig.json to read more about this file */ "strict": true, "module": "commonjs", "moduleResolution": "node", - "target": "es2019", + "target": "es5", "lib": [ - "es2020", + "es5", "dom" ], "allowJs": true, @@ -27,5 +27,8 @@ "isolatedModules": 
false, "skipLibCheck": true, "baseUrl": ".", + "paths": { + "*": ["typings/*"] + } } -} \ No newline at end of file +} diff --git a/tsconfig.tsbuildinfo b/tsconfig.tsbuildinfo new file mode 100644 index 000000000..865209270 --- /dev/null +++ b/tsconfig.tsbuildinfo @@ -0,0 +1 @@ +{"fileNames":["../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es5.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2016.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2018.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2019.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.dom.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.core.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.collection.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.generator.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.iterable.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.promise.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.proxy.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.reflect.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.symbol.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2016.array.include.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2016.intl.d.ts","../../.nvm/vers
ions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.date.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.object.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.string.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.intl.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2017.typedarrays.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2018.asynciterable.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2018.intl.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2018.promise.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2018.regexp.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2019.array.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2019.object.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2019.string.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2019.symbol.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2019.intl.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.bigint.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.date.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.promise.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.sharedmemory.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.string.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.symb
ol.wellknown.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.intl.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.es2020.number.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.esnext.intl.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.decorators.d.ts","../../.nvm/versions/node/v18.20.4/lib/node_modules/typescript/lib/lib.decorators.legacy.d.ts","./lib/user.d.ts","./lib/privilege.d.ts","./lib/role.d.ts","./lib/admin.d.ts","./lib/admin.js","./lib/error.d.ts","./lib/event_loop.d.ts","./lib/geojson.d.ts","./lib/filter.d.ts","./lib/exp.d.ts","./lib/info.d.ts","./lib/lists.d.ts","./lib/hll.d.ts","./lib/maps.d.ts","./lib/cdt_context.d.ts","./lib/operations.d.ts","./lib/bitwise.d.ts","./lib/policies/base_policy.d.ts","./lib/policies/apply_policy.d.ts","./lib/policies/operate_policy.d.ts","./lib/policies/query_policy.d.ts","./lib/policies/read_policy.d.ts","./lib/policies/remove_policy.d.ts","./lib/policies/scan_policy.d.ts","./lib/policies/write_policy.d.ts","./lib/policies/batch_policy.d.ts","./lib/policies/batch_apply_policy.d.ts","./lib/policies/batch_read_policy.d.ts","./lib/policies/batch_remove_policy.d.ts","./lib/policies/batch_write_policy.d.ts","./lib/policies/command_queue_policy.d.ts","./lib/policies/hll_policy.d.ts","./lib/policies/info_policy.d.ts","./lib/policies/admin_policy.d.ts","./lib/policies/list_policy.d.ts","./lib/policies/map_policy.d.ts","./lib/policy.d.ts","./lib/status.d.ts","./lib/features.d.ts","./lib/config.d.ts","./lib/record_stream.d.ts","./lib/query.d.ts","./lib/scan.d.ts","./lib/client.d.ts","./lib/double.d.ts","./lib/key.d.ts","./lib/record.d.ts","./lib/bin.d.ts","./lib/utils.d.ts","./lib/aerospike.d.ts","./lib/exp_operations.d.ts","./lib/index_job.d.ts","./lib/job.d.ts","./lib/batch_type.d.ts","./lib/privilege_code.d.ts","./lib/aerospike.js","./lib/batch_type.js","./lib/bigint.d.ts","./lib/bigint.js","./lib/bin.js",".
/lib/bitwise.js","./lib/cdt_context.js","./lib/commands/index.d.ts","./lib/udf_job.d.ts","./lib/client.js","./lib/config.js","./lib/double.js","./lib/error.js","./lib/event_loop.js","./lib/exp_lists.d.ts","./lib/exp_maps.d.ts","./lib/exp_bit.d.ts","./lib/exp_hll.d.ts","./lib/exp.js","./lib/exp_bit.js","./lib/exp_hll.js","./lib/exp_lists.js","./lib/exp_maps.js","./lib/exp_operations.js","./lib/features.js","./lib/filter.js","./lib/geojson.js","./lib/hll.js","./lib/index_job.js","./lib/info.js","./lib/job.js","./lib/key.js","./lib/lists.js","./lib/maps.js","./lib/operations.js","./lib/query_duration.d.ts","./lib/policy.js","./lib/privilege.js","./lib/privilege_code.js","./lib/query.js","./lib/query_duration.js","./lib/record.js","./lib/record_stream.js","./lib/role.js","./lib/scan.js","./lib/status.js","./lib/typedefs.d.ts","./lib/typedefs.js","./lib/udf_job.js","./lib/user.js","./lib/utils.js","./lib/commands/batch_command.d.ts","./lib/commands/command.d.ts","./lib/commands/batch_command.js","./lib/commands/command.js","./lib/commands/connect_command.d.ts","./lib/commands/connect_command.js","./lib/commands/exists_command.d.ts","./lib/commands/exists_command.js","./lib/commands/read_record_command.d.ts","./lib/commands/stream_command.d.ts","./lib/commands/write_record_command.d.ts","./lib/commands/query_background_command.d.ts","./lib/commands/index.js","./lib/commands/query_background_command.js","./lib/commands/read_record_command.js","./lib/commands/stream_command.js","./lib/commands/write_record_command.js","./lib/policies/admin_policy.js","./lib/policies/apply_policy.js","./lib/policies/base_policy.js","./lib/policies/batch_apply_policy.js","./lib/policies/batch_policy.js","./lib/policies/batch_read_policy.js","./lib/policies/batch_remove_policy.js","./lib/policies/batch_write_policy.js","./lib/policies/bitwise_policy.d.ts","./lib/policies/bitwise_policy.js","./lib/policies/command_queue_policy.js","./lib/policies/hll_policy.js","./lib/policies/info_policy.js","
./lib/policies/list_policy.js","./lib/policies/map_policy.js","./lib/policies/operate_policy.js","./lib/policies/query_policy.js","./lib/policies/read_policy.js","./lib/policies/remove_policy.js","./lib/policies/scan_policy.js","./lib/policies/write_policy.js","./typings/index.d.ts","./node_modules/@types/unist/index.d.ts","./node_modules/@types/hast/index.d.ts","./node_modules/@types/json5/index.d.ts","./node_modules/@types/mdast/index.d.ts","./node_modules/@types/node/ts5.6/globals.typedarray.d.ts","./node_modules/@types/node/ts5.6/buffer.buffer.d.ts","./node_modules/@types/node/assert.d.ts","./node_modules/@types/node/assert/strict.d.ts","./node_modules/undici-types/header.d.ts","./node_modules/undici-types/readable.d.ts","./node_modules/undici-types/file.d.ts","./node_modules/undici-types/fetch.d.ts","./node_modules/undici-types/formdata.d.ts","./node_modules/undici-types/connector.d.ts","./node_modules/undici-types/client.d.ts","./node_modules/undici-types/errors.d.ts","./node_modules/undici-types/dispatcher.d.ts","./node_modules/undici-types/global-dispatcher.d.ts","./node_modules/undici-types/global-origin.d.ts","./node_modules/undici-types/pool-stats.d.ts","./node_modules/undici-types/pool.d.ts","./node_modules/undici-types/handlers.d.ts","./node_modules/undici-types/balanced-pool.d.ts","./node_modules/undici-types/agent.d.ts","./node_modules/undici-types/mock-interceptor.d.ts","./node_modules/undici-types/mock-agent.d.ts","./node_modules/undici-types/mock-client.d.ts","./node_modules/undici-types/mock-pool.d.ts","./node_modules/undici-types/mock-errors.d.ts","./node_modules/undici-types/proxy-agent.d.ts","./node_modules/undici-types/env-http-proxy-agent.d.ts","./node_modules/undici-types/retry-handler.d.ts","./node_modules/undici-types/retry-agent.d.ts","./node_modules/undici-types/api.d.ts","./node_modules/undici-types/interceptors.d.ts","./node_modules/undici-types/util.d.ts","./node_modules/undici-types/cookies.d.ts","./node_modules/undici-types/patch.d.
ts","./node_modules/undici-types/websocket.d.ts","./node_modules/undici-types/eventsource.d.ts","./node_modules/undici-types/filereader.d.ts","./node_modules/undici-types/diagnostics-channel.d.ts","./node_modules/undici-types/content-type.d.ts","./node_modules/undici-types/cache.d.ts","./node_modules/undici-types/index.d.ts","./node_modules/@types/node/globals.d.ts","./node_modules/@types/node/async_hooks.d.ts","./node_modules/@types/node/buffer.d.ts","./node_modules/@types/node/child_process.d.ts","./node_modules/@types/node/cluster.d.ts","./node_modules/@types/node/console.d.ts","./node_modules/@types/node/constants.d.ts","./node_modules/@types/node/crypto.d.ts","./node_modules/@types/node/dgram.d.ts","./node_modules/@types/node/diagnostics_channel.d.ts","./node_modules/@types/node/dns.d.ts","./node_modules/@types/node/dns/promises.d.ts","./node_modules/@types/node/domain.d.ts","./node_modules/@types/node/dom-events.d.ts","./node_modules/@types/node/events.d.ts","./node_modules/@types/node/fs.d.ts","./node_modules/@types/node/fs/promises.d.ts","./node_modules/@types/node/http.d.ts","./node_modules/@types/node/http2.d.ts","./node_modules/@types/node/https.d.ts","./node_modules/@types/node/inspector.d.ts","./node_modules/@types/node/module.d.ts","./node_modules/@types/node/net.d.ts","./node_modules/@types/node/os.d.ts","./node_modules/@types/node/path.d.ts","./node_modules/@types/node/perf_hooks.d.ts","./node_modules/@types/node/process.d.ts","./node_modules/@types/node/punycode.d.ts","./node_modules/@types/node/querystring.d.ts","./node_modules/@types/node/readline.d.ts","./node_modules/@types/node/readline/promises.d.ts","./node_modules/@types/node/repl.d.ts","./node_modules/@types/node/sea.d.ts","./node_modules/@types/node/sqlite.d.ts","./node_modules/@types/node/stream.d.ts","./node_modules/@types/node/stream/promises.d.ts","./node_modules/@types/node/stream/consumers.d.ts","./node_modules/@types/node/stream/web.d.ts","./node_modules/@types/node/string_decoder.d
.ts","./node_modules/@types/node/test.d.ts","./node_modules/@types/node/timers.d.ts","./node_modules/@types/node/timers/promises.d.ts","./node_modules/@types/node/tls.d.ts","./node_modules/@types/node/trace_events.d.ts","./node_modules/@types/node/tty.d.ts","./node_modules/@types/node/url.d.ts","./node_modules/@types/node/util.d.ts","./node_modules/@types/node/v8.d.ts","./node_modules/@types/node/vm.d.ts","./node_modules/@types/node/wasi.d.ts","./node_modules/@types/node/worker_threads.d.ts","./node_modules/@types/node/zlib.d.ts","./node_modules/@types/node/globals.global.d.ts","./node_modules/@types/node/ts5.6/index.d.ts"],"fileIdsList":[[197,239],[47,48,49,197,239],[50,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,79,80,81,82,83,84,85,86,90,91,92,93,94,95,197,239],[50,52,53,54,55,56,57,58,59,60,61,62,63,83,84,85,86,88,89,90,91,92,93,94,95,97,98,99,100,101,197,239],[62,197,239],[86,88,89,197,239],[52,53,61,62,86,88,89,95,98,109,110,197,239,251,261,283],[93,154,197,239],[52,197,239],[154,197,239],[84,93,154,197,239],[99,197,239],[153,154,157,159,161,162,163,164,197,239],[99,154,197,239],[92,93,154,197,239],[83,197,239],[84,197,239],[52,77,197,239],[116,117,118,119,197,239],[61,197,239],[54,197,239],[57,84,99,197,239,283],[109,197,239],[104,197,239],[52,61,62,197,239],[61,62,197,239],[64,197,239],[64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,197,239],[64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,137,197,239],[87,197,239],[55,87,109,197,198,239,283],[197,239,271,283],[87,99,109,197,239],[57,99,197,239,283],[192,197,239],[197,198,239],[197,238,239],[197,239,244,274],[197,239,240,245,251,252,259,271,282],[197,239,240,241,251,259],[197,239,242,283],[197,239,243,244,252,260],[197,239,244,271,279],[197,239,245,247,251,259],[197,238,239,246],[197,239,247,248],[197,239,251],[197,239,249,251],[197,238,239,251],[197,239,251,252,253,271,282],[197,239,251,252,253,266,271,274],[197,236,239,287],[197,236,239,247,251,254,259,2
71,282],[197,239,251,252,254,255,259,271,279,282],[197,239,254,256,271,279,282],[197,239,251,257],[197,239,258,282,287],[197,239,247,251,259,271],[197,239,260],[197,239,261],[197,238,239,262],[197,198,199,238,239,240,241,242,243,244,245,246,247,248,249,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288],[197,239,264],[197,239,265],[197,239,251,266,267],[197,239,266,268,283,285],[197,239,251,271,272,273,274],[197,239,271,273],[197,239,271,272],[197,239,274],[197,239,275],[197,198,239,271],[197,239,251,277,278],[197,239,277,278],[197,239,244,259,271,279],[197,239,280],[239],[196,197,198,199,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289],[197,239,259,281],[197,239,254,265,282],[197,239,244,283],[197,239,271,284],[197,239,258,285],[197,239,286],[197,239,244,251,253,262,271,282,285,287],[197,239,271,288],[197,208,212,239,282],[197,208,239,271,282],[197,203,239],[197,205,208,239,279,282],[197,239,259,279],[197,239,290],[197,203,239,290],[197,205,208,239,259,282],[197,200,201,204,207,239,251,271,282],[197,208,215,239],[197,200,206,239],[197,208,229,230,239],[197,204,208,239,274,282,290],[197,229,239,290],[197,202,203,239,290],[197,208,239],[197,202,203,204,205,206,207,208,209,210,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,230,231,232,233,234,235,239],[197,208,223,239],[197,208,215,216,239],[197,206,208,216,217,239],[197,207,239],[197,200,203,208,239],[197,208,212,216,217,239],[197,212,239],[197,206,208,211,239,282],[197,200,205,208,215,239],[197,239,271],[197,203,208,229,239,287,290]],"fileInfos":[{"version":"44e584d4f6444f58791784f1d530875970993129442a847597db702a073ca68c","affectsGlobalScope":true,"impliedFormat":1},{"version":"45b7ab580deca34ae9729e97c13cfd999df04416a79116c3bfb483804
f85ded4","impliedFormat":1},{"version":"3facaf05f0c5fc569c5649dd359892c98a85557e3e0c847964caeb67076f4d75","impliedFormat":1},{"version":"9a68c0c07ae2fa71b44384a839b7b8d81662a236d4b9ac30916718f7510b1b2d","impliedFormat":1},{"version":"5e1c4c362065a6b95ff952c0eab010f04dcd2c3494e813b493ecfd4fcb9fc0d8","impliedFormat":1},{"version":"68d73b4a11549f9c0b7d352d10e91e5dca8faa3322bfb77b661839c42b1ddec7","impliedFormat":1},{"version":"5efce4fc3c29ea84e8928f97adec086e3dc876365e0982cc8479a07954a3efd4","impliedFormat":1},{"version":"9e8ca8ed051c2697578c023d9c29d6df689a083561feba5c14aedee895853999","affectsGlobalScope":true,"impliedFormat":1},{"version":"6920e1448680767498a0b77c6a00a8e77d14d62c3da8967b171f1ddffa3c18e4","affectsGlobalScope":true,"impliedFormat":1},{"version":"dc2df20b1bcdc8c2d34af4926e2c3ab15ffe1160a63e58b7e09833f616efff44","affectsGlobalScope":true,"impliedFormat":1},{"version":"515d0b7b9bea2e31ea4ec968e9edd2c39d3eebf4a2d5cbd04e88639819ae3b71","affectsGlobalScope":true,"impliedFormat":1},{"version":"45d8ccb3dfd57355eb29749919142d4321a0aa4df6acdfc54e30433d7176600a","affectsGlobalScope":true,"impliedFormat":1},{"version":"0dc1e7ceda9b8b9b455c3a2d67b0412feab00bd2f66656cd8850e8831b08b537","affectsGlobalScope":true,"impliedFormat":1},{"version":"ce691fb9e5c64efb9547083e4a34091bcbe5bdb41027e310ebba8f7d96a98671","affectsGlobalScope":true,"impliedFormat":1},{"version":"8d697a2a929a5fcb38b7a65594020fcef05ec1630804a33748829c5ff53640d0","affectsGlobalScope":true,"impliedFormat":1},{"version":"4ff2a353abf8a80ee399af572debb8faab2d33ad38c4b4474cff7f26e7653b8d","affectsGlobalScope":true,"impliedFormat":1},{"version":"93495ff27b8746f55d19fcbcdbaccc99fd95f19d057aed1bd2c0cafe1335fbf0","affectsGlobalScope":true,"impliedFormat":1},{"version":"6fc23bb8c3965964be8c597310a2878b53a0306edb71d4b5a4dfe760186bcc01","affectsGlobalScope":true,"impliedFormat":1},{"version":"ea011c76963fb15ef1cdd7ce6a6808b46322c527de2077b6cfdf23ae6f5f9ec7","affectsGlobalScope":true,"impliedFormat":1},{"version":
"38f0219c9e23c915ef9790ab1d680440d95419ad264816fa15009a8851e79119","affectsGlobalScope":true,"impliedFormat":1},{"version":"69ab18c3b76cd9b1be3d188eaf8bba06112ebbe2f47f6c322b5105a6fbc45a2e","affectsGlobalScope":true,"impliedFormat":1},{"version":"4738f2420687fd85629c9efb470793bb753709c2379e5f85bc1815d875ceadcd","affectsGlobalScope":true,"impliedFormat":1},{"version":"2f11ff796926e0832f9ae148008138ad583bd181899ab7dd768a2666700b1893","affectsGlobalScope":true,"impliedFormat":1},{"version":"4de680d5bb41c17f7f68e0419412ca23c98d5749dcaaea1896172f06435891fc","affectsGlobalScope":true,"impliedFormat":1},{"version":"9fc46429fbe091ac5ad2608c657201eb68b6f1b8341bd6d670047d32ed0a88fa","affectsGlobalScope":true,"impliedFormat":1},{"version":"ac9538681b19688c8eae65811b329d3744af679e0bdfa5d842d0e32524c73e1c","affectsGlobalScope":true,"impliedFormat":1},{"version":"0a969edff4bd52585473d24995c5ef223f6652d6ef46193309b3921d65dd4376","affectsGlobalScope":true,"impliedFormat":1},{"version":"9e9fbd7030c440b33d021da145d3232984c8bb7916f277e8ffd3dc2e3eae2bdb","affectsGlobalScope":true,"impliedFormat":1},{"version":"811ec78f7fefcabbda4bfa93b3eb67d9ae166ef95f9bff989d964061cbf81a0c","affectsGlobalScope":true,"impliedFormat":1},{"version":"717937616a17072082152a2ef351cb51f98802fb4b2fdabd32399843875974ca","affectsGlobalScope":true,"impliedFormat":1},{"version":"d7e7d9b7b50e5f22c915b525acc5a49a7a6584cf8f62d0569e557c5cfc4b2ac2","affectsGlobalScope":true,"impliedFormat":1},{"version":"71c37f4c9543f31dfced6c7840e068c5a5aacb7b89111a4364b1d5276b852557","affectsGlobalScope":true,"impliedFormat":1},{"version":"576711e016cf4f1804676043e6a0a5414252560eb57de9faceee34d79798c850","affectsGlobalScope":true,"impliedFormat":1},{"version":"89c1b1281ba7b8a96efc676b11b264de7a8374c5ea1e6617f11880a13fc56dc6","affectsGlobalScope":true,"impliedFormat":1},{"version":"74f7fa2d027d5b33eb0471c8e82a6c87216223181ec31247c357a3e8e2fddc5b","affectsGlobalScope":true,"impliedFormat":1},{"version":"1a94697425a99354df73d9c8291e2ec
d4dddd370aed4023c2d6dee6cccb32666","affectsGlobalScope":true,"impliedFormat":1},{"version":"063600664504610fe3e99b717a1223f8b1900087fab0b4cad1496a114744f8df","affectsGlobalScope":true,"impliedFormat":1},{"version":"934019d7e3c81950f9a8426d093458b65d5aff2c7c1511233c0fd5b941e608ab","affectsGlobalScope":true,"impliedFormat":1},{"version":"bf14a426dbbf1022d11bd08d6b8e709a2e9d246f0c6c1032f3b2edb9a902adbe","affectsGlobalScope":true,"impliedFormat":1},{"version":"e3f9fc0ec0b96a9e642f11eda09c0be83a61c7b336977f8b9fdb1e9788e925fe","affectsGlobalScope":true,"impliedFormat":1},{"version":"59fb2c069260b4ba00b5643b907ef5d5341b167e7d1dbf58dfd895658bda2867","affectsGlobalScope":true,"impliedFormat":1},{"version":"479553e3779be7d4f68e9f40cdb82d038e5ef7592010100410723ceced22a0f7","affectsGlobalScope":true,"impliedFormat":1},{"version":"368af93f74c9c932edd84c58883e736c9e3d53cec1fe24c0b0ff451f529ceab1","affectsGlobalScope":true,"impliedFormat":1},{"version":"811c71eee4aa0ac5f7adf713323a5c41b0cf6c4e17367a34fbce379e12bbf0a4","affectsGlobalScope":true,"impliedFormat":1},{"version":"33358442698bb565130f52ba79bfd3d4d484ac85fe33f3cb1759c54d18201393","affectsGlobalScope":true,"impliedFormat":1},{"version":"782dec38049b92d4e85c1585fbea5474a219c6984a35b004963b00beb1aab538","affectsGlobalScope":true,"impliedFormat":1},"50ee7f5f71cc6a62d1cbd1571f161d22b63beb3b6087a219e4b84e30852792f1","df89ab0f80d21469681a98fa9040403945e20a251b52299ebd685190f6019490","5e25b241328cddaad4b6347e8275fa25b9c8e5c146a3665b17e56d619e973e36","5b4e284589f86aed09aa5b4b63ed18908417f4943091c5f24182ac667c045d69","3e6469e0505a3481a7b0e3e1da5b7486c7a008dc2ba4eb8f2db204609fd72e62","56afc456fb04d217443205588178a98cb9fc09697795224ab9164f5bd5f0c18f","916481f69e6650a2037a10b221bee0d2e8ee2e914526d20182bd4f6df2d33c1b","d60d77e63245f032db2e0e2007d03f54323c0c56a4fe6079b8cdde412ec06533","b2cb1e3f54727e256efbf7dce9e1596220d2274fa6391070b1520d4b08e1611b","2fc6a5fa42c9683e5915a941e6e05fcf4128611e545dd2270bc4b44763a3d442","2f63350a7b2799d1e35
c8ba95e1ce42ff69fc4a0ef22c125f3818ad4727e6be4","b7568390431cf2f274632cce630cb8ac0fbd82ef084853febe93c64b2c89cb45","da94e4b6ef7c0df33ce733b470706b4595be4b611aaa64d6b525ab6d159b2a0b","86dbf81a495c2dc52b28968741637746bbf3aec5d2966bdb8564a0f55af6a827","edb0b26c4e2b7ffa14399a432ee7684546a12ced3c05e70fec5849ff5210de04","bc0e8256595a35ad26cb16e52b12855a13633210f103167a061f61e9d60dce09","5fd6219c70a7f610b4abe9227903e97c8ba3040ce6eb20e6c0271976a2043c87","9cba44bf3ef4f8d28ce163909cb5ddf841a6c0b99ab44203459b3592f229c899","22e5ea944a7bee091fa4b12f01ffa910e155cd6834c5ada187b8ce0dfdb55054","ef9cdab77c302359f84f89e1c4d5b240dd1eab009f5e07e2ec6e774614a3411a","bd31e7425de0d60174bd5a4fa18bd2b55ede926dfc4dca6cfa407754a9037450","fe92589910d6e91f54c464bb57ac419bcc1a27d303c6f43a5ab19200034ca5c5","e95891d2983be766880dfefc8001b941f6a93f2af1a328d6ca6e29a8f6bf65b4","0f2f225172fee77a9f9befccf8b43ca24c79e712dd1bd121cffd4403a5536037","ac35e96b938f6976a9b8e470a668f0fa75289e0bfc3e9d05d90e10567dfa500c","4263e692ee60cf3acc3f70286fe15cd2fbacdf6484118672fbb37b91d4103f2c","2fa0d1d1a388dddc21081480fc3c697f862798f258cd65cb04d9a24d3564a5c7","9a6edc0219aabf7d2917200666d5de95f343421079f267f685f4ddb18f561d02","dd85fa2dd8369dfd1087c49bcb9e84cea4d7521916dc1a756f3604cb79f619b3","65ff2bca01d1fa304795d2e805dc0081df8626338e1bc0169f7fe184c9684b43","ce4c9c538cec52021e0856d82f495bfb187eb8ae7da4f9894b3201a4c8a8e0ca","a852ab1d9e97b560f2439d6f750420c893711b8334feeb5cde8a66c8a1e6389c","d0fd97b1fdf7d23f66452b936023fc4512b3a05673d8edce09be99b58ee8e1e3","2f77d3036723b70c5181eddd35390d675779897448a4d6ae7691380553035532","a137fe99abac68542fa43ad0d7cc96fcd93751af66c002c8fd2f86049177993d","4fc4236609eaaa7bb598a577b51b4139409cd4f8572b1f1ba7af7b95c67dc2eb","b1190ecdf4f9ccd9faaf79c8eeeee196a9c9ba8a9c96f686f67e31d5af366737","d3930ee6bf4fe2569d8e41e2bafde50bde97d904ddb9a015a576fcf4823d6ebf","3842e8df9a450b985bb61020086838af47c572ba27a15add844aad1025563a6f","f92652ea3d1687581e2b280fa759ebeec57e9df823e5e28d13cfdf2820a3cb68","bb638d760
4a132dd7e6412724ecf649e648245a1bea63ced1e8e342dc6f521e1","fd13ace5e32bf7b76ccd6f8f9365ef8c67cb3634f302142c9f38d78842b897b8","dd73da6ece13e632e3c168416a030b6ed930f940e25ed2d9a7ab52d5665876e4","90a41cb0fb3bf90d555c7c41354b2aa4df429c5d5fb363de626378645ad939b1","9c00da747b8ec2d7ee34492425b5a556401814cffe3db5fbfce783d2553a96a9","84940d81a4cd128d254ba2e0c55b4ccf8e0476ba16e3ff967416e999aa7b4ccc","6ed576ccea9d94a9288c43c8ad6ca59760c767f34e813e4d102a3930359bacc2","f029604588727266c8a1b7e3b4facfe5045ecf788a857dfab4d4cfcf6551f233","eff4c890cefed4d0d13dd00336d99b179e53c0cafb6c2bb5a6323295704cac98","33c660ba8517401fc35194533ca28ad6e6383fc53ecd60b0d229bbc2a2d368bb","b3e8172eed1ce31c39854e7b99b31c7c758a1ee4098b9e5988f0cea69fa2d064","414dcdca2f5395f4db0e5763db579aef57e0333a2ce53bdea7d9ad32c29ca055","a923d7d228367c7c22cf356e41a737c6ffc04350c81abed3726706dfb8243805","d7cd81e386ecc72ecac044234711f669d5c26ccb1a717483e20d4933e5ce1f3f","76bc991e7b66a626cd01e204a49fdf47bc3afb14b0e1338e512c78e7bb2579f0","f3b2f2bcd7a34b0a0ad3237de604829ee8df0f0f8589b670c2243bcc056595a2","7440c79b25d2c18ef0e300ab21e5d27433d05798e8d4b4a0a801c94fa7797cb2","f7d3c8f833a2dd614b9e240ea571159f19e312f9b9f9bad7724319e1beb40ffd","ce8d2118af5e51798b7b2e28ddd2f2b8d566c9b660d813d7cbb1ca7557473407","3aadd88ef693c05c6482003a96a6d793ec3888693651faac5176165f93ad45f4","669b14921b67e8919cd0bde4613b65c085242f0a7e6a1d5589547a6d99b13ea9","51683072397afe5eb67f0e2b1393f023a6b7c18d42dc5eea3b88f13432acf229","c2651a9b758b6513684cda52211cbf04052b66433ae233dcc13c68629ed11463","f028184141b1575b94c726595a9078c0a66626f4709b85c73bb762e33f28eeb9","0e4778bb4cf59aa0385e8e457dbf9b9aacc93a0aefba6dd133c216069af4655e","567bd8e8d139e6764cb7b7cfa7e25ef1eda0d6abaca1bdad648033daa99be013","9adb1b64644424b39705e748578a02ef44e95d84fb6403366d844bc02653986d","79954b0108e0b3f77da9bfdb937c2f13ea0b4aab3747ec7f98444deae475746b","80f4b485978b67cb538b36f1a20dddca9140d09f220cc2cb96b342b25e386d33","56959ec4bf8ae08d63aeebef403ac43b48f3bd55ba90701849c0286dd92de105",
"6f96dbf1cda767a4f687ac9aa2f7de4379aad97f83537901fee126dd6b5d6a0d","dbe768d75c748c19bee8b9c1fb3af9942e8329e53c25f8a1627e6b541d400fe0","2b8d7087792bbe6c1b0039c0cd134cf9be2ad09ac90c9cfda7d7b18cd6f371fe","8e9eb326c88942bd7035bd10eb66662c40219cb5636d2fb29245e3e4cbcf3df0","ebdc0ce922691e1193206d34ce3771c3008460e1f33460c19630d44ccc1368bc","73ab2f25b66b20c2a97a59e7ea08d4c88948919160dc44abdeffb4b2b970629a","36d99417cbc1f754ef5f9886f2e920280fd39513036c36e8604482f8877b4972","1b9b7f04260f72f251d6c6e5c60220cbd6ba35f45d8495d47ed37ee6cc9ebf18","382a5d97de0fe844419dafd7fdc4e16896683cdb2ed418783b5f51e18201c1e7","bf1154972d54503d03697ad280cf7c9ab708f95f3e46b74b7b7e4ae266b4bd64","dd0ad456d442b1d498736c24503a8dcb3e1be4d5e154c53680bbc10724ac4ac1","920f6745bb086602e559445d6b5531ebf7a260c82220a9716c09c73ba36af632","9d2cce52b6fd42c49331031ab154914e27311ad7557c64d368beae6b3a2e607e","48047cace29ac151468755a758c195e5ea98071d374abc094a5d7d48d1564b96","3a156fae9b192cca85af6a9b09e0247fcff6929233b731ab1a23163cba425dcc","411539a138f9b0e7d2ec4b5ce52a2e11417002dc2135e758715124f05b56be0e","4e95d7e7632fa32621b7a3ce3192d20b056d8a5bed03b1d3068fc4f40e16064c","8423779c832886ea3404266ea5ee90e1efd2f8bda63afa362bb618e8c6127a6b","151dfd2e5426164b8e2c9c66714b70b21bf53691b4bb46a3259adeadf6c64eb9","4f85f591dd16787d834ae89af2d1d749bac8629749f0aff9b43469e19b77870d","987c30a199379df19acf457096564555da42148a29b3cb467761b78bef4d7888","96b3aeb114456ff42c81bb54a34da8bbb85578545a1d87760d56378d5858260f","65a457dac67db6f9e8859a6722d99665d354aa631efb25f0fc0eefcb47077a22","710aa36a491bed99840d31384ab8c7ee74c1b61771f2de673563c80a723f40c3","9e0a5cf0d09701df1191fe2dbbd3279563dbe19ada9d7d1479f80c767859a112","95a48a24ccea07e3fc2b46dcf194f96c7bd57bb039cc73dd9a902770f348e94a","21cb334ecafdf45da6fe2b08eff3077b2d80f5361e216534f3df1a1ad043c57e","aee37a098463ea12162e2b5a482fb2323705c096d10fa1012bf020a0ffcf0a91","f78f65bb082519032d668a4922e1dbba89bbb95b9a11e6d38154171932496a79","fbf076e1db9f73afc0d0643a3efd96107cd277d8d3967ea4d1a527c6
788041ec","634135d2f35be27442c8bb1cf52aa9de0f2c46f567e8b49c4cf75512c6b8b41b",{"version":"3c3f1caeacad09dc7029b0bff6ae34851f23cc049507e21f309a1f88922b2006","affectsGlobalScope":true},"4a1539f0248a4ce29d044a86f280030684fd3f9cfb02de0ac19604fd35812e22","7151dfcd2e1d032f87f3c384c6a9f1f4d17e3e848055853e15e44d224bf4a493","753e834f4fb581aa34531cfada4c05eddbb21272d47225f89594cd05327e7c85","0dce75fc59855382009d38a312444f16c69f8f3e575f8614e75f7a5e220f4e8a","2bec9ff290fb58e4a3665fafeea5d03776946161eaa6952be0c4c9e480b464f5","6268f3464d4e52fec099fa6a5f1381e58ea160a6a18ea36168ae74647d98ce38","8c7946eed568b2171a3d05da4c20a56f6abc524340b8a7a65b0f263a0fd2cbfc","504698dc2af72165e914ce09dfbf7c4f7b0552b5e6440c13de087825209aa13d","29e86d610b24345ead04f44210067f07a9e0aee07ec0545f2f96f02eb18a04ae","0fb7d620ee965130042df2e83d5eaf259fb0578286f3a35a7e351c431d3c0ada","bf6af035d6ace468eda338a3b1918be77789b536a26e2b330eed35a02bffc82e","091a310a6e3d55e934d328ab64886a3c1b1c3b63fcdc4032dd6146e6c1265dcb","ab595e1759a75c80b93e2fd34241ba52395c7e0b95725f9353b11d92bc321443","3bdf5800240e3ce7fa7ef79a15439a731918684e354db8d231b41d5f681f1972","44281d798f67ff91a4ed10629b70b57dd8b6805f780219bc7100b70359400ad8","e7223050aa283125200339a51031ed6db40d7d6e77c5d3ab539dd56365f77fd9","86b884f73f5924988cf886f42066eed6d7038ba1807ae047f1328a43a48e7abe","42092e5814dca58106807bbbb0da1de9df3deb68269afcbcd0cf050bb590bb69","4f3289ed4d52de1690ea9bcaafd7383c1996ad2d84d051ad1928aa9b338741ae","79d6ad8b0aaec71cf00299f0f9615903a3998b055de956617a042d2546780ea8","e9fee1e6744fc76cf3ef7db9c3fed442ab3da36c4d159612301663fe5eda91ba","635275bdb5135438a63d210dd005b59e40fad300c19c0b7a98cfbbca21041935","6783f7dbc89f984d42006702ddd84f45179435d86fe0ee9ac5f3d3bd8006dce6","6bca56f54926368dd54b11573bb6b5df435bc23d44223be38f5ed4aee23edafe","0e151e18d3fbc3552ac37ba1a7f64ec8141c66dfae7f4916baa16fcf55ff9653","70adda51e756bce1090b44627b634870df6e62d1204f130f5f31b99f98d3418a","f236d3978f716864b2a80bb2a8c240fc6660a54de84e258784d56ffbdeee295b","3ee29da1
0ab65a468d3841ada7641ebe4d93bf7621e4ce9fdf510f27619c5193","3ee35ba9e1bec37cc8d4a96d45a0ffdb479c900cc40b63f536ed274c1b489e28","aeec56b8f08618ab91f5a2224ab3c827a1a7098024aa37653c50b8f58b88f863","107a73d6f392170e386064104447b5b53c78d462ce25cf9421b92719c4d59c9b","7a07e71710467f6977f1059dc32b57fba8c84e11ec24dc03cd783e839d02cc59","508a3c11d8fd8ccb8f41edd657db60b3b8b272f56d444fa84b9caf14521c7b96","c6858ecf0ac04a31d2c67e60afb118356cf94e7346b2aa88d5abe840d246b8c1","a914b2188de936bd04ee305411024b7546c9938275f98eafcac347654de6163e","51fc7243dd326936c8beeeff375208bf26c5fa6f77749796ae2fd6f059da014b","12de18cae9cb85048c5f24770923cbc0e4605bfb36e93374c03804e65655fc09","66b7c4b74def66c5b6f9bc229ddf39f55f01413f003694bdff6aab5fc6ae5862","31a4899350c13a16c4e3785b23b613d883a093912fb78b1a2519fff63405847d","3ee620f6c18edbc243e037a4f7373c760a03a85450c230a3dfee5d63fc75e68a","9a07db048782b1ef9c012bd85708b59ad22e4423bd9879601cc4ba0e683ceae6","b2389854aecaa1b342243844f5774a937370757dac77e0ba671d0022df402766","d554b74cda8386c323b4d9abe0259b245a02f715dc77967bc30f4d898a202632",{"version":"89121c1bf2990f5219bfd802a3e7fc557de447c62058d6af68d6b6348d64499a","impliedFormat":1},{"version":"79b4369233a12c6fa4a07301ecb7085802c98f3a77cf9ab97eee27e1656f82e6","impliedFormat":1},{"version":"96d14f21b7652903852eef49379d04dbda28c16ed36468f8c9fa08f7c14c9538","impliedFormat":1},{"version":"d4a22007b481fe2a2e6bfd3a42c00cd62d41edb36d30fc4697df2692e9891fc8","impliedFormat":1},{"version":"613b21ccdf3be6329d56e6caa13b258c842edf8377be7bc9f014ed14cdcfc308","affectsGlobalScope":true,"impliedFormat":1},{"version":"2d1319e6b5d0efd8c5eae07eb864a00102151e8b9afddd2d45db52e9aae002c4","affectsGlobalScope":true,"impliedFormat":1},{"version":"f6114eb1e8f70ec08816bdaa6ec740a0a7a01f25743e36f655f00157be394374","impliedFormat":1},{"version":"7394959e5a741b185456e1ef5d64599c36c60a323207450991e7a42e08911419","impliedFormat":1},{"version":"5929864ce17fba74232584d90cb721a89b7ad277220627cc97054ba15a98ea8f","impliedFormat":1},{"version":"
24bd580b5743dc56402c440dc7f9a4f5d592ad7a419f25414d37a7bfe11e342b","impliedFormat":1},{"version":"25c8056edf4314820382a5fdb4bb7816999acdcb929c8f75e3f39473b87e85bc","impliedFormat":1},{"version":"c464d66b20788266e5353b48dc4aa6bc0dc4a707276df1e7152ab0c9ae21fad8","impliedFormat":1},{"version":"78d0d27c130d35c60b5e5566c9f1e5be77caf39804636bc1a40133919a949f21","impliedFormat":1},{"version":"c6fd2c5a395f2432786c9cb8deb870b9b0e8ff7e22c029954fabdd692bff6195","impliedFormat":1},{"version":"1d6e127068ea8e104a912e42fc0a110e2aa5a66a356a917a163e8cf9a65e4a75","impliedFormat":1},{"version":"5ded6427296cdf3b9542de4471d2aa8d3983671d4cac0f4bf9c637208d1ced43","impliedFormat":1},{"version":"6bdc71028db658243775263e93a7db2fd2abfce3ca569c3cca5aee6ed5eb186d","impliedFormat":1},{"version":"cadc8aced301244057c4e7e73fbcae534b0f5b12a37b150d80e5a45aa4bebcbd","impliedFormat":1},{"version":"385aab901643aa54e1c36f5ef3107913b10d1b5bb8cbcd933d4263b80a0d7f20","impliedFormat":1},{"version":"9670d44354bab9d9982eca21945686b5c24a3f893db73c0dae0fd74217a4c219","impliedFormat":1},{"version":"0b8a9268adaf4da35e7fa830c8981cfa22adbbe5b3f6f5ab91f6658899e657a7","impliedFormat":1},{"version":"11396ed8a44c02ab9798b7dca436009f866e8dae3c9c25e8c1fbc396880bf1bb","impliedFormat":1},{"version":"ba7bc87d01492633cb5a0e5da8a4a42a1c86270e7b3d2dea5d156828a84e4882","impliedFormat":1},{"version":"4893a895ea92c85345017a04ed427cbd6a1710453338df26881a6019432febdd","impliedFormat":1},{"version":"c21dc52e277bcfc75fac0436ccb75c204f9e1b3fa5e12729670910639f27343e","impliedFormat":1},{"version":"13f6f39e12b1518c6650bbb220c8985999020fe0f21d818e28f512b7771d00f9","impliedFormat":1},{"version":"9b5369969f6e7175740bf51223112ff209f94ba43ecd3bb09eefff9fd675624a","impliedFormat":1},{"version":"4fe9e626e7164748e8769bbf74b538e09607f07ed17c2f20af8d680ee49fc1da","impliedFormat":1},{"version":"24515859bc0b836719105bb6cc3d68255042a9f02a6022b3187948b204946bd2","impliedFormat":1},{"version":"ea0148f897b45a76544ae179784c95af1bd6721b8610af9ffa467a518a08
6a43","impliedFormat":1},{"version":"24c6a117721e606c9984335f71711877293a9651e44f59f3d21c1ea0856f9cc9","impliedFormat":1},{"version":"dd3273ead9fbde62a72949c97dbec2247ea08e0c6952e701a483d74ef92d6a17","impliedFormat":1},{"version":"405822be75ad3e4d162e07439bac80c6bcc6dbae1929e179cf467ec0b9ee4e2e","impliedFormat":1},{"version":"0db18c6e78ea846316c012478888f33c11ffadab9efd1cc8bcc12daded7a60b6","impliedFormat":1},{"version":"4d2b0eb911816f66abe4970898f97a2cfc902bcd743cbfa5017fad79f7ef90d8","impliedFormat":1},{"version":"bd0532fd6556073727d28da0edfd1736417a3f9f394877b6d5ef6ad88fba1d1a","impliedFormat":1},{"version":"89167d696a849fce5ca508032aabfe901c0868f833a8625d5a9c6e861ef935d2","impliedFormat":1},{"version":"e53a3c2a9f624d90f24bf4588aacd223e7bec1b9d0d479b68d2f4a9e6011147f","impliedFormat":1},{"version":"24b8685c62562f5d98615c5a0c1d05f297cf5065f15246edfe99e81ec4c0e011","impliedFormat":1},{"version":"93507c745e8f29090efb99399c3f77bec07db17acd75634249dc92f961573387","impliedFormat":1},{"version":"339dc5265ee5ed92e536a93a04c4ebbc2128f45eeec6ed29f379e0085283542c","impliedFormat":1},{"version":"4732aec92b20fb28c5fe9ad99521fb59974289ed1e45aecb282616202184064f","impliedFormat":1},{"version":"2e85db9e6fd73cfa3d7f28e0ab6b55417ea18931423bd47b409a96e4a169e8e6","impliedFormat":1},{"version":"c46e079fe54c76f95c67fb89081b3e399da2c7d109e7dca8e4b58d83e332e605","impliedFormat":1},{"version":"bf67d53d168abc1298888693338cb82854bdb2e69ef83f8a0092093c2d562107","impliedFormat":1},{"version":"bb2cd9339d0201e7e78ccb6ff2f71aac103934bf35eaaa37e139ac2b68af0db8","affectsGlobalScope":true,"impliedFormat":1},{"version":"76103716ba397bbb61f9fa9c9090dca59f39f9047cb1352b2179c5d8e7f4e8d0","impliedFormat":1},{"version":"53eac70430b30089a3a1959d8306b0f9cfaf0de75224b68ef25243e0b5ad1ca3","affectsGlobalScope":true,"impliedFormat":1},{"version":"4314c7a11517e221f7296b46547dbc4df047115b182f544d072bdccffa57fc72","impliedFormat":1},{"version":"115971d64632ea4742b5b115fb64ed04bcaae2c3c342f13d9ba7e3f9ee39c4e7","i
mpliedFormat":1},{"version":"c2510f124c0293ab80b1777c44d80f812b75612f297b9857406468c0f4dafe29","affectsGlobalScope":true,"impliedFormat":1},{"version":"a40826e8476694e90da94aa008283a7de50d1dafd37beada623863f1901cb7fb","impliedFormat":1},{"version":"46e07db372dd75edc1a26e68f16d1b7ffb34b7ab3db5cdb3e391a3604ad7bb7c","affectsGlobalScope":true,"impliedFormat":1},{"version":"24642567d3729bcc545bacb65ee7c0db423400c7f1ef757cab25d05650064f98","impliedFormat":1},{"version":"e6f5a38687bebe43a4cef426b69d34373ef68be9a6b1538ec0a371e69f309354","impliedFormat":1},{"version":"a6bf63d17324010ca1fbf0389cab83f93389bb0b9a01dc8a346d092f65b3605f","impliedFormat":1},{"version":"e009777bef4b023a999b2e5b9a136ff2cde37dc3f77c744a02840f05b18be8ff","impliedFormat":1},{"version":"1e0d1f8b0adfa0b0330e028c7941b5a98c08b600efe7f14d2d2a00854fb2f393","impliedFormat":1},{"version":"ee1ee365d88c4c6c0c0a5a5701d66ebc27ccd0bcfcfaa482c6e2e7fe7b98edf7","affectsGlobalScope":true,"impliedFormat":1},{"version":"f501a53b94ba382d9ba396a5c486969a3abc68309828fa67f916035f5d37fe2b","affectsGlobalScope":true,"impliedFormat":1},{"version":"c956ba45704d4a97f7a96923a307a6203bc0e7c4c532930d4c8ca261eaaff32a","impliedFormat":1},{"version":"ab0e88d33ccf15d8b3c891038b5a16094b0dd7e860ab0e2ba08da4384afce02b","impliedFormat":1},{"version":"954580f86c8e2a4abd5dcd1bcdf1a4c7e012495f1c39e058dc738bc93024642a","impliedFormat":1},{"version":"fa56be9b96f747e93b895d8dc2aa4fb9f0816743e6e2abb9d60705e88d4743a2","impliedFormat":1},{"version":"8257c55ff6bff6169142a35fce6811b511d857b4ae4f522cdb6ce20fd2116b2c","impliedFormat":1},{"version":"6d386bc0d7f3afa1d401afc3e00ed6b09205a354a9795196caed937494a713e6","impliedFormat":1},{"version":"3a9e5dddbd6ca9507d0c06a557535ba2224a94a2b0f3e146e8215f93b7e5b3a8","affectsGlobalScope":true,"impliedFormat":1},{"version":"d8b56de03a9f79f3fc1ac3a01a0d63bb48cc15f95a6b95549b4fb420e6030973","impliedFormat":1},{"version":"b1b6ee0d012aeebe11d776a155d8979730440082797695fc8e2a5c326285678f","impliedFormat":1},{"version"
:"45875bcae57270aeb3ebc73a5e3fb4c7b9d91d6b045f107c1d8513c28ece71c0","impliedFormat":1},{"version":"3c36ab47df4668254ccc170fc42e7d5116fd86a7e408d8dc220e559837cd2bbb","affectsGlobalScope":true,"impliedFormat":1},{"version":"6f6abdaf8764ef01a552a958f45e795b5e79153b87ddad3af5264b86d2681b72","affectsGlobalScope":true,"impliedFormat":1},{"version":"3f16a7e4deafa527ed9995a772bb380eb7d3c2c0fd4ae178c5263ed18394db2c","impliedFormat":1},{"version":"c6b4e0a02545304935ecbf7de7a8e056a31bb50939b5b321c9d50a405b5a0bba","impliedFormat":1},{"version":"c86b9afa9b39b12db8e877d23b48888d80f26e1fe72a95f58552746a6e1fa4fe","impliedFormat":1},{"version":"e432b0e3761ca9ba734bdd41e19a75fec1454ca8e9769bfdf8b31011854cf06a","impliedFormat":1},{"version":"e1120271ebbc9952fdc7b2dd3e145560e52e06956345e6fdf91d70ca4886464f","impliedFormat":1},{"version":"15c5e91b5f08be34a78e3d976179bf5b7a9cc28dc0ef1ffebffeb3c7812a2dca","impliedFormat":1},{"version":"a8f06c2382a30b7cb89ad2dfc48fc3b2b490f3dafcd839dadc008e4e5d57031d","impliedFormat":1},{"version":"07b9d3b7204d931acc29269c98ac3aac87ebcba6e05141552d42a4c17f895aa4","impliedFormat":1},{"version":"269929a24b2816343a178008ac9ae9248304d92a8ba8e233055e0ed6dbe6ef71","impliedFormat":1},{"version":"93452d394fdd1dc551ec62f5042366f011a00d342d36d50793b3529bfc9bd633","impliedFormat":1},{"version":"1425f76ac97ce8617d1e2fa79e9a14e0fd1cfdaa155e13d4e92403a468177bc2","affectsGlobalScope":true,"impliedFormat":1},{"version":"2754d8221d77c7b382096651925eb476f1066b3348da4b73fe71ced7801edada","impliedFormat":1},{"version":"cca97c55398b8699fa3a96ef261b01d200ed2a44d2983586ab1a81d7d7b23cd9","affectsGlobalScope":true,"impliedFormat":1},{"version":"bef91efa0baea5d0e0f0f27b574a8bc100ce62a6d7e70220a0d58af6acab5e89","affectsGlobalScope":true,"impliedFormat":1},{"version":"f59493f68eade5200559e5016b5855f7d12e6381eb6cab9ad8a379af367b3b2d","impliedFormat":1},{"version":"125e3472965f529de239d2bc85b54579fed8e0b060d1d04de6576fb910a6ec7f","impliedFormat":1},{"version":"66ba1b2c3e3a3644a1011cd53
0fb444a96b1b2dfe2f5e837a002d41a1a799e60","impliedFormat":1},{"version":"7e514f5b852fdbc166b539fdd1f4e9114f29911592a5eb10a94bb3a13ccac3c4","impliedFormat":1},{"version":"18f5c7c4ad71748cffdd42e829398acdfd2d150a887e5f07aae4f2acab68e71b","affectsGlobalScope":true,"impliedFormat":1},{"version":"72ed3074450a4a315063278f046637afdeea90aa72b2292a7976958ceafc344a","affectsGlobalScope":true,"impliedFormat":1},{"version":"a5c09990a37469b0311a92ce8feeb8682e83918723aedbd445bd7a0f510eaaa3","impliedFormat":1},{"version":"6b29aea17044029b257e5bd4e3e4f765cd72b8d3c11c753f363ab92cc3f9f947","impliedFormat":1},{"version":"ac5ed35e649cdd8143131964336ab9076937fa91802ec760b3ea63b59175c10a","impliedFormat":1},{"version":"d008cf1330c86b37a8128265c80795397c287cecff273bc3ce3a4883405f5112","affectsGlobalScope":true,"impliedFormat":1},{"version":"78dc0513cc4f1642906b74dda42146bcbd9df7401717d6e89ea6d72d12ecb539","impliedFormat":1},{"version":"ab9b9a36e5284fd8d3bf2f7d5fcbc60052f25f27e4d20954782099282c60d23e","affectsGlobalScope":true,"impliedFormat":1},{"version":"f2b6058d3dd78c1b4dafc97083c5d44bdfbf4155194044bd17b8fcca554e766a","impliedFormat":1}],"root":[[47,191]],"options":{"allowJs":true,"alwaysStrict":true,"declaration":true,"emitDeclarationOnly":true,"emitDecoratorMetadata":true,"esModuleInterop":true,"experimentalDecorators":true,"importHelpers":true,"module":1,"noEmitHelpers":true,"noImplicitAny":false,"removeComments":true,"skipLibCheck":true,"strict":true,"target":1},"referencedMap":[[45,1],[46,1],[8,1],[10,1],[9,1],[2,1],[11,1],[12,1],[13,1],[14,1],[15,1],[16,1],[17,1],[18,1],[3,1],[19,1],[4,1],[20,1],[24,1],[21,1],[22,1],[23,1],[25,1],[26,1],[27,1],[5,1],[28,1],[29,1],[30,1],[31,1],[6,1],[35,1],[32,1],[33,1],[34,1],[36,1],[7,1],[37,1],[42,1],[43,1],[38,1],[39,1],[40,1],[41,1],[1,1],[44,1],[50,2],[51,2],[96,3],[102,4],[100,1],[103,1],[104,1],[105,1],[94,1],[106,1],[63,5],[107,5],[61,1],[108,1],[90,6],[111,7],[153,1],[155,8],[154,1],[156,9],[157,1],[158,10],[159,1],[160,11],[109,12],[165
,13],[164,12],[166,14],[161,1],[167,8],[162,1],[168,15],[163,1],[169,10],[86,1],[112,16],[91,1],[113,1],[52,1],[114,17],[53,1],[115,18],[56,1],[120,19],[118,1],[121,1],[119,1],[122,1],[116,1],[123,20],[117,1],[124,20],[97,1],[125,1],[85,1],[126,1],[55,21],[127,21],[54,1],[128,1],[59,1],[129,5],[98,1],[130,22],[57,1],[131,1],[99,1],[132,23],[92,1],[133,24],[58,1],[134,25],[60,1],[135,26],[62,1],[136,1],[80,1],[170,1],[65,27],[171,27],[64,1],[172,1],[73,1],[173,1],[72,27],[174,27],[74,1],[175,1],[75,1],[176,1],[76,1],[177,1],[178,1],[179,1],[77,1],[180,1],[78,1],[181,1],[79,1],[182,1],[81,1],[183,1],[82,1],[184,1],[66,27],[185,27],[67,27],[186,27],[68,27],[187,27],[69,27],[188,27],[70,27],[189,27],[71,27],[190,27],[83,28],[138,29],[48,1],[139,1],[101,1],[140,1],[88,30],[141,31],[137,1],[142,1],[93,1],[143,1],[87,1],[144,32],[49,1],[145,1],[89,30],[146,33],[84,1],[147,1],[148,1],[149,1],[110,1],[150,34],[47,1],[151,1],[95,1],[152,1],[193,35],[194,1],[195,35],[198,36],[199,36],[238,37],[239,38],[240,39],[241,40],[242,41],[243,42],[244,43],[245,44],[246,45],[247,46],[248,46],[250,47],[249,48],[251,49],[252,50],[253,51],[237,52],[289,1],[254,53],[255,54],[256,55],[257,56],[258,57],[259,58],[260,59],[261,60],[262,61],[263,62],[264,63],[265,64],[266,65],[267,65],[268,66],[269,1],[270,1],[271,67],[273,68],[272,69],[274,70],[275,71],[276,72],[277,73],[278,74],[279,75],[280,76],[197,77],[196,1],[290,78],[281,79],[282,80],[283,81],[284,82],[285,83],[286,84],[287,85],[288,86],[192,1],[215,87],[225,88],[214,87],[235,89],[206,90],[205,91],[234,92],[228,93],[233,94],[208,95],[222,96],[207,97],[231,98],[203,99],[202,92],[232,100],[204,101],[209,102],[210,1],[213,102],[200,1],[236,103],[226,104],[217,105],[218,106],[220,107],[216,108],[219,109],[229,92],[211,110],[212,111],[221,112],[201,113],[224,104],[223,102],[227,1],[230,114],[191,113]],"semanticDiagnosticsPerFile":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,4
2,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290],"version":"5.6.2"} \ No newline at end of file diff --git a/typings/index.d.ts b/typings/index.d.ts index dd3cb105e..d28cb911e 100644 --- a/typings/index.d.ts +++ b/typings/index.d.ts @@ -1,1933 +1,14614 @@ -declare module 'admin' { - export const User: typeof import("user"); - export const Privilege: typeof import("privilege"); - export const Role: typeof import("role"); - -} -declare module 'aerospike' { - export const filter: typeof import("filter"); - export const exp: typeof import("exp"); - export namespace regex { - const BASIC: number; - const EXTENDED: number; - const ICASE: number; - const NEWLINE: number; - } - export type regex = number; - export const info: typeof import("info"); - export const admin: typeof import("admin"); - export const lists: typeof import("lists"); - export const hll: typeof import("hll"); - export const maps: typeof import("maps"); - export namespace cdt { - const Context: typeof import("cdt_context"); - } - export const bitwise: typeof import("bitwise"); - export const operations: typeof import("operations"); 
- export const policy: typeof import("policy"); - export const BasePolicy: typeof import("policies/base_policy"); - export const ApplyPolicy: typeof import("policies/apply_policy"); - export const BatchPolicy: typeof import("policies/batch_policy"); - export const OperatePolicy: typeof import("policies/operate_policy"); - export const QueryPolicy: typeof import("policies/query_policy"); - export const ReadPolicy: typeof import("policies/read_policy"); - export const RemovePolicy: typeof import("policies/remove_policy"); - export const ScanPolicy: typeof import("policies/scan_policy"); - export const WritePolicy: typeof import("policies/write_policy"); - export const BatchApplyPolicy: typeof import("policies/batch_apply_policy"); - export const BatchReadPolicy: typeof import("policies/batch_read_policy"); - export const BatchRemovePolicy: typeof import("policies/batch_remove_policy"); - export const BatchWritePolicy: typeof import("policies/batch_write_policy"); - export const CommandQueuePolicy: typeof import("policies/command_queue_policy"); - export const InfoPolicy: typeof import("policies/info_policy"); - export const ListPolicy: typeof import("policies/list_policy"); - export const MapPolicy: typeof import("policies/map_policy"); - export const AdminPolicy: typeof import("policies/admin_policy"); - export const status: typeof import("status"); - export const features: typeof import("features"); - export { AerospikeError }; - export const Client: typeof import("client"); - export const Config: typeof import("config"); - export const Double: typeof import("double"); - export const GeoJSON: typeof import("geojson"); - export const Key: typeof import("key"); - export const Record: typeof import("record"); - export const Bin: typeof import("bin"); - export type auth = number; - export type language = number; - export type log = number; - export type ttl = number; - export type jobStatus = number; - export type indexDataType = number; - export type indexType = 
number; - export const print: typeof import("utils").print; - export const releaseEventLoop: typeof EventLoop.releaseEventLoop; - export function client(config?: any): import("client"); - export function connect(config?: any, callback?: connectCallback | undefined): Promise | null; - export function setDefaultLogging(logInfo: any): void; - export function setupGlobalCommandQueue(policy: CommandQueuePolicy): void; - export const batchType: { - BATCH_READ: any; - BATCH_WRITE: any; - BATCH_APPLY: any; - BATCH_REMOVE: any; - }; - export const privilegeCode: { - USER_ADMIN: any; - SYS_ADMIN: any; - DATA_ADMIN: any; - UDF_ADMIN: any; - SINDEX_ADMIN: any; - READ: any; - READ_WRITE: any; - READ_WRITE_UDF: any; - WRITE: any; - TRUNCATE: any; - }; - import AerospikeError = require("error"); - import EventLoop = require("event_loop"); -} -declare module 'batch_type' { - export const BATCH_READ: any; - export const BATCH_WRITE: any; - export const BATCH_APPLY: any; - export const BATCH_REMOVE: any; -} -declare module 'bigint' { - export const BigInt: BigIntConstructor; - export const bigIntSupported: true; - export function isInt64(value: any): boolean; -} -declare module 'bin' { - export = Bin; - class Bin { - private constructor(); - name: any; - value: any; - } -} -declare module 'bitwise' { - export function resize(bin: string, size: number, flags?: number | undefined): BitwiseOperation; - export function insert(bin: string, byteOffset: any, value: Buffer): BitwiseOperation; - export function remove(bin: string, byteOffset: number, byteSize: number): BitwiseOperation; - export function set(bin: string, bitOffset: number, bitSize: number, value: number | Buffer): BitwiseOperation; - export function or(bin: string, bitOffset: number, bitSize: number, value: Buffer): BitwiseOperation; - export function xor(bin: string, bitOffset: number, bitSize: number, value: Buffer): BitwiseOperation; - export function and(bin: string, bitOffset: number, bitSize: number, value: Buffer): 
BitwiseOperation; - export function not(bin: string, bitOffset: number, bitSize: number): BitwiseOperation; - export function lshift(bin: string, bitOffset: number, bitSize: number, shift: number): BitwiseOperation; - export function rshift(bin: string, bitOffset: number, bitSize: number, shift: number): BitwiseOperation; - export function add(bin: string, bitOffset: number, bitSize: number, value: number, sign: boolean): OverflowableBitwiseOp; - export function subtract(bin: string, bitOffset: number, bitSize: number, value: number, sign: boolean): OverflowableBitwiseOp; - export function get(bin: string, bitOffset: number, bitSize: number): BitwiseOperation; - export function getInt(bin: string, bitOffset: number, bitSize: number, sign: boolean): BitwiseOperation; - export function lscan(bin: string, bitOffset: number, bitSize: number, value: boolean): BitwiseOperation; - export function rscan(bin: string, bitOffset: number, bitSize: number, value: boolean): BitwiseOperation; - class BitwiseOperation extends Operation { - withPolicy(policy: BitwisePolicy): BitwiseOperation; - policy: any; - } - class OverflowableBitwiseOp extends BitwiseOperation { - overflowAction: any; - onOverflow(action: number): OverflowableBitwiseOp; - } - import Operation_1 = require("operations"); - import Operation = Operation_1.Operation; - export {}; -} -declare module 'cdt_context' { - export = CdtContext; - class CdtContext { - static getContextType(ctx: CdtContext, type: number): number; - items: any[]; - addListIndex(index: number): CdtContext; - addListIndexCreate(index: number, order: number, pad: boolean): CdtContext; - addListRank(rank: number): CdtContext; - addListValue(value: any): CdtContext; - addMapIndex(index: number): CdtContext; - addMapRank(rank: number): CdtContext; - addMapKey(key: any): CdtContext; - addMapKeyCreate(key: any, order: number): CdtContext; - addMapValue(value: any): CdtContext; - private add; - } -} -declare module 'client' { - export = Client; - 
function Client(config: Config): void; - class Client { - constructor(config: Config); - config: Config; - private as_client; - private connected; - captureStackTraces: boolean; - private asExec; - getNodes(): Array<{ - name: string; - address: string; - }>; - contextToBase64(context: any): string; - contextFromBase64(serializedContext: string): CdtContext; - changePassword(user: string, password: string, policy: any): void; - createUser(user: string, password: string, roles: Array, policy: any): void; - createRole(roleName: string, privileges: Array, policy: any, whitelist: Array, readQuota: number, writeQuota: number): void; - dropRole(roleName: string, policy: any): void; - dropUser(user: string, policy: any): void; - grantPrivileges(roleName: string, privileges: Array, policy: any): void; - grantRoles(user: string, roles: Array, policy: any): void; - queryRole(roleName: string, policy: any): Role; - queryRoles(policy: any): Array; - queryUser(user: string, policy: any): User; - queryUsers(policy: any): Array; - revokePrivileges(roleName: string, privileges: Array, policy: any): void; - revokeRoles(user: string, roles: Array, policy: any): void; - setQuotas(roleName: string, readQuota: number, writeQuota: number, policy: any): void; - setWhitelist(roleName: string, whitelist: Array, policy: any): void; - addSeedHost(hostname: string, port?: number | undefined): void; - removeSeedHost(hostname: string, port?: number | undefined): void; - batchExists(keys: Key[], policy?: BatchPolicy, callback?: batchRecordsCallback | undefined): Promise | null; - batchGet(keys: Key[], policy?: BatchPolicy, callback?: batchRecordsCallback | undefined): Promise | null; - batchRead(records: { - type: number; - key: Key; - bins?: string[]; - readAllBins?: boolean; - ops?: any[]; - }, policy?: BatchPolicy, callback?: batchRecordsCallback | undefined): Promise | null; - batchWrite(records: { - type: number; - key: Key; - }, policy?: BatchPolicy, callback?: batchRecordsCallback | 
undefined): Promise | null; - batchApply(keys: Key[], udf: object[], batchPolicy?: BatchPolicy, batchApplyPolicy?: BatchApplyPolicy, callback?: batchRecordsCallback | undefined): Promise | null; - batchRemove(keys: Key[], batchPolicy?: BatchPolicy, batchRemovePolicy?: BatchRemovePolicy, callback?: batchRecordsCallback | undefined): Promise | null; - batchSelect(keys: Key[], bins: string[], policy?: BatchPolicy, callback?: batchRecordsCallback | undefined): Promise | null; - close(releaseEventLoop?: boolean | undefined): void; - connect(callback?: connectCallback | undefined): Promise | null; - createIndex(options: { - ns: string; - set: string; - bin: string; - index: string; - type?: any; - datatype: any; - context: any; - }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; - createIntegerIndex(options: { - ns: string; - set: string; - bin: string; - index: string; - type?: any; - }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; - createStringIndex(options: { - ns: string; - set: string; - bin: string; - index: string; - type?: any; - }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; - createGeo2DSphereIndex(options: { - ns: string; - set: string; - bin: string; - index: string; - type?: any; - }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; - createBlobIndex(options: { - ns: string; - set: string; - bin: string; - index: string; - type?: any; - }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; - apply(key: Key, udfArgs: { - module: string; - funcname: string; - args: Array<(number | string)>; - }, policy?: ApplyPolicy, callback?: valueCallback | undefined): Promise | null; - exists(key: Key, policy?: ReadPolicy, callback?: valueCallback | undefined): Promise | null; - get(key: Key, policy?: ReadPolicy, callback?: recordCallback | undefined): Promise | null; - indexRemove(namespace: string, index: string, policy?: InfoPolicy, 
callback?: doneCallback | undefined): Promise | null; - info(request: string | null, host: { - addr: string; - port?: number | undefined; - }, policy?: InfoPolicy, callback?: infoCallback | undefined): any; - infoAny(request?: string | undefined, policy?: InfoPolicy, callback?: infoCallback | undefined): any; - infoAll(request?: string | undefined, policy?: InfoPolicy, callback?: infoCallback | undefined): any; - infoNode(request: string | null, node: { - name: string; - }, policy?: InfoPolicy, callback?: infoCallback | undefined): any; - isConnected(checkTenderErrors?: boolean | undefined): boolean; - operate(key: Key, operations: any, metadata?: any, policy?: OperatePolicy, callback?: recordCallback | undefined): any; - incr: any; - put(key: Key, bins: object, meta?: object, policy?: WritePolicy, callback?: writeCallback | undefined): any; - query(ns: string, set: string, options?: object): Query; - remove(key: Key, policy?: RemovePolicy, callback?: writeCallback | undefined): any; - scan(ns: string, set: string, options?: object): Scan; - select(key: Key, bins: string[], policy?: ReadPolicy, callback?: recordCallback | undefined): any; - truncate(ns: string, set: string, beforeNanos: any, policy?: InfoPolicy, callback?: doneCallback | undefined): any; - udfRegister(udfPath: any, udfType?: number | undefined, policy?: InfoPolicy, callback?: jobCallback | undefined): any; - stats(): ClientStats; - udfRemove(udfModule: string, policy?: InfoPolicy, callback?: jobCallback | undefined): any; - updateLogging(logConfig: any): void; - } - import Config = require("config"); - import Query = require("query"); - import Scan = require("scan"); - -} -declare module 'commands/batch_command' { - function _exports(asCommand: any): { - new (): { - convertResult(results: any): any; - }; - }; - export = _exports; - -} -declare module 'commands/connect_command' { - function _exports(asCommand: any): { - new (client: any, callback: any): { - ensureConnected: boolean; - }; - }; - 
export = _exports; - -} -declare module 'commands/exists_command' { - function _exports(asCommand: any): { - new (): { - convertResponse(error: any): any[]; - }; - }; - export = _exports; - -} -declare module 'commands/index' { - class ApplyCommand { - } - const BatchExistsCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchExistsCommand extends BatchExistsCommand_base { - } - const BatchGetCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchGetCommand extends BatchGetCommand_base { - } - const BatchReadCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchReadCommand extends BatchReadCommand_base { - } - const BatchWriteCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchWriteCommand extends BatchWriteCommand_base { - } - const BatchApplyCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchApplyCommand extends BatchApplyCommand_base { - } - const BatchRemoveCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchRemoveCommand extends BatchRemoveCommand_base { - } - const BatchSelectCommand_base: { - new (): { - convertResult(results: any): any; - }; - }; - class BatchSelectCommand extends BatchSelectCommand_base { - } - class ChangePasswordCommand { - } - const ConnectCommand_base: { - new (client: any, callback: any): { - ensureConnected: boolean; - }; - }; - class ConnectCommand extends ConnectCommand_base { - } - const ExistsCommand_base: { - new (): { - convertResponse(error: any): any[]; - }; - }; - class ExistsCommand extends ExistsCommand_base { - } - const GetCommand_base: { - new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(bins: any, metadata: any): any; - }; - }; - class GetCommand extends GetCommand_base { - } - class IndexCreateCommand { - } - class IndexRemoveCommand { - } - class InfoAnyCommand { - } - class 
InfoForeachCommand { - } - class InfoHostCommand { - } - class InfoNodeCommand { - } - class JobInfoCommand { - } - const OperateCommand_base: { - new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(bins: any, metadata: any): any; - }; - }; - class OperateCommand extends OperateCommand_base { - } - class PrivilegeGrantCommand { - } - class PrivilegeRevokeCommand { - } - const PutCommand_base: { - new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(): any; - }; - }; - class PutCommand extends PutCommand_base { - } - const QueryCommand_base: { - new (stream: any, args: any): { - stream: any; - callback(error: any, record: any): boolean; - convertResult(bins: any, meta: any, asKey: any): any; - }; - }; - class QueryCommand extends QueryCommand_base { - } - const QueryPagesCommand_base: { - new (stream: any, args: any): { - stream: any; - callback(error: any, record: any): boolean; - convertResult(bins: any, meta: any, asKey: any): any; - }; - }; - class QueryPagesCommand extends QueryPagesCommand_base { - } - class QueryApplyCommand { - } - const QueryBackgroundCommand_base: { - new (client: any, ns: any, set: any, queryObj: any, policy: any, queryID: any, callback: any): { - client: any; - queryID: any; - queryObj: any; - convertResult(): import("job"); - }; - }; - class QueryBackgroundCommand extends QueryBackgroundCommand_base { - } - const QueryOperateCommand_base: { - new (client: any, ns: any, set: any, queryObj: any, policy: any, queryID: any, callback: any): { - client: any; - queryID: any; - queryObj: any; - convertResult(): import("job"); - }; - }; - class QueryOperateCommand extends QueryOperateCommand_base { - } - const QueryForeachCommand_base: { - new (stream: any, args: any): { - stream: any; - callback(error: any, record: any): boolean; - convertResult(bins: any, meta: any, asKey: any): any; - }; - }; - class QueryForeachCommand extends QueryForeachCommand_base { - } - class 
QueryRoleCommand { - } - class QueryRolesCommand { - } - class QueryUserCommand { - } - class QueryUsersCommand { - } - const RemoveCommand_base: { - new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(): any; - }; - }; - class RemoveCommand extends RemoveCommand_base { - } - class RoleCreateCommand { - } - class RoleDropCommand { - } - class RoleGrantCommand { - } - class RoleRevokeCommand { - } - class RoleSetWhitelistCommand { - } - class RoleSetQuotasCommand { - } - const ScanCommand_base: { - new (stream: any, args: any): { - stream: any; - callback(error: any, record: any): boolean; - convertResult(bins: any, meta: any, asKey: any): any; - }; - }; - class ScanCommand extends ScanCommand_base { - } - const ScanPagesCommand_base: { - new (stream: any, args: any): { - stream: any; - callback(error: any, record: any): boolean; - convertResult(bins: any, meta: any, asKey: any): any; - }; - }; - class ScanPagesCommand extends ScanPagesCommand_base { - } - const ScanBackgroundCommand_base: { - new (client: any, ns: any, set: any, queryObj: any, policy: any, queryID: any, callback: any): { - client: any; - queryID: any; - queryObj: any; - convertResult(): import("job"); - }; - }; - class ScanBackgroundCommand extends ScanBackgroundCommand_base { - } - const ScanOperateCommand_base: { - new (client: any, ns: any, set: any, queryObj: any, policy: any, queryID: any, callback: any): { - client: any; - queryID: any; - queryObj: any; - convertResult(): import("job"); - }; - }; - class ScanOperateCommand extends ScanOperateCommand_base { - } - const SelectCommand_base: { - new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(bins: any, metadata: any): any; - }; - }; - class SelectCommand extends SelectCommand_base { - } - class TruncateCommand { - } - class UdfRegisterCommand { - } - class UdfRemoveCommand { - } - class UserCreateCommand { - } - class UserDropCommand { - } - export { ApplyCommand as Apply, 
BatchExistsCommand as BatchExists, BatchGetCommand as BatchGet, BatchReadCommand as BatchRead, BatchWriteCommand as BatchWrite, BatchApplyCommand as BatchApply, BatchRemoveCommand as BatchRemove, BatchSelectCommand as BatchSelect, ChangePasswordCommand as ChangePassword, ConnectCommand as Connect, ExistsCommand as Exists, GetCommand as Get, IndexCreateCommand as IndexCreate, IndexRemoveCommand as IndexRemove, InfoAnyCommand as InfoAny, InfoForeachCommand as InfoForeach, InfoHostCommand as InfoHost, InfoNodeCommand as InfoNode, JobInfoCommand as JobInfo, OperateCommand as Operate, PrivilegeGrantCommand as PrivilegeGrant, PrivilegeRevokeCommand as PrivilegeRevoke, PutCommand as Put, QueryCommand as Query, QueryPagesCommand as QueryPages, QueryApplyCommand as QueryApply, QueryBackgroundCommand as QueryBackground, QueryOperateCommand as QueryOperate, QueryForeachCommand as QueryForeach, QueryRoleCommand as QueryRole, QueryRolesCommand as QueryRoles, QueryUserCommand as QueryUser, QueryUsersCommand as QueryUsers, RemoveCommand as Remove, RoleCreateCommand as RoleCreate, RoleDropCommand as RoleDrop, RoleGrantCommand as RoleGrant, RoleRevokeCommand as RoleRevoke, RoleSetWhitelistCommand as RoleSetWhitelist, RoleSetQuotasCommand as RoleSetQuotas, ScanCommand as Scan, ScanPagesCommand as ScanPages, ScanBackgroundCommand as ScanBackground, ScanOperateCommand as ScanOperate, SelectCommand as Select, TruncateCommand as Truncate, UdfRegisterCommand as UdfRegister, UdfRemoveCommand as UdfRemove, UserCreateCommand as UserCreate, UserDropCommand as UserDrop }; - -} -declare module 'commands/query_background_command' { - function _exports(asCommand: any): { - new (client: any, ns: any, set: any, queryObj: any, policy: any, queryID: any, callback: any): { - client: any; - queryID: any; - queryObj: any; - convertResult(): Job; - }; - }; - export = _exports; - import Job = require("job"); - -} -declare module 'commands/read_record_command' { - function _exports(asCommand: any): { - 
new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(bins: any, metadata: any): any; - }; - }; - export = _exports; - -} -declare module 'commands/stream_command' { - function _exports(asCommand: any): { - new (stream: any, args: any): { - stream: any; - callback(error: any, record: any): boolean; - convertResult(bins: any, meta: any, asKey: any): any; - }; - }; - export = _exports; - -} -declare module 'commands/write_record_command' { - function _exports(asCommand: any): { - new (client: any, key: any, args: any, callback: any): { - key: any; - convertResult(): any; - }; - }; - export = _exports; - -} -declare module 'config' { - export = Config; - class Config { - constructor(config?: any); - user: any; - password: any; - authMode: any; - clusterName: string; - port: any; - tls: any; - hosts: (Host[] | string); - policies: Policies; - log: any; - connTimeoutMs: any; - loginTimeoutMs: any; - maxSocketIdle: any; - tenderInterval: any; - maxConnsPerNode: any; - maxErrorRate: any; - errorRateWindow: any; - minConnsPerNode: any; - modlua: any; - sharedMemory: any; - useAlternateAccessAddress: boolean; - rackAware: boolean; - rackId: any; - setDefaultPolicies(policies: any): void; - private [inspect]; - } - const inspect: unique symbol; - -} -declare module 'double' { - export = Double; - function Double(value: number): void; - class Double { - constructor(value: number); - Double: number; - value(): number; - } - -} -declare module 'error' { - export = AerospikeError; - class AerospikeError extends Error { - private static fromASError; - private static copyASErrorProperties; - private static formatMessage; - private constructor(); - code: number; - command: any | null; - func: string | null; - file: string | null; - line: number | null; - inDoubt: boolean; - private setStackTrace; - isServerError(): boolean; - get client(): any; - } - -} -declare module 'event_loop' { - export function releaseEventLoop(): void; - export function 
registerASEventLoop(): void; - export function referenceEventLoop(): void; - export function unreferenceEventLoop(): void; - export function setCommandQueuePolicy(policy: any): void; - -} -declare module 'exp' { - export function bool(value: any): { - [x: number]: any; - op: any; - }[]; - export function int(value: any): { - [x: number]: any; - op: any; - }[]; - export function uint(value: any): { - [x: number]: any; - op: any; - }[]; - export function float(value: any): { - [x: number]: any; - op: any; - }[]; - export function str(value: any): { - [x: number]: any; - op: any; - }[]; - export function bytes(value: any, size: any): { - [x: number]: any; - op: any; - }[]; - export function geo(value: any): { - [x: number]: any; - op: any; - }[]; - export function nil(): { - op: any; - value: null; - }[]; - export function inf(): { - op: any; - value: null; - }[]; - export function wildcard(): { - op: any; - value: null; - }[]; - export function keyInt(): { - [x: number]: any; - op: any; - }[]; - export function keyStr(): { - [x: number]: any; - op: any; - }[]; - export function keyBlob(): { - [x: number]: any; - op: any; - }[]; - export function keyExist(): AerospikeExp; - export function binBool(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binInt(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binFloat(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binStr(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binBlob(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binGeo(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binList(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binMap(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binHll(binName: any): { - [x: number]: any; - op: any; - }[]; - export function binType(binName: any): ({ - op: any; - strVal: any; - } | { - op: 
any; - count: number; - })[]; - export function binExists(binName: string): boolean; - export function setName(): { - op: any; - count: number; - }[]; - export function deviceSize(): { - op: any; - count: number; - }[]; - export function lastUpdate(): { - op: any; - count: number; - }[]; - export function sinceUpdate(): { - op: any; - count: number; - }[]; - export function voidTime(): { - op: any; - count: number; - }[]; - export function ttl(): { - op: any; - count: number; - }[]; - export function isTombstone(): { - op: any; - count: number; - }[]; - export function memorySize(): { - op: any; - count: number; - }[]; - export function recordSize(): { - op: any; - count: number; - }[]; - export function digestModulo(): { - op: any; - count: number; - }[]; - export function eq(left: any, right: any): { - op: any; - count: number; - }[]; - export function ne(left: any, right: any): { - op: any; - count: number; - }[]; - export function gt(left: any, right: any): { - op: any; - count: number; - }[]; - export function ge(left: any, right: any): { - op: any; - count: number; - }[]; - export function lt(left: any, right: any): { - op: any; - count: number; - }[]; - export function le(left: any, right: any): { - op: any; - count: number; - }[]; - export function cmpRegex(options: number, regex: string, cmpStr: AerospikeExp): AerospikeExp; - export function cmpGeo(left: any, right: any): { - op: any; - count: number; - }[]; - export function not(expr: AerospikeExp): AerospikeExp; - export function and(...expr: any[]): never[]; - export function or(...expr: any[]): never[]; - export function exclusive(...expr: any[]): never[]; - export function add(...expr: any[]): never[]; - export function sub(...expr: any[]): never[]; - export function mul(...expr: any[]): never[]; - export function div(...expr: any[]): never[]; - export function pow(...params: any[]): any[]; - export function log(...params: any[]): any[]; - export function mod(...params: any[]): any[]; - export 
function abs(...params: any[]): any[]; - export function floor(...params: any[]): any[]; - export function ceil(...params: any[]): any[]; - export function toInt(...params: any[]): any[]; - export function toFloat(...params: any[]): any[]; - export function intAnd(...expr: any[]): never[]; - export function intOr(...expr: any[]): never[]; - export function intXor(...expr: any[]): never[]; - export function intNot(...params: any[]): any[]; - export function intLshift(...params: any[]): any[]; - export function intRshift(...params: any[]): any[]; - export function intArshift(...params: any[]): any[]; - export function intCount(...params: any[]): any[]; - export function intLscan(...params: any[]): any[]; - export function intRscan(...params: any[]): any[]; - export function min(...expr: any[]): never[]; - export function max(...expr: any[]): never[]; - export function cond(...expr: any[]): never[]; - function _let(...expr: any[]): never[]; - export function def(varName: string, expr: AerospikeExp): AerospikeExp; - function _var(varName: string): AerospikeExp; - export const lists: { - size: (bin: any, ctx?: any) => any; - getByValue: (bin: any, value: any, returnType: any, ctx?: any) => any; - getByValueRange: (bin: any, begin: any, end: any, returnType: any, ctx?: any) => any; - getByValueList: (bin: any, value: any, returnType: any, ctx?: any) => any; - getByRelRankRangeToEnd: (bin: any, value: any, rank: any, returnType: any, ctx?: any) => any; - getByRelRankRange: (bin: any, value: any, rank: any, count: any, returnType: any, ctx?: any) => any; - getByIndex: (bin: any, index: any, valueType: any, returnType: any, ctx?: any) => any; - getByIndexRangeToEnd: (bin: any, index: any, returnType: any, ctx?: any) => any; - getByIndexRange: (bin: any, index: any, count: any, returnType: any, ctx?: any) => any; - getByRank: (bin: any, rank: any, valueType: any, returnType: any, ctx?: any) => any; - getByRankRangeToEnd: (bin: any, rank: any, returnType: any, ctx?: any) => 
any; - getByRankRange: (bin: any, rank: any, count: any, returnType: any, ctx?: any) => any; - append: (bin: any, value: any, policy?: any, ctx?: any) => any; - appendItems: (bin: any, value: any, policy?: any, ctx?: any) => any; - insert: (bin: any, value: any, idx: any, policy?: any, ctx?: any) => any; - insertItems: (bin: any, value: any, idx: any, policy?: any, ctx?: any) => any; - increment: (bin: any, value: any, idx: any, policy?: any, ctx?: any) => any; - set: (bin: any, value: any, idx: any, policy?: any, ctx?: any) => any; - clear: (bin: any, ctx?: any) => any; - create: (bin: any, order?: number, pad?: boolean, persistIndex?: boolean, ctx?: any) => any; - sort: (bin: any, order: number, ctx?: any) => any; - removeByValue: (bin: any, value: any, ctx?: any, returnType?: any) => any; - removeByValueList: (bin: any, values: any, ctx?: any, returnType?: any) => any; - removeByValueRange: (bin: any, end: any, begin: any, ctx?: any, returnType?: any) => any; - removeByRelRankRangeToEnd: (bin: any, rank: any, value: any, ctx?: any, returnType?: any) => any; - removeByRelRankRange: (bin: any, count: any, rank: any, value: any, ctx?: any, returnType?: any) => any; - removeByIndex: (bin: any, idx: any, ctx?: any, returnType?: any) => any; - removeByIndexRangeToEnd: (bin: any, idx: any, ctx?: any, returnType?: any) => any; - removeByIndexRange: (bin: any, count: any, idx: any, ctx?: any, returnType?: any) => any; - removeByRank: (bin: any, rank: any, ctx?: any, returnType?: any) => any; - removeByRankRangeToEnd: (bin: any, rank: any, ctx?: any, returnType?: any) => any; - removeByRankRange: (bin: any, count: any, rank: any, ctx?: any, returnType?: any) => any; - }; - export const maps: { - put: (bin: any, value: any, key: any, policy?: any, ctx?: any) => any; - putItems: (bin: any, map: any, policy?: any, ctx?: any) => any; - increment: (bin: any, value: any, key: any, policy?: any, ctx?: any) => any; - clear: (bin: any, ctx?: any) => any; - create: (bin: any, 
order?: number, persistIndex?: boolean, ctx?: any) => any; - removeByKey: (bin: any, key: any, ctx?: any, returnType?: any) => any; - removeByKeyList: (bin: any, keys: any, ctx?: any, returnType?: any) => any; - removeByKeyRange: (bin: any, end: any, begin: any, ctx?: any, returnType?: any) => any; - removeByKeyRelIndexRangeToEnd: (bin: any, idx: any, key: any, ctx?: any, returnType?: any) => any; - removeByKeyRelIndexRange: (bin: any, count: any, idx: any, key: any, ctx?: any, returnType?: any) => any; - removeByValue: (bin: any, value: any, ctx?: any, returnType?: any) => any; - removeByValueList: (bin: any, values: any, ctx?: any, returnType?: any) => any; - removeByValueRange: (bin: any, end: any, begin: any, ctx?: any, returnType?: any) => any; - removeByValueRelRankRangeToEnd: (bin: any, rank: any, value: any, ctx?: any, returnType?: any) => any; - removeByValueRelRankRange: (bin: any, count: any, rank: any, value: any, ctx?: any, returnType?: any) => any; - removeByIndex: (bin: any, idx: any, ctx?: any, returnType?: any) => any; - removeByIndexRangeToEnd: (bin: any, idx: any, ctx?: any, returnType?: any) => any; - removeByIndexRange: (bin: any, count: any, idx: any, ctx?: any, returnType?: any) => any; - removeByRank: (bin: any, rank: any, ctx?: any, returnType?: any) => any; - removeByRankRangeToEnd: (bin: any, rank: any, ctx?: any, returnType?: any) => any; - removeByRankRange: (bin: any, count: any, rank: any, ctx?: any, returnType?: any) => any; - size: (bin: any, ctx?: any) => any; - getByKey: (bin: any, key: any, valueType: any, returnType: any, ctx?: any) => any; - getByKeyRange: (bin: any, end: any, begin: any, returnType: any, ctx?: any) => any; - getByKeyList: (bin: any, keys: any, returnType: any, ctx?: any) => any; - getByKeyRelIndexRangeToEnd: (bin: any, idx: any, key: any, returnType: any, ctx?: any) => any; - getByKeyRelIndexRange: (bin: any, count: any, idx: any, key: any, returnType: any, ctx?: any) => any; - getByValue: (bin: any, value: 
any, returnType: any, ctx?: any) => any; - getByValueRange: (bin: any, end: any, begin: any, returnType: any, ctx?: any) => any; - getByValueList: (bin: any, values: any, returnType: any, ctx?: any) => any; - getByValueRelRankRangeToEnd: (bin: any, rank: any, value: any, returnType: any, ctx?: any) => any; - getByValueRelRankRange: (bin: any, count: any, rank: any, value: any, returnType: any, ctx?: any) => any; - getByIndex: (bin: any, idx: any, valueType: any, returnType: any, ctx?: any) => any; - getByIndexRangeToEnd: (bin: any, idx: any, returnType: any, ctx?: any) => any; - getByIndexRange: (bin: any, count: any, idx: any, returnType: any, ctx?: any) => any; - getByRank: (bin: any, rank: any, valueType: any, returnType: any, ctx?: any) => any; - getByRankRangeToEnd: (bin: any, rank: any, returnType: any, ctx?: any) => any; - getByRankRange: (bin: any, count: any, rank: any, returnType: any, ctx?: any) => any; - }; - export const bit: { - reSize: (bin: any, flags: number, byteSize: number, policy?: any) => any; - insert: (bin: any, value: any, byteOffset: any, policy?: any) => any; - remove: (bin: any, byteSize: number, byteOffset: any, policy?: any) => any; - set: (bin: any, value: any, bitSize: any, bitOffset: any, policy?: any) => any; - or: (bin: any, value: any, bitSize: any, bitOffset: any, policy?: any) => any; - xor: (bin: any, value: any, bitSize: any, bitOffset: any, policy?: any) => any; - and: (bin: any, value: any, bitSize: any, bitOffset: any, policy?: any) => any; - not: (bin: any, bitSize: any, bitOffset: any, policy?: any) => any; - lShift: (bin: any, shift: number, bitSize: any, bitOffset: any, policy?: any) => any; - rShift: (bin: any, shift: number, bitSize: any, bitOffset: any, policy?: any) => any; - add: (bin: any, action: number, value: any, bitSize: any, bitOffset: any, policy?: any) => any; - subtract: (bin: any, action: number, value: any, bitSize: any, bitOffset: any, policy?: any) => any; - setInt: (bin: any, value: any, bitSize: 
any, bitOffset: any, policy?: any) => any; - get: (bin: any, bitSize: any, bitOffset: any) => any; - count: (bin: any, bitSize: any, bitOffset: any) => number; - lScan: (bin: any, value: any, bitSize: any, bitOffset: any) => number; - rScan: (bin: any, value: any, bitSize: any, bitOffset: any) => number; - getInt: (bin: any, sign: boolean, bitSize: any, bitOffset: any) => any; - }; - export const hll: { - initMH: (bin: any, mhBitCount: number, indexBitCount: number, policy?: any) => any; - init: (bin: any, indexBitCount: number, policy?: any) => any; - addMH: (bin: any, mhBitCount: number, indexBitCount: number, list: any, policy?: any) => any; - add: (bin: any, indexBitCount: number, list: any, policy?: any) => any; - update: (bin: any, list: any, policy?: any) => any; - getCount: (bin: any) => any; - getUnion: (bin: any, list: any) => any; - getUnionCount: (bin: any, list: any) => any; - getIntersectCount: (bin: any, list: any) => any; - getSimilarity: (bin: any, list: any) => any[]; - describe: (bin: any) => any; - mayContain: (bin: any, list: any) => any; - }; - export const expReadFlags: { - DEFAULT: 0; - EVAL_NO_FAIL: 16; - } - export const expWriteFlags: { - DEFAULT: 0; - CREATE_ONLY: 1; - UPDATE_ONLY: 2; - ALLOW_DELETE: 4; - POLICY_NO_FAIL: 8; - EVAL_NO_FAIL: 16; - } - function _val(value: any): { - [x: number]: any; - op: any; - }[]; - export { _val as list, _val as map, _let as let, _var as var }; - -} -declare module 'exp_bit' { - export function reSize(bin: any, flags: number, byteSize: number, policy?: any): any; - export function insert(bin: any, value: any, byteOffset: any, policy?: any): any; - export function remove(bin: any, byteSize: number, byteOffset: any, policy?: any): any; - export function set(bin: any, value: any, bitSize: any, bitOffset: any, policy?: any): any; - export function or(bin: any, value: any, bitSize: any, bitOffset: any, policy?: any): any; - export function xor(bin: any, value: any, bitSize: any, bitOffset: any, policy?: 
any): any; - export function and(bin: any, value: any, bitSize: any, bitOffset: any, policy?: any): any; - export function not(bin: any, bitSize: any, bitOffset: any, policy?: any): any; - export function lShift(bin: any, shift: number, bitSize: any, bitOffset: any, policy?: any): any; - export function rShift(bin: any, shift: number, bitSize: any, bitOffset: any, policy?: any): any; - export function add(bin: any, action: number, value: any, bitSize: any, bitOffset: any, policy?: any): any; - export function subtract(bin: any, action: number, value: any, bitSize: any, bitOffset: any, policy?: any): any; - export function setInt(bin: any, value: any, bitSize: any, bitOffset: any, policy?: any): any; - export function get(bin: any, bitSize: any, bitOffset: any): any; - export function count(bin: any, bitSize: any, bitOffset: any): number; - export function lScan(bin: any, value: any, bitSize: any, bitOffset: any): number; - export function rScan(bin: any, value: any, bitSize: any, bitOffset: any): number; - export function getInt(bin: any, sign: boolean, bitSize: any, bitOffset: any): any; - -} -declare module 'exp_hll' { - export function initMH(bin: any, mhBitCount: number, indexBitCount: number, policy?: any): any; - export function init(bin: any, indexBitCount: number, policy?: any): any; - export function addMH(bin: any, mhBitCount: number, indexBitCount: number, list: any, policy?: any): any; - export function add(bin: any, indexBitCount: number, list: any, policy?: any): any; - export function update(bin: any, list: any, policy?: any): any; - export function getCount(bin: any): any; - export function getUnion(bin: any, list: any): any; - export function getUnionCount(bin: any, list: any): any; - export function getIntersectCount(bin: any, list: any): any; - export function getSimilarity(bin: any, list: any): any[]; - export function describe(bin: any): any; - export function mayContain(bin: any, list: any): any; - -} -declare module 'exp_lists' { - export 
function size(bin: any, ctx?: any): any; - export function getByValue(bin: any, value: any, returnType: any, ctx?: any): any; - export function getByValueRange(bin: any, begin: any, end: any, returnType: any, ctx?: any): any; - export function getByValueList(bin: any, value: any, returnType: any, ctx?: any): any; - export function getByRelRankRangeToEnd(bin: any, value: any, rank: any, returnType: any, ctx?: any): any; - export function getByRelRankRange(bin: any, value: any, rank: any, count: any, returnType: any, ctx?: any): any; - export function getByIndex(bin: any, index: any, valueType: any, returnType: any, ctx?: any): any; - export function getByIndexRangeToEnd(bin: any, index: any, returnType: any, ctx?: any): any; - export function getByIndexRange(bin: any, index: any, count: any, returnType: any, ctx?: any): any; - export function getByRank(bin: any, rank: any, valueType: any, returnType: any, ctx?: any): any; - export function getByRankRangeToEnd(bin: any, rank: any, returnType: any, ctx?: any): any; - export function getByRankRange(bin: any, rank: any, count: any, returnType: any, ctx?: any): any; - export function append(bin: any, value: any, policy?: any, ctx?: any): any; - export function appendItems(bin: any, value: any, policy?: any, ctx?: any): any; - export function insert(bin: any, value: any, idx: any, policy?: any, ctx?: any): any; - export function insertItems(bin: any, value: any, idx: any, policy?: any, ctx?: any): any; - export function increment(bin: any, value: any, idx: any, policy?: any, ctx?: any): any; - export function set(bin: any, value: any, idx: any, policy?: any, ctx?: any): any; - export function clear(bin: any, ctx?: any): any; - export function sort(bin: any, order: number, ctx?: any): any; - export function removeByValue(bin: any, value: any, ctx?: any, returnType?: any): any; - export function removeByValueList(bin: any, values: any, ctx?: any, returnType?: any): any; - export function removeByValueRange(bin: any, end: 
any, begin: any, ctx?: any, returnType?: any): any; - export function removeByRelRankRangeToEnd(bin: any, rank: any, value: any, ctx?: any, returnType?: any): any; - export function removeByRelRankRange(bin: any, count: any, rank: any, value: any, ctx?: any, returnType?: any): any; - export function removeByIndex(bin: any, idx: any, ctx?: any, returnType?: any): any; - export function removeByIndexRangeToEnd(bin: any, idx: any, ctx?: any, returnType?: any): any; - export function removeByIndexRange(bin: any, count: any, idx: any, ctx?: any, returnType?: any): any; - export function removeByRank(bin: any, rank: any, ctx?: any, returnType?: any): any; - export function removeByRankRangeToEnd(bin: any, rank: any, ctx?: any, returnType?: any): any; - export function removeByRankRange(bin: any, count: any, rank: any, ctx?: any, returnType?: any): any; - -} -declare module 'exp_maps' { - export function put(bin: any, value: any, key: any, policy?: any, ctx?: any): any; - export function putItems(bin: any, map: any, policy?: any, ctx?: any): any; - export function increment(bin: any, value: any, key: any, policy?: any, ctx?: any): any; - export function clear(bin: any, ctx?: any): any; - export function removeByKey(bin: any, key: any, ctx?: any, returnType?: any): any; - export function removeByKeyList(bin: any, keys: any, ctx?: any, returnType?: any): any; - export function removeByKeyRange(bin: any, end: any, begin: any, ctx?: any, returnType?: any): any; - export function removeByKeyRelIndexRangeToEnd(bin: any, idx: any, key: any, ctx?: any, returnType?: any): any; - export function removeByKeyRelIndexRange(bin: any, count: any, idx: any, key: any, ctx?: any, returnType?: any): any; - export function removeByValue(bin: any, value: any, ctx?: any, returnType?: any): any; - export function removeByValueList(bin: any, values: any, ctx?: any, returnType?: any): any; - export function removeByValueRange(bin: any, end: any, begin: any, ctx?: any, returnType?: any): any; - 
export function removeByValueRelRankRangeToEnd(bin: any, rank: any, value: any, ctx?: any, returnType?: any): any; - export function removeByValueRelRankRange(bin: any, count: any, rank: any, value: any, ctx?: any, returnType?: any): any; - export function removeByIndex(bin: any, idx: any, ctx?: any, returnType?: any): any; - export function removeByIndexRangeToEnd(bin: any, idx: any, ctx?: any, returnType?: any): any; - export function removeByIndexRange(bin: any, count: any, idx: any, ctx?: any, returnType?: any): any; - export function removeByRank(bin: any, rank: any, ctx?: any, returnType?: any): any; - export function removeByRankRangeToEnd(bin: any, rank: any, ctx?: any, returnType?: any): any; - export function removeByRankRange(bin: any, count: any, rank: any, ctx?: any, returnType?: any): any; - export function size(bin: any, ctx?: any): any; - export function getByKey(bin: any, key: any, valueType: any, returnType: any, ctx?: any): any; - export function getByKeyRange(bin: any, end: any, begin: any, returnType: any, ctx?: any): any; - export function getByKeyList(bin: any, keys: any, returnType: any, ctx?: any): any; - export function getByKeyRelIndexRangeToEnd(bin: any, idx: any, key: any, returnType: any, ctx?: any): any; - export function getByKeyRelIndexRange(bin: any, count: any, idx: any, key: any, returnType: any, ctx?: any): any; - export function getByValue(bin: any, value: any, returnType: any, ctx?: any): any; - export function getByValueRange(bin: any, end: any, begin: any, returnType: any, ctx?: any): any; - export function getByValueList(bin: any, values: any, returnType: any, ctx?: any): any; - export function getByValueRelRankRangeToEnd(bin: any, rank: any, value: any, returnType: any, ctx?: any): any; - export function getByValueRelRankRange(bin: any, count: any, rank: any, value: any, returnType: any, ctx?: any): any; - export function getByIndex(bin: any, idx: any, valueType: any, returnType: any, ctx?: any): any; - export function 
getByIndexRangeToEnd(bin: any, idx: any, returnType: any, ctx?: any): any; - export function getByIndexRange(bin: any, count: any, idx: any, returnType: any, ctx?: any): any; - export function getByRank(bin: any, rank: any, valueType: any, returnType: any, ctx?: any): any; - export function getByRankRangeToEnd(bin: any, rank: any, returnType: any, ctx?: any): any; - export function getByRankRange(bin: any, count: any, rank: any, returnType: any, ctx?: any): any; - -} -declare module 'exp_operations' { - export function read(bin: string, exp: AerospikeExp, flags: number): Operation; - export function write(bin: string, exp: AerospikeExp, flags: number): Operation; - export class ExpOperation { - protected constructor(); - op: any; - bin: any; - exp: any; - flags: any; - } - -} -declare module 'features' { - export const CDT_MAP: "cdt-map"; - export const CDT_LIST: "cdt-list"; - export const BLOB_BITS: "blob-bits"; - -} -declare module 'filter' { - export function range(bin: string, min: number, max: number, indexType?: number | undefined, context?: any): any; - export function equal(bin: string, value: string): any; - export function contains(bin: string, value: (string | number), indexType: number, context?: any): any; - export function geoWithinGeoJSONRegion(bin: string, value: GeoJSON, indexType?: number | undefined, context?: any): any; - export function geoContainsGeoJSONPoint(bin: string, value: GeoJSON, indexType?: number | undefined, context?: any): any; - export function geoWithinRadius(bin: string, lon: any, lat: number, radius: number, indexType?: number | undefined, context?: any): any; - export function geoContainsPoint(bin: string, lon: any, lat: number, indexType?: number | undefined, context?: any): any; - export class SindexFilterPredicate { - constructor(predicate: any, bin: any, dataType: any, indexType: any, context: any, props: any); - predicate: any; - bin: any; - datatype: any; - type: any; - context: any; - } - import GeoJSON = 
require("geojson"); - -} -declare module 'geojson' { - export = GeoJSON; - function GeoJSON(json: any): GeoJSON; - class GeoJSON { - constructor(json: any); - str: string | undefined; - toJSON(): any; - toString(): string; - value(): any; - } - namespace GeoJSON { - function Point(lng: number, lat: number): GeoJSON; - function Polygon(...args: number[][]): GeoJSON; - function Circle(lng: number, lat: number, radius: number): GeoJSON; - } - -} -declare module 'hll' { - export function init(bin: string, indexBits: number, minhashBits?: number | undefined): any; - export function add(bin: string, list: any[], indexBits?: number | undefined, minhashBits?: number | undefined): any; - export function setUnion(bin: string, list: any[]): any; - export function refreshCount(bin: string): any; - export function fold(bin: string, indexBits: number): any; - export function getCount(bin: string): any; - export function getUnion(bin: string, list: any[]): any; - export function getUnionCount(bin: string, list: any[]): any; - export function getIntersectCount(bin: string, list: any[]): any; - export function getSimilarity(bin: string, list: any[]): any; - export function describe(bin: string): any; - -} -declare module 'index_job' { - export = IndexJob; - function IndexJob(client: any, namespace: any, indexName: any): void; - class IndexJob { - constructor(client: any, namespace: any, indexName: any); - client: any; - namespace: any; - indexName: any; - private hasCompleted; - private info; - } - -} -declare module 'info' { - export function parse(info: string): any; - export const separators: { - bins: (string | typeof splitBins)[]; - 'bins/*': (typeof splitBins)[]; - 'namespace/*': string[]; - service: string[]; - sindex: string[]; - 'sindex/*': string[]; - 'sindex/*/**': string[]; - 'udf-list': string[]; - 'get-dc-config': string[]; - sets: string[]; - 'sets/*': string[]; - 'sets/*/**': (string | typeof chop)[]; - }; - function splitBins(str: any): { - stats: {}; - names: 
any[]; - }; - function chop(str: any): any; - export {}; - -} -declare module 'job' { - export = Job; - function Job(client: any, jobID: any, module: any): void; - class Job { - constructor(client: any, jobID: any, module: any); - client: any; - jobID: any; - module: any; - private hasCompleted; - private checkStatus; - info(policy: any, callback?: JobinfoCallback | undefined): Promise | null; - wait(pollInterval?: number | undefined, callback?: JobdoneCallback | undefined): Promise | null; - waitUntilDone: any; - } - namespace Job { - function safeRandomJobID(): number; - function pollUntilDone(statusFunction: any, pollInterval: any): Promise; - } - -} -declare module 'key' { - export = Key; - function Key(ns: string, set: string, key: (string | number | Buffer), digest?: string | undefined): void; - class Key { - constructor(ns: string, set: string, key: (string | number | Buffer), digest?: string | undefined); - ns: string; - set: string; - key: string | number | Buffer; - digest: string | null; - equals(other: any): any; - } - namespace Key { - function fromASKey(keyObj: any): Key | null; - } - -} -declare module 'lists' { - export function setOrder(bin: string, order: number): any; - export function sort(bin: string, flags: number): any; - export function append(bin: string, value: any, policy?: any): any; - export function appendItems(bin: string, list: Array, policy?: any): any; - export function insert(bin: string, index: number, value: any, policy?: any): any; - export function insertItems(bin: string, index: number, list: Array, policy: any): any; - export function pop(bin: string, index: number): any; - export function popRange(bin: string, index: number, count?: number | undefined): any; - export function remove(bin: string, index: number): any; - export function removeRange(bin: string, index: number, count?: number | undefined): any; - export function removeByIndex(bin: string, index: number, returnType?: number | undefined): any; - export function 
removeByIndexRange(bin: string, index: number, count?: number | undefined, returnType?: number | undefined): any; - export function removeByValue(bin: string, value: any, returnType?: number | undefined): any; - export function removeByValueList(bin: string, values: Array, returnType?: number | undefined): any; - export function removeByValueRange(bin: string, begin: any | null, end: any | null, returnType?: number | undefined): any; - export function removeByValueRelRankRange(bin: string, value: any, rank: number, count?: number | undefined, returnType?: number | undefined): any; - export function removeByRank(bin: string, rank: number, returnType?: number | undefined): any; - export function removeByRankRange(bin: string, rank: any, count?: number | undefined, returnType?: number | undefined): any; - export function clear(bin: string): any; - export function set(bin: string, index: number, value: any, policy?: any): any; - export function trim(bin: string, index: number, count: number): any; - export function get(bin: string, index: number): any; - export function getRange(bin: string, index: number, count?: number | undefined): any; - export function getByIndex(bin: string, index: number, returnType?: number | undefined): any; - export function getByIndexRange(bin: string, index: number, count?: number | undefined, returnType?: number | undefined): any; - export function getByValue(bin: string, value: any, returnType?: number | undefined): any; - export function getByValueList(bin: string, values: Array, returnType?: number | undefined): any; - export function getByValueRange(bin: string, begin: any | null, end: any | null, returnType?: number | undefined): any; - export function getByValueRelRankRange(bin: string, value: any, rank: number, count?: number | undefined, returnType?: number | undefined): any; - export function getByRank(bin: string, rank: number, returnType?: number | undefined): any; - export function getByRankRange(bin: string, rank: any, count?: 
number | undefined, returnType?: number | undefined): any; - export function increment(bin: string, index: number, value?: number | undefined, policy?: any): any; - export function size(bin: string): any; - -} -declare module 'maps' { - export function setPolicy(bin: string, policy: MapPolicy): any; - export function put(bin: string, key: any, value: any, policy?: MapPolicy): any; - export function putItems(bin: string, items: object, policy?: MapPolicy): any; - export function increment(bin: string, key: any, incr: number, policy?: MapPolicy): any; - export function decrement(bin: string, key: any, decr: number, policy?: MapPolicy): any; - export function clear(bin: string): any; - export function removeByKey(bin: string, key: any, returnType?: number | undefined): any; - export function removeByKeyList(bin: string, keys: Array, returnType?: number | undefined): any; - export function removeByKeyRange(bin: string, begin: any | null, end: any | null, returnType?: number | undefined): any; - export function removeByKeyRelIndexRange(bin: string, key: any, index: number, count?: number | undefined, returnType?: number | undefined): any; - export function removeByValue(bin: string, value: any, returnType?: number | undefined): any; - export function removeByValueList(bin: string, values: Array, returnType?: number | undefined): any; - export function removeByValueRange(bin: string, begin: any | null, end: any | null, returnType?: number | undefined): any; - export function removeByValueRelRankRange(bin: string, value: any, rank: number, count?: number | undefined, returnType?: number | undefined): any; - export function removeByIndex(bin: string, index: number, returnType?: number | undefined): any; - export function removeByIndexRange(bin: string, index: number, count?: number | undefined, returnType?: number | undefined): any; - export function removeByRank(bin: string, rank: number, returnType?: number | undefined): any; - export function removeByRankRange(bin: 
string, rank: any, count?: number | undefined, returnType?: number | undefined): any; - export function size(bin: string): any; - export function getByKey(bin: string, key: any, returnType?: number | undefined): any; - export function getByKeyList(bin: string, keys: any, returnType?: number | undefined): any; - export function getByKeyRange(bin: string, begin: any | null, end: any | null, returnType?: number | undefined): any; - export function getByKeyRelIndexRange(bin: string, key: any, index: number, count?: number | undefined, returnType?: number | undefined): any; - export function getByValue(bin: string, value: any, returnType?: number | undefined): any; - export function getByValueList(bin: string, values: any, returnType?: number | undefined): any; - export function getByValueRange(bin: string, begin: any | null, end: any | null, returnType?: number | undefined): any; - export function getByValueRelRankRange(bin: string, value: any, rank: number, count?: number | undefined, returnType?: number | undefined): any; - export function getByIndex(bin: string, index: number, returnType?: number | undefined): any; - export function getByIndexRange(bin: string, index: number, count?: number | undefined, returnType?: number | undefined): any; - export function getByRank(bin: string, rank: number, returnType?: number | undefined): any; - export function getByRankRange(bin: string, rank: any, count: number, returnType?: number | undefined): any; - export function create(bin: string, order: number, persistIndex: boolean | undefined, ctx: any): any; - -} -declare module 'operations' { - export function read(bin: string): Operation; - export function write(bin: string, value: any): Operation; - export function add(bin: string, value: (number | any)): Operation; - export function incr(bin: any, value: any): Operation; - export function append(bin: string, value: (string | Buffer)): Operation; - export function prepend(bin: string, value: (string | Buffer)): Operation; - 
export function touch(ttl?: number | undefined): Operation; - function _delete(): Operation; - export class Operation { - protected constructor(); - op: any; - bin: any; - } - export { _delete as delete }; - -} -declare module 'policies/admin_policy' { - export = AdminPolicy; - class AdminPolicy { - constructor(props?: any); - timeout: number; - } - -} -declare module 'policies/apply_policy' { - export = ApplyPolicy; - class ApplyPolicy extends BasePolicy { - key: number; - commitLevel: number; - ttl: number; - durableDelete: boolean; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/base_policy' { - export = BasePolicy; - class BasePolicy { - constructor(props: any); - socketTimeout: number; - totalTimeout: number; - maxRetries: number; - filterExpression: any; - compress: boolean; - } - -} -declare module 'policies/batch_apply_policy' { - export = BatchApplyPolicy; - class BatchApplyPolicy { - constructor(props?: any); - filterExpression: any; - key: number; - commitLevel: number; - ttl: number; - durableDelete: boolean; - } - -} -declare module 'policies/batch_policy' { - export = BatchPolicy; - class BatchPolicy extends BasePolicy { - replica: number; - readModeAP: number; - readModeSC: number; - concurrent: boolean; - allowInline: boolean; - allowInlineSSD: boolean; - respondAllKeys: boolean; - sendSetName: boolean; - deserialize: boolean; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/batch_read_policy' { - export = BatchReadPolicy; - class BatchReadPolicy { - constructor(props?: any); - filterExpression: any; - readModeAP: number; - readModeSC: number; - } - -} -declare module 'policies/batch_remove_policy' { - export = BatchRemovePolicy; - class BatchRemovePolicy { - constructor(props?: any); - filterExpression: any; - key: number; - commitLevel: number; - gen: number; - generation: number; - durableDelete: boolean; - } - -} -declare module 'policies/batch_write_policy' { - 
export = BatchWritePolicy; - class BatchWritePolicy { - constructor(props?: any); - filterExpression: any; - key: number; - commitLevel: number; - gen: number; - exists: number; - durableDelete: boolean; - } - -} -declare module 'policies/bitwise_policy' { - export = BitwisePolicy; - class BitwisePolicy { - constructor(props?: any); - writeFlags: number; - } - -} -declare module 'policies/command_queue_policy' { - export = CommandQueuePolicy; - class CommandQueuePolicy { - constructor(props?: { - maxCommandsInProcess?: number | undefined; - maxCommandsInQueue?: number | undefined; - queueInitialCapacity?: number | undefined; - } | undefined); - maxCommandsInProcess: number; - maxCommandsInQueue: number; - queueInitialCapacity: number; - } - -} -declare module 'policies/hll_policy' { - export = HLLPolicy; - class HLLPolicy { - constructor(props?: any); - writeFlags: number; - } - -} -declare module 'policies/info_policy' { - export = InfoPolicy; - class InfoPolicy { - constructor(props?: any); - timeout: number; - sendAsIs: boolean; - checkBounds: boolean; - } - -} -declare module 'policies/list_policy' { - export = ListPolicy; - class ListPolicy { - constructor(props?: any); - order: number; - writeFlags: number; - } - -} -declare module 'policies/map_policy' { - export = MapPolicy; - class MapPolicy { - constructor(props?: any); - order: number; - writeMode: number; - writeFlags: number; - } - -} -declare module 'policies/operate_policy' { - export = OperatePolicy; - class OperatePolicy extends BasePolicy { - key: number; - gen: number; - exists: number; - replica: number; - commitLevel: number; - deserialize: boolean; - durableDelete: boolean; - readModeAP: number; - readModeSC: number; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/query_policy' { - export = QueryPolicy; - class QueryPolicy extends BasePolicy { - replica: number; - deserialize: boolean; - failOnClusterChange: boolean; - infoTimeout: number; - 
expectedDuration: number; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/read_policy' { - export = ReadPolicy; - class ReadPolicy extends BasePolicy { - key: number; - replica: number; - readModeAP: number; - readModeSC: number; - deserialize: boolean; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/remove_policy' { - export = RemovePolicy; - class RemovePolicy extends BasePolicy { - generation: number; - key: number; - gen: number; - commitLevel: number; - durableDelete: boolean; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/scan_policy' { - export = ScanPolicy; - class ScanPolicy extends BasePolicy { - replica: number; - durableDelete: boolean; - recordsPerSecond: number; - maxRecords: number; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policies/write_policy' { - export = WritePolicy; - class WritePolicy extends BasePolicy { - compressionThreshold: number; - key: number; - gen: number; - exists: number; - commitLevel: number; - durableDelete: boolean; - } - import BasePolicy = require("policies/base_policy"); - -} -declare module 'policy' { - export function createPolicy(type: any, values: any): CommandQueuePolicy | BasePolicy | BatchApplyPolicy | BatchReadPolicy | BatchRemovePolicy | BatchWritePolicy | HLLPolicy | InfoPolicy | AdminPolicy | undefined; - import BasePolicy = require("policies/base_policy"); - import ApplyPolicy = require("policies/apply_policy"); - import OperatePolicy = require("policies/operate_policy"); - import QueryPolicy = require("policies/query_policy"); - import ReadPolicy = require("policies/read_policy"); - import RemovePolicy = require("policies/remove_policy"); - import ScanPolicy = require("policies/scan_policy"); - import WritePolicy = require("policies/write_policy"); - import BatchPolicy = require("policies/batch_policy"); - import BatchApplyPolicy = 
require("policies/batch_apply_policy"); - import BatchReadPolicy = require("policies/batch_read_policy"); - import BatchRemovePolicy = require("policies/batch_remove_policy"); - import BatchWritePolicy = require("policies/batch_write_policy"); - import CommandQueuePolicy = require("policies/command_queue_policy"); - import HLLPolicy = require("policies/hll_policy"); - import InfoPolicy = require("policies/info_policy"); - import AdminPolicy = require("policies/admin_policy"); - import ListPolicy = require("policies/list_policy"); - import MapPolicy = require("policies/map_policy"); - export { BasePolicy, ApplyPolicy, OperatePolicy, QueryPolicy, ReadPolicy, RemovePolicy, ScanPolicy, WritePolicy, BatchPolicy, BatchApplyPolicy, BatchReadPolicy, BatchRemovePolicy, BatchWritePolicy, CommandQueuePolicy, HLLPolicy, InfoPolicy, AdminPolicy, ListPolicy, MapPolicy }; - -} -declare module 'privilege' { - export = Privilege; - function Privilege(code: any, options: any): void; - class Privilege { - constructor(code: any, options: any); - code: any; - namespace: any; - set: any; - } - -} -declare module 'privilege_code' { - export const USER_ADMIN: any; - export const SYS_ADMIN: any; - export const DATA_ADMIN: any; - export const UDF_ADMIN: any; - export const SINDEX_ADMIN: any; - export const READ: any; - export const READ_WRITE: any; - export const READ_WRITE_UDF: any; - export const WRITE: any; - export const TRUNCATE: any; - -} -declare module 'query' { - export = Query; - function Query(client: Client, ns: string, set: string, options?: { - filters?: any[] | undefined; - select?: string[] | undefined; - nobins?: boolean | undefined; - ttl?: number | undefined; - } | undefined): void; - class Query { - constructor(client: Client, ns: string, set: string, options?: { - filters?: any[] | undefined; - select?: string[] | undefined; - nobins?: boolean | undefined; - ttl?: number | undefined; - } | undefined); - client: Client; - ns: string; - set: string; - filters: any[]; - 
selected: string[] | undefined; - nobins: boolean | undefined; - udf: any; - private pfEnabled; - paginate: boolean; - maxRecords: number; - queryState: any; - ttl: number | undefined; - nextPage(state: object[]): void; - hasNextPage(): boolean; - select(...args: string[]): void; - where(indexFilter: any): void; - setSindexFilter(sindexFilter: any): void; - setUdf(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined): void; - partitions(begin: number, count: number, digest: string): void; - partFilter: { - begin: number; - count: number; - digest: string; - } | undefined; - foreach(policy?: QueryPolicy, dataCb?: recordCallback | undefined, errorCb?: errorCallback | undefined, endCb?: doneCallback | undefined): RecordStream; - results(policy?: QueryPolicy): Promise; - apply(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined, policy?: QueryPolicy, callback?: QueryaggregationResultCallback | undefined): Promise | null; - background(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined, policy?: QueryPolicy, queryID?: number | undefined, callback?: jobCallback | undefined): Promise | null; - operate(operations: any, policy?: QueryPolicy, queryID?: number | undefined, callback?: jobCallback | undefined): Promise | null; - ops: any; - } - import RecordStream = require("record_stream"); - -} -declare module 'query_duration' { - export const LONG: any; - export const SHORT: any; - export const LONG_RELAX_AP: any; -} -declare module 'record' { - export = Record; - class Record { - private constructor(); - key: any; - bins: any; - ttl: any; - gen: any; - type: any; - policy: any; - readAllBins: any; - ops: any; - udf: any; - } - -} -declare module 'record_stream' { - export = RecordStream; - function RecordStream(client: any): void; - class RecordStream { - constructor(client: any); - aborted: boolean; - client: any; - writable: boolean; - readable: boolean; - _read(): void; - abort(): void; - } - -} -declare module 'role' { - 
export = Role; - function Role(options: any): void; - class Role { - constructor(options: any); - name: any; - readQuota: any; - writeQuota: any; - whitelist: any; - privileges: any; - } - -} -declare module 'scan' { - export = Scan; - function Scan(client: Client, ns: string, set: string, options?: { - select?: string[] | undefined; - nobins?: boolean | undefined; - concurrent?: boolean | undefined; - ttl?: number | undefined; - } | undefined): void; - class Scan { - constructor(client: Client, ns: string, set: string, options?: { - select?: string[] | undefined; - nobins?: boolean | undefined; - concurrent?: boolean | undefined; - ttl?: number | undefined; - } | undefined); - client: Client; - ns: string; - set: string; - selected: string[] | undefined; - nobins: boolean | undefined; - concurrent: boolean | undefined; - private pfEnabled; - paginate: boolean; - scanState: any; - ttl: number | undefined; - nextPage(state: object[]): void; - hasNextPage(): boolean; - select(...args: string[]): void; - partitions(begin: number, count: number, digest: string): void; - partFilter: { - begin: number; - count: number; - digest: string; - } | undefined; - background(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined, policy?: ScanPolicy, scanID?: number | undefined, callback?: jobCallback | undefined): Promise | null; - udf: { - module: string; - funcname: string; - args: any[] | undefined; - } | undefined; - operate(operations: any, policy?: ScanPolicy, scanID?: number | undefined, callback?: jobCallback | undefined): Promise | null; - ops: any; - foreach(policy?: ScanPolicy, dataCb?: recordCallback | undefined, errorCb?: errorCallback | undefined, endCb?: doneCallback | undefined): RecordStream; - results(policy?: ScanPolicy): Promise; - } - import RecordStream = require("record_stream"); - -} -declare module 'status' { - export const ERR_ASYNC_QUEUE_FULL: any; - export const ERR_CONNECTION: any; - export const ERR_INVALID_NODE: any; - export const 
ERR_NO_MORE_CONNECTIONS: any; - export const ERR_ASYNC_CONNECTION: any; - export const ERR_CLIENT_ABORT: any; - export const ERR_INVALID_HOST: any; - export const NO_MORE_RECORDS: any; - export const ERR_PARAM: any; - export const ERR_CLIENT: any; - export const OK: any; - export const ERR_SERVER: any; - export const ERR_RECORD_NOT_FOUND: any; - export const ERR_RECORD_GENERATION: any; - export const ERR_REQUEST_INVALID: any; - export const ERR_RECORD_EXISTS: any; - export const ERR_BIN_EXISTS: any; - export const ERR_CLUSTER_CHANGE: any; - export const ERR_SERVER_FULL: any; - export const ERR_TIMEOUT: any; - export const ERR_ALWAYS_FORBIDDEN: any; - export const ERR_CLUSTER: any; - export const ERR_BIN_INCOMPATIBLE_TYPE: any; - export const ERR_RECORD_TOO_BIG: any; - export const ERR_RECORD_BUSY: any; - export const ERR_SCAN_ABORTED: any; - export const ERR_UNSUPPORTED_FEATURE: any; - export const ERR_BIN_NOT_FOUND: any; - export const ERR_DEVICE_OVERLOAD: any; - export const ERR_RECORD_KEY_MISMATCH: any; - export const ERR_NAMESPACE_NOT_FOUND: any; - export const ERR_BIN_NAME: any; - export const ERR_FAIL_FORBIDDEN: any; - export const ERR_FAIL_ELEMENT_NOT_FOUND: any; - export const ERR_FAIL_ELEMENT_EXISTS: any; - export const ERR_ENTERPRISE_ONLY: any; - export const ERR_FAIL_ENTERPRISE_ONLY: any; - export const ERR_OP_NOT_APPLICABLE: any; - export const FILTERED_OUT: any; - export const LOST_CONFLICT: any; - export const QUERY_END: any; - export const SECURITY_NOT_SUPPORTED: any; - export const SECURITY_NOT_ENABLED: any; - export const SECURITY_SCHEME_NOT_SUPPORTED: any; - export const INVALID_COMMAND: any; - export const INVALID_FIELD: any; - export const ILLEGAL_STATE: any; - export const INVALID_USER: any; - export const USER_ALREADY_EXISTS: any; - export const INVALID_PASSWORD: any; - export const EXPIRED_PASSWORD: any; - export const FORBIDDEN_PASSWORD: any; - export const INVALID_CREDENTIAL: any; - export const INVALID_ROLE: any; - export const 
ROLE_ALREADY_EXISTS: any; - export const INVALID_PRIVILEGE: any; - export const INVALID_WHITELIST: any; - export const QUOTAS_NOT_ENABLED: any; - export const INVALID_QUOTA: any; - export const NOT_AUTHENTICATED: any; - export const ROLE_VIOLATION: any; - export const ERR_UDF: any; - export const ERR_BATCH_DISABLED: any; - export const ERR_BATCH_MAX_REQUESTS_EXCEEDED: any; - export const ERR_BATCH_QUEUES_FULL: any; - export const ERR_GEO_INVALID_GEOJSON: any; - export const ERR_INDEX_FOUND: any; - export const ERR_INDEX_NOT_FOUND: any; - export const ERR_INDEX_OOM: any; - export const ERR_INDEX_NOT_READABLE: any; - export const ERR_INDEX: any; - export const ERR_INDEX_NAME_MAXLEN: any; - export const ERR_INDEX_MAXCOUNT: any; - export const ERR_QUERY_ABORTED: any; - export const ERR_QUERY_QUEUE_FULL: any; - export const ERR_QUERY_TIMEOUT: any; - export const ERR_QUERY: any; - export const ERR_UDF_NOT_FOUND: any; - export const ERR_LUA_FILE_NOT_FOUND: any; - export function getMessage(code: any): string; - -} - -type Host = object; -type ClientStats = any; -type doneCallback = () => any; -type errorCallback = () => any; -type recordCallback = () => any; -type valueCallback = () => any; -type writeCallback = () => any; -type batchRecordsCallback = () => any; -type connectCallback = () => any; -type infoCallback = () => any; -type infoAllCallback = () => any; -type jobCallback = () => any; -type JobdoneCallback = () => any; -type JobinfoCallback = () => any; -type QueryaggregationResultCallback = () => any; -type AerospikeExp = object; -type ApplyPolicy = object; -type BatchPolicy = object; -type BatchApplyPolicy = object; -type BatchRemovePolicy = object; -type BatchWritePolicy = object; -type InfoPolicy = object; -type OperatePolicy = object; -type ReadPolicy = object; -type RemovePolicy = object; -type ScanPolicy = object; -type QueryPolicy = object; -type WritePolicy = object; -type BitwisePolicy = object; -type MapPolicy = object; -type ListPolicy = object; -type 
CommandQueuePolicy = object; -type Policies = { - apply: ApplyPolicy; - batch: BatchPolicy; - batchApply: BatchApplyPolicy; - batchRemove: BatchRemovePolicy; - batchWrite: BatchWritePolicy; - info: InfoPolicy; - operate: OperatePolicy; - read: ReadPolicy; - remove: RemovePolicy; - scan: ScanPolicy; - query: QueryPolicy; - write: WritePolicy; - map: MapPolicy; - list: ListPolicy; - commandQueue: CommandQueuePolicy; +import * as Buffer from "buffer"; +import { EventEmitter, Stream } from "stream"; + +/** + * Codes representing each of the various scalar operation types. + */ +export enum ScalarOperations { + WRITE, + READ, + INCR, + PREPEND, + APPEND, + TOUCH, + DELETE +} + +/* TYPES */ + +/** + * Represents a basic value in an Aerospike bin. + */ +export type PartialAerospikeBinValue = null | undefined | boolean | string | number | Double | BigInt | Buffer | GeoJSON | Array | object; + +/** + * Represents an object containing one or more `AerospikeBinValues` with associated string keys. + */ +export type AerospikeBins = { + [key: string]: AerospikeBinValue }; -type Operation = object; -type Client = object; -type Key = object; -type RecordObject = object; -type Privilege = object; -type Role = object; -type User = object; -type role = object; -type CdtContext = object -declare module 'udf_job' { - export = UdfJob; - function UdfJob(client: any, udfModule: any, command: any): void; - class UdfJob { - constructor(client: any, udfModule: any, command: any); - client: any; - udfModule: any; - command: any; - private hasCompleted; - private info; - } - namespace UdfJob { - const REGISTER: string; - const UNREGISTER: string; - } - -} -declare module 'user' { - export = User; - function User(options: any): void; - class User { - constructor(options: any); - connsInUse: any; - name: any; - readInfo: any; - writeInfo: any; - roles: any; - } - -} -declare module 'utils' { - export function parseHostString(hostString: any): { - addr: any; - tlsname: any; - port: number; - }; - 
export function print(err: any, result: any): void; +/** + * Represents a complete Aerospike bin value. Bin values can include nested lists and maps. + */ +export type AerospikeBinValue = PartialAerospikeBinValue | PartialAerospikeBinValue[] | Record; + +/** + * Represents an Aerospike Expression. Contains an op number which specifies the operation type, and properties with values relevant to the operation. + */ +export type AerospikeExp = { op: number, [key: string]: any }[] + + +/** + * Contains geolocation information relevant to the GEOJSON Aerospike type. + */ +export type GeoJSONType = { + type: string, + coordinates: NumberArray +} + +/** + * Represents an array which can contain number or nested number array. + */ +export type NumberArray = number | NumberArray[]; + + +/** + * Callback used to return results in synchronous Aerospike database operations + */ +export type TypedCallback = (error?: AerospikeError, result?: T) => void; + + +/* CLASSES */ + +/** + * A record in the Aerospike database consists of one or more record "bins" + * (name-value pairs) and meta-data, including time-to-live and generation; a + * record is uniquely identified by its key within a given namespace. + * + * @example Writing a new record with 5 bins while setting a record TTL. + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * let bins = { + * int: 123, + * double: 3.1415, + * string: 'xyz', + * bytes: Buffer.from('hello world!'), + * list: [1, 2, 3], + * map: {num: 123, str: 'abc', list: ['a', 'b', 'c']} + * } + * let meta = { + * ttl: 386400 // 1 day + * } + * let key = new Aerospike.Key('test', 'demo', 'myKey') + * + * Aerospike.connect(config) + * .then(client => { + * return client.put(key, bins, meta) + * .then(() => { + * client.get(key) + * .then((record) => { + * console.log(record) + * client.close() + * }) + * .catch(error => { + * console.error(error) + * client.close() + * return Promise.reject(error) + * }) + * }) + * .catch(error => { + * client.close() + * return Promise.reject(error) + * }) + * }) + * .catch(error => console.error('Error:', error)) + * + * @example Fetching a single database record by its key. + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * let key = new Aerospike.Key('test', 'demo', 'myKey') + * + * Aerospike.connect(config) + * .then(client => { + * client.put(key, {tags : ['blue', 'pink']}) + * .then(() => { + * client.get(key) + * .then(record => { + * console.info('Key:', record.key) + * console.info('Bins:', record.bins) + * console.info('TTL:', record.ttl) + * console.info('Gen:', record.gen) + * }) + * .then(() => client.close()) + * .catch(error => { + * client.close() + * return Promise.reject(error) + * }) + * }) + * .catch(error => { + * client.close() + * return Promise.reject(error) + * }) + * }) + * .catch(error => console.error('Error:', error)) + * + * @since v5.0.0 + * + * @example Fetching a batch of records. + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}) + * + * } + * } + * + * var batchRecords = [ + * { type: Aerospike.batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key1'), bins: ['i', 's'] }, + * { type: Aerospike.batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key2'), readAllBins: true }, + * { type: Aerospike.batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key3'), + * ops:[ + * op.read('blob-bin') + * ]} + * ] + * Aerospike.connect(config, function (error, client) { + * if (error) throw error + * client.batchRead(batchRecords, function (error, results) { + * if (error) throw error + * results.forEach(function (result) { + * console.log(result) + * + * }) + * client.close() + * }) + * + * }) + * + * @since v5.0.0 + * + * @example Applying functions on batch of records. + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * + * } + * } + * + * const batchType = Aerospike.batchType + * var batchRecords = [ + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key1'), + * bins: ['i', 's'] }, + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key2'), + * readAllBins: true }, + * { type: batchType.BATCH_APPLY, + * key: new Aerospike.Key('test', 'demo', 'key4'), + * policy: new Aerospike.BatchApplyPolicy({ + * filterExpression: exp.eq(exp.binInt('i'), exp.int(37)), + * key: Aerospike.policy.key.SEND, + * commitLevel: Aerospike.policy.commitLevel.ALL, + * durableDelete: true + * }), + * udf: { + * module: 'udf', + * funcname: 'function1', + * args: [[1, 2, 3]] + * } + * }, + * { type: batchType.BATCH_APPLY, + * key: new Aerospike.Key('test', 'demo', 'key5'), + * policy: new Aerospike.BatchApplyPolicy({ + * filterExpression: exp.eq(exp.binInt('i'), exp.int(37)), + * key: Aerospike.policy.key.SEND, + * commitLevel: Aerospike.policy.commitLevel.ALL, + * durableDelete: true + * }), + * udf: { + * module: 'udf', + * funcname: 'function2', + * args: [[1, 2, 3]] + * } + * } + * ] + * Aerospike.connect(config, function (error, client) { + * if (error) throw error + * client.batchApply(batchRecords, udf, function (error, results) { + * if (error) throw error + * results.forEach(function (result) { + * console.log(result) + * }) + * }) + * }) + */ +export class AerospikeRecord { + /** + * Unique record identifier. + * + * @type {Key} + */ + public key: Key; + /** + * Map of bin name to bin value. + * + * @type {AerospikeBins} + */ + public bins: AerospikeBins; + + /** + * The record's remaining time-to-live in seconds before it expires. + * + * @type {number} + */ + public ttl: number; + /** + * Record modification count. 
+ * + * @type {number} + */ + public gen: number; + + /** + * Construct a new Aerospike Record instance. + */ + constructor(key: KeyOptions, bins: AerospikeBins, metadata?: RecordMetadata); +} + +/** + * In the Aerospike database, each record (similar to a row in a relational database) stores + * data using one or more bins (like columns in a relational database). The major difference + * between bins and RDBMS columns is that you don't need to define a schema. Each record can + * have multiple bins. Bins accept the data types listed {@link https://docs.aerospike.com/apidocs/nodejs/#toc4__anchor|here}. + * + * For information about these data types and how bins support them, see {@link https://docs.aerospike.com/server/guide/data-types/scalar-data-types|this}. + * + * Although the bin for a given record or object must be typed, bins in different rows do not + * have to be the same type. There are some internal performance optimizations for single-bin namespaces. + * + */ +export class Bin { + /** + * Construct a new Aerospike Bin instance. + */ + public constructor(name: string, value: AerospikeBinValue, mapOrder?: maps.order); + /** + * Bin name. + */ + name: string; + /** + * Bin value. + */ + value: AerospikeBinValue; +} + +export class BatchResult { + /** + * Construct a new BatchResult instance. + */ + public constructor(status: typeof statusNamespace[keyof typeof statusNamespace], record: AerospikeRecord, inDoubt: boolean); + /** + * Database operation status code associated with the batch result. + */ + status: typeof statusNamespace[keyof typeof statusNamespace]; + /** + * Aerospike Record result from a batch operation. + */ + record: AerospikeRecord; + /** + * It is possible that a write transaction completed even though the client + * returned this error. This may be the case when a client error occurs + * (like timeout) after the command was sent to the server. 
+ */ + inDoubt: boolean; + +} + +/** + * Aerospike Query operations perform value-based searches using + * secondary indexes (SI). A Query object, created by calling {@link Client#query}, + * is used to execute queries on the specified namespace and set (optional). + * Queries can return a set of records as a {@link RecordStream} or be + * processed using Aerospike User-Defined Functions (UDFs) before returning to + * the client. + * + * For more information, please refer to the section on + * ⇑Queries + * in the Aerospike technical documentation. + * + * To scan _all_ records in a database namespace or set, it is more efficient + * to use {@link Scan operations}, which provide more fine-grained control over + * execution priority, concurrency, etc. + * + * #### SI Filters + * + * With a SI, the following queries can be made: + * + * - [Equal query]{@link filter.equal} against string or + * numeric indexes + * - [Range query]{@link filter.range} against numeric + * indexes + * - [Point-In-Region query]{@link filter.geoWithinGeoJSONRegion} + * or [Region-Contain-Point query]{@link filter.geoContainsGeoJSONPoint} against geo indexes + * + * See {@link filter} for a list of all supported secondary + * index filters. + * + * Before a secondary index filter can be applied, a SI needs to be + * created on the bins which the index filter matches on. Using the Node.js + * client, a SI can be created using {@link Client#createIndex}. + * + * Currently, only a single SI index filter is supported for + * each query. To do more advanced filtering, expressions can be + * applied to the query using policy (see below). Alternatively, User-Defined Functions + * (UDFs) can be used to further process the query results on the server. + * + * Previously, predicate filtering was used to perform secondary index queries. + * SI filter predicates have been deprecated since server 5.2, and obsolete since + * server 6.0. 
+ * + * For more information about Predicate Filtering, please refer to the ⇑Predicate + * Filtering documentation in the Aerospike Feature Guide. + * + * #### Selecting Bins + * + * Using {@link Query#select} it is possible to select a subset of bins which + * should be returned by the query. If no bins are selected, then the whole + * record will be returned. If the {@link Query#nobins} property is set to + * true then only the record meta data (ttl, generation, etc.) will + * be returned. + * + * #### Executing a Query + * + * A query is executed using {@link Query#foreach}. The method returns a {@link + * RecordStream} which emits a data event for each record returned + * by the query. The query can be aborted at any time by calling + * {@link RecordStream#abort}. + * + * #### Applying User-Defined Functions + * + * User-defined functions (UDFs) can be used to filter, transform, and + * aggregate query results. Stream UDFs can process a stream of data by + * defining a sequence of operations to perform. Stream UDFs perform read-only + * operations on a collection of records. Use {@link Query#setUdf} to set the + * UDF parameters (module name, function name and optional list of arguments) + * before executing the query using {@link Query#foreach}. + * + * The feature guides on + * ⇑User-Defined Functions and + * ⇑Stream UDFs + * contain more detailed information and examples. + * + * #### Query Aggregation using Stream UDFs + * + * Use Aerospike Stream UDFs to aggregate query results using {@link + * Query#apply}. Aggregation queries work similarly to a MapReduce system and + * return a single result value instead of a stream of records. Aggregation + * results can be basic data types (string, number, byte array) or collection + * types (list, map). + * + * Please refer to the technical documentation on + * ⇑Aggregation + * for more information. 
+ * + * #### Executing Record UDFs using Background Queries + * + * Record UDFs perform operations on a single record such as updating records + * based on a set of parameters. Using {@link Query#background} you can run a + * Record UDF on the result set of a query. Queries using Record UDFs are run + * in the background on the server and do not return the records to the client. + * + * For additional information please refer to the section on + * ⇑Record UDFs + * in the Aerospike technical documentation. + * + * #### Query pagination + * + * Query pagination allows queries to return records in pages rather than all at once. + * To enable query pagination, the query property {@link paginate} must be true + * and the previously stated query property {@link Query.maxRecords} must be set to a + * nonzero positive integer in order to specify a maximum page size. + * + * When a page is complete, {@link RecordStream} event {@link RecordStream#on 'end'} will + * emit a {@link Query#queryState} object containing a serialized version of the query. + * This serialized query, if assigned back to {@link Query#queryState}, allows the query + * to retrieve the next page of records in the query upon calling {@link Query#foreach}. + * If {@link Query#queryState} is undefined, pagination is not enabled or the query has completed. + * If {@link RecordStream#on 'end'} emits an undefined object, either {@link paginate} + * is not true, or the query has successfully returned all the specified records. + * + * For additional information and examples, please refer to the {@link paginate} section + * below. + * + * @see {@link Client#query} to create new instances of this class. 
+ * + * @example + * + * const Aerospike = require('aerospike') + * const namespace = 'test' + * const set = 'demo' + * + * Aerospike.connect((error, client) => { + * if (error) throw error + * var index = { + * ns: namespace, + * set: set, + * bin: 'tags', + * index: 'tags_idx', + * type: Aerospike.indexType.LIST, + * datatype: Aerospike.indexDataType.STRING + * } + * client.createIndex(index, (error, job) => { + * if (error) throw error + * job.waitUntilDone((error) => { + * if (error) throw error + * + * var query = client.query('test', 'demo') + * const queryPolicy = { filterExpression: exp.keyExist('uniqueExpKey') } + * query.select('id', 'tags') + * query.where(Aerospike.filter.contains('tags', 'green', Aerospike.indexType.LIST)) + * var stream = query.foreach(queryPolicy) + * stream.on('error', (error) => { + * console.error(error) + * throw error + * }) + * stream.on('data', (record) => { + * console.info(record) + * }) + * stream.on('end', () => { + * client.close() + * }) + * }) + * }) + * }) + */ +export class Query { + /** + * Aerospike Client Instance + */ + public client: Client; + /** + * Namespace to query. + */ + public ns: string; + /** + * Name of the set to query. + */ + public set: string; + /** + * Filters to apply to the query. + * + * *Note:* Currently, a single index filter is supported. To do more + * advanced filtering, you need to use a user-defined function (UDF) to + * process the result set on the server. + */ + public filters: filter.SindexFilterPredicate[]; + /** + * List of bin names to be selected by the query. If a query specifies bins to + * be selected, then only those bins will be returned. If no bins are + * selected, then all bins will be returned (unless {@link Query#nobins} is + * set to `true`). + */ + public selected: string[]; + /** + * If set to `true`, the query will return only meta data, and exclude bins. 
+ */ + public nobins: boolean; + /** + * User-defined function parameters to be applied to the query executed using + * {@link Query#foreach}. + */ + public udf: UDF; + /** + * Approximate number of records to return to client. + * + * When {@link paginate} is true, + * then maxRecords will be the page size if there are enough records remaining in the query to fill the page size. + * + * When {@link paginate} is false, this number is divided by the number of nodes involved in the scan, + * and actual number of records returned may be less than maxRecords if node record counts are small and unbalanced across nodes. + */ + public maxRecords?: number; + /** + * Specifies operations to be executed when {@link operate} is called. + */ + public ops?: operations.Operation[]; + /** + * If set to true, paginated queries are enabled. In order to receive paginated + * results, the {@link maxRecords} property must assign a nonzero integer value. + * + * @example Asynchronous pagination over a set of thirty records with {@link Query#foreach}. 
+   *
+   * const Aerospike = require('./lib/aerospike');
+   * // Define host configuration
+   * let config = {
+   *   hosts: '34.213.88.142:3000',
+   *   policies: {
+   *     batchWrite : new Aerospike.BatchWritePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   }
+   * };
+   *
+   * var batchRecords = []
+   * for(let i = 0; i < 30; i++){
+   *   batchRecords.push({
+   *     type: Aerospike.batchType.BATCH_WRITE,
+   *     key: new Aerospike.Key('test', 'demo', 'key' + i),
+   *     ops:[Aerospike.operations.write('exampleBin', i)]
+   *   })
+   * }
+   *
+   * ;(async function() {
+   *   try {
+   *     client = await Aerospike.connect(config)
+   *     await client.truncate('test', 'demo', 0)
+   *     await client.batchWrite(batchRecords, {socketTimeout : 0, totalTimeout : 0})
+   *
+   *     const query = client.query('test', 'demo', { paginate: true, maxRecords: 10})
+   *     do {
+   *       const stream = query.foreach()
+   *       stream.on('error', (error) => { throw error })
+   *       stream.on('data', (record) => {
+   *         console.log(record.bins)
+   *       })
+   *       await new Promise(resolve => {
+   *         stream.on('end', (queryState) => {
+   *           query.queryState = queryState
+   *           resolve()
+   *         })
+   *       })
+   *     } while (query.queryState !== undefined)
+   *
+   *   } catch (error) {
+   *     console.error('An error occurred at some point.', error)
+   *     process.exit(1)
+   *   } finally {
+   *     if (client) client.close()
+   *   }
+   * })()
+   *
+   * @example Asynchronous pagination over a set of thirty records with {@link Query#results}
+   *
+   *
+   * const Aerospike = require('./lib/aerospike');
+   * // Define host configuration
+   * let config = {
+   *   hosts: '34.213.88.142:3000',
+   *   policies: {
+   *     batchWrite : new Aerospike.BatchWritePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   }
+   * };
+   *
+   * var batchRecords = []
+   * for(let i = 0; i < 30; i++){
+   *   batchRecords.push({
+   *     type: Aerospike.batchType.BATCH_WRITE,
+   *     key: new Aerospike.Key('test', 'demo', 'key' + i),
+   *     ops:[Aerospike.operations.write('exampleBin', i)]
+   *   })
+   * }
+   *
+   *
+   * ;(async function() {
+   *   try {
+   *     client = await Aerospike.connect(config)
+   *     await client.truncate('test', 'demo', 0)
+   *     await client.batchWrite(batchRecords, {socketTimeout : 0, totalTimeout : 0})
+   *
+   *     const query = client.query('test', 'demo', { paginate: true, maxRecords: 11})
+   *
+   *     let allResults = []
+   *     let results = await query.results()
+   *     allResults = [...allResults, ...results]
+   *
+   *
+   *     results = await query.results()
+   *     allResults = [...allResults, ...results]
+   *
+   *     results = await query.results()
+   *     allResults = [...allResults, ...results]
+   *
+   *     console.log("Records returned in total: " + allResults.length) // Should be 30 records
+   *   } catch (error) {
+   *     console.error('An error occurred at some point.', error)
+   *     process.exit(1)
+   *   } finally {
+   *     if (client) client.close()
+   *   }
+   * })()
+   *
+   */
+  public paginate?: boolean;
+  /**
+   * Used when querying partitions to manage the query. For internal use only.
+   */
+  public partFilter?: PartFilter;
+  /**
+   * If set to true, the query will return records belonging to the partitions specified
+   * in {@link Query#partFilter}.
+   */
+  public pfEnabled?: boolean;
+  /**
+   * The time-to-live (expiration) of the record in seconds.
+   *
+   * There are also special values that can be set in the record TTL. For details, see {@link ttl}.
+   *
+   * Note that the TTL value will be employed ONLY on background query writes.
+   */
+  public ttl: number;
+  /**
+   * If set to a valid serialized query, calling {@link Query.foreach} will allow the next page of records to be queried while preserving the progress
+   * of the previous query. If set to null, calling {@link Query.foreach} will begin a new query.
+   */
+  public queryState?: number[];
+  /**
+   * Construct a Query instance.
+   *
+   * @param client - A client instance.
+   * @param ns - The namespace.
+   * @param set - The name of a set.
+   * @param options - Query options.
+   * *
+   */
+  constructor(client: Client, ns: string, set: string, options?: QueryOptions | null);
+  /**
+   *
+   * Checks the completion status of a paginated query.
+   *
+   * If false is returned, there are no more records left in the query, and the query is complete.
+   * If true is returned, calling {@link Query#foreach} will continue from the state specified by {@link Query#queryState}.
+   *
+   * @returns `true` if another page remains.
+   */
+  public hasNextPage(): boolean;
+  /**
+   * Sets {@link Query#queryState} to the value specified by the state argument.
+   *
+   * Setter function for the {@link Query#queryState} member variable.
+   *
+   * @param state - serialized query emitted from the {@link RecordStream#on 'end'} event.
+   */
+  public nextPage(state: number[]): void;
+  /**
+   * Specify the begin and count of the partitions
+   * to be queried by the Query foreach op.
+   *
+   * If a Query specifies partitions begin and count,
+   * then only those partitions will be queried and returned.
+   * If no partitions are specified,
+   * then all partitions will be queried and returned.
+   *
+   * @param begin - Start partition number to query.
+   * @param count - Number of partitions from the start to query.
+   * @param digest - Start from this digest if it is specified.
+   */
+  public partitions(begin: number, count: number, digest?: Buffer | null): void;
+  /**
+   * Specify the names of bins to be selected by the query.
+   *
+   * If a query specifies bins to be selected, then only those bins
+   * will be returned. If no bins are selected, then all bins will be returned.
+   * (Unless {@link Query.nobins} is set to true.)
+   *
+   * @param bins - List of bin names or multiple bin names to return.
+   * @return {void}
+   */
+  public select(bins: string[]): void;
+  /**
+   *
+   * @param bins - A spread of bin names to return.
+   * @return {void}
+   */
+  public select(...bins: string[]): void;
+  /**
+   * Applies a SI to the query.
+   *
+   * Use a SI to limit the results returned by the query.
+ * This method takes SI created using the {@link + * filter | filter module} as argument. + * + * @param predicate - The index filter to + * apply to the function. + * + * @example Applying a SI filter to find all records + * where the 'tags' list bin contains the value 'blue': + * + * const Aerospike = require('aerospike') + * + * Aerospike.connect().then(client => { + * let query = client.query('test', 'demo') + * + * let tagsFilter = Aerospike.filter.contains('tags', 'blue', Aerospike.indexType.LIST) + * query.where(tagsFilter) + * + * let stream = query.foreach() + * stream.on('data', record => { console.info(record.bins.tags) }) + * stream.on('error', error => { throw error }) + * stream.on('end', () => client.close()) + * }) + * + * @see {@link filter} to create SI filters. + */ + public where(predicate: filter.SindexFilterPredicate): void; + private setSindexFilter(sindexFilter: filter.SindexFilterPredicate): void; + /** + * + * Set user-defined function parameters to be applied to the query. + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + */ + public setUdf(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null): void; + /** + * Asynchronously executes the query and returns each result item + * through the stream. + * + * *Applying a Stream UDF to the query results* + * + * A stream UDF can be applied to the query to filter, transform and aggregate + * the query results. The UDF parameters need to be set on the query object + * using {@link Query#setUdf} before the query is executed. + * + * If a UDF is applied to the query, the resulting stream will return + * the results of the UDF stream function. Record meta data and the record keys + * will not be returned. + * + * For aggregation queries that return a single result value instead of a + * stream of values, you should use the {@link Query#apply} method instead. 
+   *
+   * @param policy - The Query Policy to use for this operation.
+   * @param dataCb - The function to call when the
+   * operation completes with the results of the operation; if no callback
+   * function is provided, the method returns a Promise instead.
+   * @param errorCb - Callback function called when there is an error.
+   * @param endCb - Callback function called when an operation has completed.
+   *
+   * @returns {@link RecordStream}
+   */
+  public foreach(policy?: policy.QueryPolicy | null, dataCb?: (data: AerospikeRecord) => void, errorCb?: (error: Error) => void, endCb?: () => void): RecordStream;
+  /**
+   * Executes the query and collects the results into an array. On paginated queries,
+   * preparing the next page is also handled automatically.
+   *
+   *
+   * This method returns a Promise that contains the query results
+   * as an array of records, when fulfilled. It should only be used if the query
+   * is expected to return only a few records; otherwise it is recommended to use
+   * {@link Query.foreach}, which returns the results as a {@link RecordStream}
+   * instead.
+   *
+   * If pagination is enabled, the data emitted from the {@link RecordStream#on 'end'}
+   * event will automatically be assigned to {@link Query.queryState}, allowing the next page
+   * of records to be queried if {@link Query.foreach} or {@link Query.results} is called.
+   *
+   *
+   * @param policy - The Query Policy to use for this operation.
+   *
+   * @returns A promise that resolves with an array of Aerospike records.
+   */
+  public results(policy?: policy.QueryPolicy | null): Promise;
+  /**
+   * Applies a user-defined function (UDF) to aggregate the query results.
+   *
+   * The aggregation function is called on both server and client (final reduce). Therefore, the Lua script files must also reside on both server and client.
+   *
+   * @param udfModule - UDF module name.
+   * @param udfFunction - UDF function name.
+   * @param udfArgs - Arguments for the function.
+ * @param policy - The Query Policy to use for this operation. + * + * @returns A promise that resolves with an Aerospike bin value. + * + */ + public apply(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null, policy?: policy.QueryPolicy | null): Promise; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param callback - The function to call when the operation completes. + * + */ + public apply(udfModule: string, udfFunction: string, callback: TypedCallback): void; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param callback - The function to call when the operation completes. + * + */ + public apply(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null, callback?: TypedCallback): void; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Query Policy to use for this operation. + * @param callback - The function to call when the operation completes. + * + */ + public apply(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[], policy?: policy.QueryPolicy | null, callback?: TypedCallback): void; + /** + * Applies a user-defined function (UDF) on records that match the query filter. + * Records are not returned to the client. + * + * When a background query is initiated, the client will not wait + * for results from the database. Instead a {@link Job} instance will be + * returned, which can be used to query the query status on the database. + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Write Policy to use for this operation. + * @param queryID - Job ID to use for the query; will be assigned + * randomly if zero or undefined. 
+ * + * @returns Promise that resolves to a {@link Job} instance. + */ + public background(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null, policy?: policy.WritePolicy | null, queryID?: number | null): Promise; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param callback - The function to call when the operation completes. + * + */ + public background(udfModule: string, udfFunction: string, callback: TypedCallback): void; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param callback - The function to call when the operation completes. + * + */ + public background(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null, callback?: TypedCallback): void; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Write Policy to use for this operation. + * @param callback - The function to call when the operation completes. + * + */ + public background(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null, policy?: policy.WritePolicy | null, callback?: TypedCallback): void; + /** + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Write Policy to use for this operation. + * @param queryID - Job ID to use for the query; will be assigned + * randomly if zero or undefined. + * @param callback - The function to call when the operation completes. + * + */ + public background(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[] | null, policy?: policy.WritePolicy | null, queryID?: number | null, callback?: TypedCallback | null): void; + /** + * Applies write operations to all matching records. 
+ * + * Performs a background query and applies one or more write + * operations to all records that match the query filter(s). Neither the + * records nor the results of the operations are returned to the client. + * Instead a {@link Job} instance will be returned, which can be used to query + * the query status. + * + * This method requires server >= 3.7.0. + * + * @param operations - List of write + * operations to perform on the matching records. + * @param policy - The Query Policy to use for this operation. + * @param queryID - Job ID to use for the query; will be assigned + * randomly if zero or undefined. + * + * @returns Promise that resolves to a Job instance. + * + * @since v3.14.0 + * + * @example Increment count bin on all matching records using a background query + * + * const Aerospike = require('aerospike') + * + * Aerospike.connect().then(async (client) => { + * const query = client.query('namespace', 'set') + * query.where(Aerospike.filter.range('age', 18, 42)) + * const ops = [Aerospike.operations.incr('count', 1)] + * const job = await query.operate(ops) + * await job.waitUntilDone() + * client.close() + * }) + */ + public operate(operations: operations.Operation[], policy?: policy.QueryPolicy | null, queryID?: number| null): Promise; + /** + * @param operations - List of write + * operations to perform on the matching records. + * @param callback - The function to call when the operation completes. + * + * @returns Promise that resolves to a Job instance. + */ + public operate(operations: operations.Operation[], callback?: TypedCallback): void; + /** + * @param operations - List of write + * operations to perform on the matching records. + * @param policy - The Query Policy to use for this operation. + * @param callback - The function to call when the operation completes. + * + * @returns Promise that resolves to a Job instance. 
+ */ + public operate(operations: operations.Operation[], policy: policy.QueryPolicy | null, callback?: TypedCallback): void; + /** + * @param operations - List of write + * operations to perform on the matching records. + * @param policy - The Query Policy to use for this operation. + * @param queryID - Job ID to use for the query; will be assigned + * randomly if zero or undefined. + * @param callback - The function to call when the operation completes. + * + * @returns Promise that resolves to a Job instance. + */ + public operate(operations: operations.Operation[], policy: policy.QueryPolicy | null, queryID: number| null, callback?: TypedCallback): void; +} + +export namespace cdt { + + /** + * Codes used to distinguish CDT item types. + */ +// export enum CdtItemTypes { +// LIST_INDEX = 0x10, +// LIST_RANK, +// LIST_VALUE = 0x13, +// MAP_INDEX = 0x20, +// MAP_RANK, +// MAP_KEY, +// MAP_VALUE +// } + /** + * List of {link cdt.Context} instances. + */ + export type CdtItems = CdtContext[]; + +// export class CdtItems extends Array { +// public push(v: [number, CdtContext]); +// } + + /** + * Nested CDT context type. + * + * @see {@link lists~ListOperation#withContext|ListOperation#withContext} Adding context to list operations + * @see {@link maps~MapOperation#withContext|Map#Operation#withContext} Adding context to map operations + * + * @since v3.12.0 + */ + class CdtContext { + constructor(); + /** + * List of {link cdt.Context} instances. + */ + public items: CdtItems; + private add(type: /* CdtItemTypes */ number, value: CdtContext): CdtContext; + /** + * Lookup list by index offset. + * + * @remarks If the index is negative, the resolved index starts backwards + * from end of list. If an index is out of bounds, a parameter error will be + * returned. Examples: + * + * - 0: First item. + * - 4: Fifth item. + * - -1: Last item. + * - -3: Third to last item. 
+ * + * @param {number} index - List index + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addListIndex(index: number): CdtContext; + /** + * Lookup list by base list's index offset. + * + * @remarks If the list at index offset is not found, + * create it with the given sort order at that index offset. + * If pad is true and the index offset is greater than the + * bounds of the base list, nil entries will be inserted before the newly + * created list. + * + * @param {number} index - List index + * @param {number} order - Sort order used if a list is created + * @param {boolean} pad - Pads list entries between index and the + * final list entry with zeros. + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addListIndexCreate(index: number, order?: lists.order, pad?: boolean): CdtContext; + /** + * Lookup list by rank. + * + * @remarks Examples: + * + * - 0 = smallest value + * - N = Nth smallest value + * - -1 = largest value + * + * @param {number} rank - List rank + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addListRank(rank: number): CdtContext; + /** + * Lookup list by value. + * + * @param {any} value - List value + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addListValue(value: AerospikeBinValue): CdtContext; + /** + * Lookup map by index offset. + * + * @remarks If the index is negative, the resolved index starts backwards + * from end of list. If an index is out of bounds, a parameter error will be + * returned. Examples: + * + * - 0: First item. + * - 4: Fifth item. + * - -1: Last item. + * - -3: Third to last item. + * + * @param {number} index - Map index + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addMapIndex(index: number): CdtContext; + /** + * Lookup map by rank. 
+ * + * @remarks Examples: + * + * - 0 = smallest value + * - N = Nth smallest value + * - -1 = largest value + * + * @param {number} rank - Map rank + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addMapRank(rank: number): CdtContext; + /** + * Lookup map by key. + * + * @param {any} key - Map key + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addMapKey(key: string): CdtContext; + /** + * Lookup map by base map's key. If the map at key is not found, + * create it with the given sort order at that key. + * + * @param {any} key - Map key + * @param {number} order - Sort order used if a map is created + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addMapKeyCreate(key: string, order?: maps.order): CdtContext; + /** + * Lookup map by value. + * + * @param {any} value - Map value + * @return {CdtContext} Updated CDT context, so calls can be chained. + */ + public addMapValue(value: AerospikeBinValue): CdtContext; + /** + * Retrieve expression type list/map from ctx or from type. + * + * @param ctx - ctx value object. + * @param type -{@link exp.type} default expression type. + * @return {@link exp.type} expression type. 
+ */ + static getContextType(ctx: CdtContext, type: /* CdtItemTypes */ number): exp.type | /* CdtItemTypes */ number; + } + export {CdtContext as Context} + +} + +export class AdminPolicy extends policy.AdminPolicy {} +export class ApplyPolicy extends policy.ApplyPolicy {} +export class BasePolicy extends policy.BasePolicy {} +export class BatchPolicy extends policy.BatchPolicy {} +export class BatchApplyPolicy extends policy.BatchApplyPolicy {} +export class BatchReadPolicy extends policy.BatchReadPolicy {} +export class BatchRemovePolicy extends policy.BatchRemovePolicy {} +export class BatchWritePolicy extends policy.BatchWritePolicy {} +export class CommandQueuePolicy extends policy.CommandQueuePolicy {} +export class HLLPolicy extends policy.HLLPolicy {} +export class InfoPolicy extends policy.InfoPolicy {} +export class ListPolicy extends policy.ListPolicy {} +export class MapPolicy extends policy.MapPolicy {} +export class OperatePolicy extends policy.OperatePolicy {} +export class QueryPolicy extends policy.QueryPolicy {} +export class ReadPolicy extends policy.ReadPolicy {} +export class RemovePolicy extends policy.RemovePolicy {} +export class ScanPolicy extends policy.ScanPolicy {} +export class WritePolicy extends policy.WritePolicy {} + + +/** + * The policy module defines policies and policy values that + * define the behavior of database operations. Most {@link Client} methods, + * including scans and queries, accept a policy object, that affects how the + * database operation is executed, by specifying timeouts, transactional + * behavior, etc. Global defaults for specific types of database operations can + * also be set through the client config, when a new {@link Client} instance is + * created. + * + * Different policies apply to different types of database operations: + * + * * {@link ApplyPolicy} - Applies to {@link Client.apply}. 
+ * * {@link OperatePolicy} - Applies to {@link Client.operate} as well as {@link Client.append}, {@link Client.prepend} and {@link Client.add}. + * * {@link QueryPolicy} - Applies to {@link Query.apply}, {@link Query.background} and {@link Query.foreach}. + * * {@link ReadPolicy} - Applies to {@link Client.exists}, {@link Client.get} and {@link Client.select}. + * * {@link RemovePolicy} - Applies to {@link Client.remove}. + * * {@link ScanPolicy} - Applies to {@link Scan.background} and {@link Scan.foreach}. + * * {@link WritePolicy} - Applies to {@link Client.put}. + * * {@link BatchPolicy} - Applies to {@link Client.batchRead} as well as the + * deprecated {@link Client.batchExists}, {@link Client.batchGet}, and {@link + * Client.batchSelect} operations. Also used when providing batchParentWrite policy to a client configuration. + * * {@link BatchApplyPolicy} - Applies to {@link Client.batchApply}. + * * {@link BatchReadPolicy} - Applies to {@link Client.batchRead}. + * * {@link BatchRemovePolicy} - Applies to {@link Client.batchRemove}. + * * {@link BatchWritePolicy} - Applies to {@link Client.batchWrite}. + * * {@link CommandQueuePolicy} - Applies to global command queue {@link setupGlobalCommandQueue + * Aerospike.setupGlobalCommandQueue} + * * {@link HLLPolicy} - Applies to {@link hll|HLL} operations + * * {@link InfoPolicy} - Applies to {@link Client.info}, {@link + * Client.infoAny}, {@link Client.infoAll} as well as {@link + * Client.createIndex}, {@link Client.indexRemove}, {@link Client.truncate}, + * {@link Client.udfRegister} and {@link Client.udfRemove}. + * * {@link ListPolicy} - Applies to List operations defined in {@link lists}. + * * {@link MapPolicy} - Applies to Map operations defined in {@link maps}. 
+ * * {@link AdminPolicy} - Applies to {@link Client.changePassword},
+ * {@link Client.createUser}, {@link Client.createRole}, {@link Client.dropRole}, {@link Client.dropUser},
+ * {@link Client.grantPrivileges}, {@link Client.grantRoles}, {@link Client.queryRole},
+ * {@link Client.queryRoles}, {@link Client.queryUser}, {@link Client.queryUsers},
+ * {@link Client.revokePrivileges}, {@link Client.revokeRoles}, {@link Client.setQuotas},
+ * and {@link Client.setWhitelist}.
+ *
+ * Base policy {@link BasePolicy} class which defines common policy
+ * values that apply to all database operations
+ * (except `InfoPolicy`, `AdminPolicy`, `MapPolicy` and `ListPolicy`).
+ *
+ * This module also defines global values for the following policy settings:
+ *
+ * * {@link policy.commitLevel|commitLevel} - Specifies the
+ * number of replicas required to be successfully committed before returning
+ * success in a write operation to provide the desired consistency guarantee.
+ * * {@link policy.exists|exists} - Specifies the behavior for
+ * writing the record depending on whether or not it exists.
+ * * {@link policy.gen|gen} - Specifies the behavior of record
+ * modifications with regard to the generation value.
+ * * {@link policy.key|key} - Specifies the behavior for
+ * whether keys or digests should be sent to the cluster.
+ * * {@link policy.readModeAP|readModeAP} - How duplicates
+ * should be consulted in a read operation.
+ * * {@link policy.readModeSC|readModeSC} - Determines SC read
+ * consistency options.
+ * * {@link policy.replica|replica} - Specifies which
+ * partition replica to read from.
+ * + * @example + * + * const Aerospike = require('aerospike') + * + * const config = { + * hosts: '192.168.33.10:3000' + * } + * + * const key = new Aerospike.Key('test', 'demo', 'k1') + * + * Aerospike.connect(config) + * .then(client => { + * let record = {i: 1234} + * + * // Override policy for put command + * let policy = new Aerospike.policy.WritePolicy({ + * exists: Aerospike.policy.exists.CREATE, + * key: Aerospike.policy.key.SEND, + * socketTimeout: 0, + * totalTimeout: 0 + * }) + * + * return client.put(key, record, {}, policy) + * .then(() => client.close()) + * .catch(error => { + * client.close() + * if (error.code === Aerospike.status.ERR_RECORD_EXISTS) { + * console.info('record already exists') + * } else { + * return Promise.reject(error) + * } + * }) + * }) + * .catch(error => console.error('Error:', error)) + */ +export namespace policy { + + /** + * A policy affecting the behavior of adminstraation operations. + * + * Please note that `AdminPolicy` does not derive from {@link BasePolicy}. + * + * @since v3.0.0 + */ + export class AdminPolicy extends BasePolicy { + /** + * Maximum time in milliseconds to wait for the operation to complete. + * + * @type number + */ + public timeout?: number; + /** + * Initializes a new AdminPolicy from the provided policy values. + * + * @param props - AdminPolicy values + */ + constructor(props?: AdminPolicyOptions); + } + + /** + * Initializes a new ApplyPolicy from the provided policy values. + * + */ + export class ApplyPolicy extends BasePolicy { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + public commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. 
+ * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + public key?: policy.key; + /** + * The time-to-live (expiration) of the record in seconds. + * + */ + public ttl?: number; + /** + * Initializes a new ApplyPolicy from the provided policy values. + * + * @param props - ApplyPolicy values + */ + constructor(props?: ApplyPolicyOptions); + } + + /** + * Base class for all client policies. The base policy defines general policy + * values that are supported by all client policies, including timeout and + * retry handling. + * Applies to {@link ApplyPolicy}, {@link BatchPolicy}, {@link OperatePolicy}, + * {@link QueryPolicy}, {@link ReadPolicy}, {@link RemovePolicy}, {@link ScanPolicy} and {@link WritePolicy}. + * + * @since v3.0.0 + */ + export class BasePolicy { + /** + * Use zlib compression on write or batch read commands when the command + * buffer size is greater than 128 bytes. In addition, tell the server to + * compress it's response on read commands. The server response compression + * threshold is also 128 bytes. + * + * This option will increase cpu and memory usage (for extra compressed + * buffers), but decrease the size of data sent over the network. + * + * Requires Enterprise Server version >= 4.8. + * + * @default: false + * @since v3.14.0 + */ + public compress?: boolean; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. 
+ * + * expression filters can only be applied to the following commands: + * * {@link Client.apply} + * * {@link Client.batchExists} + * * {@link Client.batchGet} + * * {@link Client.batchRead} + * * {@link Client.batchSelect} + * * {@link Client.exists} + * * {@link Client.get} + * * {@link Client.operate} + * * {@link Client.put} + * * {@link Client.remove} + * * {@link Client.select} + */ + public filterExpression?: AerospikeExp; + /** + * Maximum number of retries before aborting the current transaction. + * The initial attempt is not counted as a retry. + * + * If maxRetries is exceeded, the transaction will return + * error {@link statusNamespace.ERR_TIMEOUT|ERR_TIMEOUT}. + * + * WARNING: Database writes that are not idempotent (such as "add") + * should not be retried because the write operation may be performed + * multiple times if the client timed out previous transaction attempts. + * It is important to use a distinct write policy for non-idempotent + * writes which sets maxRetries to zero. + * + * @default: 2 (initial attempt + 2 retries = 3 attempts) + */ + public maxRetries?: number; + /** + * Socket idle timeout in milliseconds when processing a database command. + * + * If socketTimeout is not zero and the socket has been idle + * for at least socketTimeout, both maxRetries + * and totalTimeout are checked. If maxRetries + * and totalTimeout are not exceeded, the transaction is + * retried. + * + * If both socketTimeout and totalTimeout are + * non-zero and socketTimeout > totalTimeout, + * then socketTimeout will be set to + * totalTimeout. If socketTimeout is zero, there + * will be no socket idle limit. + * + * @default 0 (no socket idle time limit). + */ + public socketTimeout?: number; + /** + * Total transaction timeout in milliseconds. + * + * The totalTimeout is tracked on the client and sent to the + * server along with the transaction in the wire protocol. 
The client will + * most likely timeout first, but the server also has the capability to + * timeout the transaction. + * + * If totalTimeout is not zero and totalTimeout + * is reached before the transaction completes, the transaction will return + * error {@link statusNamespace.ERR_TIMEOUT|ERR_TIMEOUT}. + * If totalTimeout is zero, there will be no total time limit. + * + * @default 1000 + */ + public totalTimeout?: number; + /** + * Initializes a new BasePolicy from the provided policy values. + * + * @param props - BasePolicy values + */ + + constructor(props?: BasePolicyOptions); + } + + + /** + * A policy affecting the behavior of batch apply operations. + * + * @since v5.0.0 + */ + export class BatchApplyPolicy { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + public commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + */ + public filterExpression?: AerospikeExp; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + public key?: policy.key; + + /** + * The time-to-live (expiration) of the record in seconds. + */ + public ttl?: number; + /** + * Initializes a new BatchApplyPolicy from the provided policy values. 
+ *
+ * @param props - BatchApplyPolicy values
+ */
+ constructor(props?: BatchApplyPolicyOptions);
+ }
+
+ /**
+ * A policy affecting the behavior of batch operations.
+ */
+ export class BatchPolicy extends BasePolicy {
+ /**
+ * Allow batch to be processed immediately in the server's receiving thread
+ * when the server deems it to be appropriate. If false, the batch will
+ * always be processed in separate transaction threads.
+ *
+ * @default true
+ */
+ public allowInline?: boolean;
+ /**
+ * Allow batch to be processed immediately in the server's receiving thread for SSD
+ * namespaces. If false, the batch will always be processed in separate service threads.
+ * Server versions < 6.0 ignore this field.
+ *
+ * Inline processing can introduce the possibility of unfairness because the server
+ * can process the entire batch before moving onto the next command.
+ *
+ * @default false
+ */
+ public allowInlineSSD?: boolean;
+ /**
+ * Determine if batch commands to each server are run in parallel threads.
+ *
+ * Values:
+ * false: Issue batch commands sequentially. This mode has a performance advantage for small
+ * to medium sized batch sizes because commands can be issued in the main transaction thread.
+ * This is the default.
+ * true: Issue batch commands in parallel threads. This mode has a performance
+ * advantage for large batch sizes because each node can process the command immediately.
+ * The downside is extra threads will need to be created (or taken from
+ * a thread pool).
+ *
+ * @default false
+ */
+ public concurrent?: boolean;
+ /**
+ * Should CDT data types (Lists / Maps) be deserialized to JS data types
+ * (Arrays / Objects) or returned as raw bytes (Buffer).
+ *
+ * @default true
+ * @since v3.7.0
+ */
+ public deserialize?: boolean;
+ /**
+ * Read policy for AP (availability) namespaces.
+ *
+ * @default {@link policy.readModeAP.ONE}
+ * @see {@link policy.readModeAP} for supported policy values. 
+ */ + public readModeAP?: policy.readModeAP; + /** + * Read policy for SC (strong consistency) namespaces. + * + * @default {@link policy.readModeSC.SESSION} + * @see {@link policy.readModeSC} for supported policy values. + */ + public readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. + * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * + * @default 0 + */ + public readTouchTtlPercent?: number; + /** + * Algorithm used to determine target node. + * + * @default {@link policy.replica.MASTER} + * @see {@link policy.replica} for supported policy values. + */ + public replica?: policy.replica; + /** + * Should all batch keys be attempted regardless of errors. This field is used on both + * the client and server. The client handles node specific errors and the server handles + * key specific errors. + * + * If true, every batch key is attempted regardless of previous key specific errors. + * Node specific errors such as timeouts stop keys to that node, but keys directed at + * other nodes will continue to be processed. + * + * If false, the server will stop the batch to its node on most key specific errors. + * The exceptions are AEROSPIKE_ERR_RECORD_NOT_FOUND and AEROSPIKE_FILTERED_OUT + * which never stop the batch. The client will stop the entire batch on node specific + * errors for sync commands that are run in sequence (concurrent == false). 
The client + * will not stop the entire batch for async commands or sync commands run in parallel. + * + * Server versions < 6.0 do not support this field and treat this value as false + * for key specific errors. + * + * @default true + */ + public respondAllKeys?: boolean; + /** + * Send set name field to server for every key in the batch. This is only + * necessary when authentication is enabled and security roles are defined + * on a per-set basis. + * + * @default false + */ + public sendSetName?: boolean; + + /** + * Initializes a new BatchPolicy from the provided policy values. + * + * @param props - BatchPolicy values + */ + constructor(props?: BatchPolicyOptions) + } + + export class BatchReadPolicy { + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + */ + public filterExpression?: AerospikeExp; + /** + * Read policy for AP (availability) namespaces. + * + * @default {@link policy.readModeAP.ONE} + * @see {@link policy.readModeAP} for supported policy values. + */ + public readModeAP?: policy.readModeAP; + /** + * Read policy for SC (strong consistency) namespaces. + * + * @default {@link policy.readModeSC.SESSION} + * @see {@link policy.readModeSC} for supported policy values. + */ + public readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. 
+ * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * + * @default 0 + */ + public readTouchTtlPercent?: number; + /** + * Initializes a new BatchReadPolicy from the provided policy values. + * + * @param props - BatchReadPolicy values + */ + constructor(props?: BatchReadPolicyOptions); + } + + export class BatchRemovePolicy { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + public commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + * + */ + public filterExpression?: AerospikeExp; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + public gen?: policy.gen; + /** + * The generation of the record. + */ + public generation?: number; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + public key?: policy.key; + + /** + * Initializes a new BatchRemovePolicy from the provided policy values. + * + * @param props - BatchRemovePolicy values + */ + constructor(props?: BatchRemovePolicyOptions); + } + + /** + * A policy affecting the behavior of batch write operations. 
+ * + * @since v5.0.0 + */ + export class BatchWritePolicy { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + public commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Specifies the behavior for the existence of the record. + * + * @see {@link policy.exists} for supported policy values. + */ + public exists?: policy.exists; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + */ + public filterExpression?: AerospikeExp; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + public gen?: policy.gen; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + public key?: policy.key; + /** + * The time-to-live (expiration) of the record in seconds. + */ + public ttl?: number; + /** + * Initializes a new BatchWritePolicy from the provided policy values. + * + * @param props - BatchWritePolicy values + */ + constructor(props?: BatchWritePolicyOptions); + } + + /** + * A policy affecting the behavior of {@link bitwise|bitwise} operations. + * + * @since v3.13.0 + */ + export class BitwisePolicy extends BasePolicy { + /** + * Specifies the behavior when writing byte values. + * + * @default bitwise.writeFlags.DEFAULT + * @see {@link bitwise.writeFlags} for supported policy values. 
+ */ + public writeFlags: bitwise.writeFlags; + /** + * Initializes a new BitwisePolicy from the provided policy values. + * + * @param props - BitwisePolicy values + */ + constructor(props?: BitwisePolicyOptions); + } + + /** + * Policy governing the use of the global command queue. + * + * **Which commands are affected by the command queue?** + * + * Not all client commands use the command queue. Only single-key commands + * (e.g. Put, Get, etc.), the BatchRead, BatchWrite commands, and {@link Query#foreach}, + * {@link Scan#foreach} commands use the command queue (if enabled). + * + * Commands that are based on the Aerospike info protocol (Index + * Create/Remove, UDF Register/Remove, Truncate, Info), the legacy Batch + * Get/Select/Exists commands as well as all other Query and Scan commands do + * not use the command queue and will always be executed immediately. + * + * @see {@link setupGlobalCommandQueue + * Aerospike.setupGlobalCommandQueue} - function used to initialize the global + * command queue. + * + * @example + * + * const Aerospike = require('aerospike') + * + * const policy = { + * maxCommandsInProcess: 50, + * maxCommandsInQueue: 150 + * } + * Aerospike.setupGlobalCommandQueue(policy) + * + * Aerospike.connect() + * .then(client => { + * let commands = [] + * for (var i = 0; i < 100; i++) { + * let cmd = client.put(new Aerospike.Key('test', 'test', i), {i: i}) + * commands.push(cmd) + * } + * + * // First 50 commands will be executed immediately, + * // remaining commands will be queued and executed once the client frees up. + * Promise.all(commands) + * .then(() => console.info('All commands executed successfully')) + * .catch(error => console.error('Error:', error)) + * .then(() => client.close()) + * }) + */ + export class CommandQueuePolicy extends BasePolicy { + /** + * Maximum number of commands that can be processed at any point in time. + * Each executing command requires a socket connection. 
Consuming too many + * sockets can negatively affect application reliability and performance. + * If you do not limit command count in your application, this setting + * should be used to enforce a limit internally in the client. + * + * If this limit is reached, the next command will be placed on the + * client's command queue for later execution. If this limit is zero, all + * commands will be executed immediately and the command queue will not be + * used. (Note: {@link Config#maxConnsPerNode} may still limit number of + * connections per cluster node.) + * + * If defined, a reasonable value is 40. The optimal value will depend on + * the CPU speed and network bandwidth. + * + * @default 0 (execute all commands immediately) + */ + public maxCommandsInProcess?: number; + /** + * Maximum number of commands that can be stored in the global command + * queue for later execution. Queued commands consume memory, but they do + * not consume sockets. This limit should be defined when it's possible + * that the application executes so many commands that memory could be + * exhausted. + * + * If this limit is reached, the next command will be rejected with error + * code ERR_ASYNC_QUEUE_FULL. If this limit is zero, all + * commands will be accepted into the delay queue. + * + * The optimal value will depend on the application's magnitude of command + * bursts and the amount of memory available to store commands. + * + * @default 0 (no command queue limit) + */ + public maxCommandsInQueue?: number; + /** + * Initial capacity of the command queue. The command queue can resize + * beyond this initial capacity. + * + * @default 256 (if command queue is used) + */ + public queueInitialCapacity?: number; + /** + * Initializes a new CommandQueuePolicy from the provided policy values. + * + * @param props - CommandQueuePolicy values + */ + constructor(props?: CommandQueuePolicyOptions); + } + + /** + * A policy affecting the behavior of {@link hll|HLL} operations. 
+ *
+ * @since v3.16.0
+ */
+ export class HLLPolicy extends BasePolicy {
+ /**
+ * Specifies the behavior when writing byte values.
+ *
+ * @default hll.writeFlags.DEFAULT
+ * @see {@link hll.writeFlags} for supported policy values.
+ */
+ public writeFlags: hll.writeFlags;
+ /**
+ * Initializes a new HLLPolicy from the provided policy values.
+ *
+ * @param props - HLLPolicy values
+ */
+ constructor(props?: HLLPolicyOptions);
+ }
+
+ /**
+ * A policy affecting the behavior of info operations.
+ *
+ * Please note that `InfoPolicy` does not derive from {@link BasePolicy} and that
+ * info commands do not support automatic retry.
+ *
+ * @since v3.0.0
+ */
+ export class InfoPolicy extends BasePolicy {
+ /**
+ * Ensure the request is within allowable size limits.
+ */
+ public checkBounds?: boolean;
+ /**
+ * Send request without any further processing.
+ */
+ public sendAsIs?: boolean;
+ /**
+ * Maximum time in milliseconds to wait for the operation to complete.
+ */
+ public timeout?: number
+ /**
+ * Initializes a new InfoPolicy from the provided policy values.
+ *
+ * @param props - InfoPolicy values
+ */
+ constructor(props?: InfoPolicyOptions);
+ }
+
+ /**
+ * A policy affecting the behavior of list operations.
+ *
+ * @since v3.3.0
+ */
+ export class ListPolicy extends BasePolicy {
+ /**
+ * Sort order for the list.
+ *
+ * @type number
+ * @default {@link lists.order.UNORDERED}
+ * @see {@link lists.order} for supported policy values.
+ */
+ public order?: lists.order;
+ /**
+ * Specifies the behavior when replacing or inserting list items.
+ *
+ * @type number
+ * @default {@link lists.writeFlags.DEFAULT}
+ * @see {@link lists.writeFlags} for supported policy values.
+ */
+ public writeFlags?: lists.writeFlags;
+ /**
+ * Initializes a new ListPolicy from the provided policy values.
+ *
+ * @param props - ListPolicy values
+ */
+ constructor(props?: ListPolicyOptions);
+ }
+ /**
+ * A policy affecting the behavior of map operations. 
+ *
+ * @since v3.0.0
+ */
+ export class MapPolicy extends BasePolicy {
+ /**
+ * Sort order for the map.
+ *
+ * @default {@link maps.order.UNORDERED}
+ * @see {@link maps.order} for supported policy values.
+ */
+ public order?: maps.order;
+ /**
+ * Specifies the behavior when replacing or inserting map items.
+ *
+ * Map write flags require server version v4.3 or later. For earlier server
+ * versions, set the {@link MapPolicy.writeMode|writeMode} instead.
+ *
+ * @default {@link maps.writeFlags.DEFAULT}
+ * @see {@link maps.writeFlags} for supported policy values.
+ * @since v3.5
+ */
+ public writeFlags?: maps.writeFlags;
+ /**
+ * Specifies the behavior when replacing or inserting map items.
+ *
+ * Map write mode should only be used for server versions prior to v4.3.
+ * For server versions v4.3 or later, the use of {@link
+ * MapPolicy.writeFlags | writeFlags} is recommended.
+ *
+ * @default {@link maps.writeMode.UPDATE}
+ * @see {@link maps.writeMode} for supported policy values.
+ * @deprecated since v3.5
+ */
+ public writeMode?: maps.writeMode;
+
+ /**
+ * Initializes a new MapPolicy from the provided policy values.
+ *
+ * @param props - MapPolicy values
+ */
+ constructor(props?: MapPolicyOptions);
+ }
+
+ /**
+ * A policy affecting the behavior of operate operations.
+ *
+ * @since v3.0.0
+ */
+ export class OperatePolicy extends BasePolicy {
+ /**
+ * Specifies the number of replicas required to be committed successfully
+ * when writing before returning transaction succeeded.
+ *
+ * @see {@link policy.commitLevel} for supported policy values.
+ */
+ public commitLevel?: policy.commitLevel;
+ /**
+ * Should CDT data types (Lists / Maps) be deserialized to JS data types
+ * (Arrays / Objects) or returned as raw bytes (Buffer). 
+ * + * @default true + * @since v3.7.0 + */ + public deserialize?: boolean; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Specifies the behavior for the existence of the record. + * + * @see {@link policy.exists} for supported policy values. + */ + public exists?: policy.exists; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + public gen?: policy.gen; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + public key?: policy.key; + /** + * Read policy for AP (availability) namespaces. + * + * @default Aerospike.policy.readModeAP.ONE + * @see {@link policy.readModeAP} for supported policy values. + */ + public readModeAP?: policy.readModeAP; + /** + * Read policy for SC (strong consistency) namespaces. + * + * @default Aerospike.policy.readModeSC.SESSION + * @see {@link policy.readModeSC} for supported policy values. + */ + public readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. + * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. 
+ * + * @default 0 + */ + public readTouchTtlPercent?: number; + /** + * Specifies the replica to be consulted for the read operation. + * + * @see {@link policy.replica} for supported policy values. + */ + public replica?: policy.replica; + /** + * Initializes a new OperatePolicy from the provided policy values. + * + * @param props - OperatePolicy values + */ + constructor(props?: OperatePolicyOptions); + } + + + /** + * A policy affecting the behavior of query operations. + * + * @since v3.0.0 + */ + export class QueryPolicy extends BasePolicy { + /** + * Should CDT data types (Lists / Maps) be deserialized to JS data types + * (Arrays / Objects) or returned as raw bytes (Buffer). + * + * @default true + * @since v3.7.0 + */ + public deserialize?: boolean; + /** + * Expected query duration. The server treats the query in different ways depending on the expected duration. + * This field is ignored for aggregation queries, background queries and server versions < 6.0. + * + * @see {@link policy.queryDuration} for supported policy values. + * @default {@link policy.queryDuration.LONG} + */ + public expectedDuration?: policy.queryDuration; + /** + * Terminate the query if the cluster is in migration state. If the query's + * "where" clause is not defined (scan), this field is ignored. + * + * Requires Aerospike Server version 4.2.0.2 or later. + * + * @default false + * @since v3.4.0 + */ + public failOnClusterChange?: boolean; + /** + * Timeout in milliseconds used when the client sends info commands to + * check for cluster changes before and after the query. This timeout is + * only used when {@link + * QueryPolicy.failOnClusterChange | failOnClusterChange} is true and the + * query's "where" clause is defined. + * + * @default 10000 ms + * @since v3.16.5 + */ + public infoTimeout?: number; + /** + * Specifies the replica to be consulted for the query operation. + * + * @see {@link policy.replica} for supported policy values. 
+ */
+ public replica?: policy.replica;
+ /**
+ * Total transaction timeout in milliseconds.
+ *
+ * The totalTimeout is tracked on the client and sent to the
+ * server along with the transaction in the wire protocol. The client will
+ * most likely timeout first, but the server also has the capability to
+ * timeout the transaction.
+ *
+ * If totalTimeout is not zero and totalTimeout
+ * is reached before the transaction completes, the transaction will return
+ * error {@link status.ERR_TIMEOUT | ERR_TIMEOUT}.
+ * If totalTimeout is zero, there will be no total time limit.
+ *
+ * @default 0
+ * @override
+ */
+ public totalTimeout?: number;
+ /**
+ * Initializes a new QueryPolicy from the provided policy values.
+ *
+ * @param props - QueryPolicy values
+ */
+ constructor(props?: QueryPolicyOptions);
+ }
+
+ /**
+ * A policy affecting the behavior of read operations.
+ *
+ * @since v3.0.0
+ */
+ export class ReadPolicy extends BasePolicy {
+ /**
+ * Should CDT data types (Lists / Maps) be deserialized to JS data types
+ * (Arrays / Objects) or returned as raw bytes (Buffer).
+ *
+ * @type boolean
+ * @default true
+ * @since v3.7.0
+ */
+ public deserialize?: boolean;
+ /**
+ * Specifies the behavior for the key.
+ *
+ * @type number
+ * @see {@link policy.key} for supported policy values.
+ */
+ public key?: policy.key;
+ /**
+ * Read policy for AP (availability) namespaces.
+ *
+ * @type number
+ * @default Aerospike.policy.readModeAP.ONE
+ * @see {@link policy.readModeAP} for supported policy values.
+ */
+ public readModeAP?: policy.readModeAP;
+
+ /**
+ * Read policy for SC (strong consistency) namespaces.
+ *
+ * @type number
+ * @default Aerospike.policy.readModeSC.SESSION
+ * @see {@link policy.readModeSC} for supported policy values.
+ */
+ public readModeSC?: policy.readModeSC;
+ /**
+ * Determine how record TTL (time to live) is affected on reads. 
When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. + * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * * + * @type number + * @default 0 + */ + public readTouchTtlPercent?: number; + /** + * Specifies the replica to be consulted for the read operation. + * + * @type number + * @see {@link policy.replica} for supported policy values. + */ + public replica?: policy.replica; + + /** + * Initializes a new ReadPolicy from the provided policy values. + * + * @param props - ReadPolicy values + */ + constructor(props?: ReadPolicyOptions); + } + + /** + * A policy affecting the behavior of remove operations. + * + * @since v3.0.0 + */ + export class RemovePolicy extends BasePolicy { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + public commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + public gen?: policy.gen; + /** + * The generation of the record. 
+ */ + public generation?: number; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + public key?: policy.key; + /** + * Initializes a new RemovePolicy from the provided policy values. + * + * @param props - RemovePolicy values + */ + constructor(props?: RemovePolicyOptions); + } + + /** + * A policy affecting the behavior of scan operations. + * + * @since v3.0.0 + */ + export class ScanPolicy extends BasePolicy { + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + public durableDelete?: boolean; + /** + * Approximate number of records to return to client. This number is + * divided by the number of nodes involved in the scan. The actual number + * of records returned may be less than maxRecords if node record counts + * are small and unbalanced across nodes. + * + * Requires server >= 4.9. + * + * @default 0 (do not limit record count) + * + * @since v3.16.0 + */ + public maxRecords?: number; + /** + * Limit returned records per second (RPS) rate for each server. Do not + * apply RPS limit if recordsPerSecond is zero. + * + * Requires server >= 4.7. + * + * @default 0 + * + * @since v3.14.0 + */ + public recordsPerSecond?: number; + /** + * Specifies the replica to be consulted for the scan operation. + * + * @see {@link policy.replica} for supported policy values. + */ + public replica?: policy.replica; + /** + * Total transaction timeout in milliseconds. + * + * The totalTimeout is tracked on the client and sent to the + * server along with the transaction in the wire protocol. The client will + * most likely timeout first, but the server also has the capability to + * timeout the transaction. 
+ *
+ * If totalTimeout is not zero and totalTimeout
+ * is reached before the transaction completes, the transaction will return
+ * error {@link status.ERR_TIMEOUT | ERR_TIMEOUT}.
+ * If totalTimeout is zero, there will be no total time limit.
+ *
+ * @default 0
+ * @override
+ */
+ public totalTimeout?: number;
+ /**
+ * Initializes a new ScanPolicy from the provided policy values.
+ *
+ * @param props - ScanPolicy values
+ */
+ constructor(props?: ScanPolicyOptions);
+ }
+
+
+ /**
+ * A policy affecting the behavior of write operations.
+ *
+ * @since v3.0.0
+ */
+ export class WritePolicy extends BasePolicy {
+ /**
+ * Specifies the number of replicas required to be committed successfully
+ * when writing before returning transaction succeeded.
+ *
+ * @see {@link policy.commitLevel} for supported policy values.
+ */
+ public commitLevel?: policy.commitLevel;
+
+ /**
+ * Minimum record size beyond which it is compressed and sent to the
+ * server.
+ */
+ public compressionThreshold?: number;
+ /**
+ * Specifies whether a {@link
+ * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone}
+ * should be written in place of a record that gets deleted as a result of
+ * this operation.
+ *
+ * @default false (do not tombstone deleted records)
+ */
+ public durableDelete?: boolean;
+ /**
+ * Specifies the behavior for the existence of the record.
+ *
+ * @see {@link policy.exists} for supported policy values.
+ */
+ public exists?: policy.exists;
+ /**
+ * Specifies the behavior for the generation value.
+ *
+ * @see {@link policy.gen} for supported policy values.
+ */
+ public gen?: policy.gen;
+ /**
+ * Specifies the behavior for the key.
+ *
+ * @see {@link policy.key} for supported policy values.
+ */
+ public key?: policy.key;
+
+ /**
+ * Initializes a new WritePolicy from the provided policy values. 
+ *
+ * @param props - WritePolicy values
+ */
+ constructor(props?: WritePolicyOptions);
+ }
+
+ /**
+ * Represents any valid policy type in the Aerospike Client.
+ */
+ export type AnyPolicy = BasePolicy | ApplyPolicy | BatchPolicy | OperatePolicy | QueryPolicy | ReadPolicy | RemovePolicy | ScanPolicy | WritePolicy | BatchReadPolicy | BatchRemovePolicy | BatchWritePolicy | BatchApplyPolicy | CommandQueuePolicy | HLLPolicy | InfoPolicy | ListPolicy | MapPolicy;
+
+ /**
+ * Specifies the number of replicas required to be successfully committed
+ * before returning success in a write operation to provide the desired
+ * consistency guarantee.
+ */
+ export enum commitLevel {
+ /**
+ * Return success only after successfully committing all replicas.
+ */
+ ALL,
+ /**
+ * Return success after successfully committing the master replica.
+ */
+ MASTER
+ }
+ /**
+ * Specifies the behavior for writing the record depending whether or not it
+ * exists.
+ */
+ export enum exists {
+ /**
+ * Write the record, regardless of existence. (I.e. create or update.)
+ */
+ IGNORE,
+ /**
+ * Create a record, ONLY if it doesn't exist.
+ */
+ CREATE,
+ /**
+ * Update a record, ONLY if it exists.
+ */
+ UPDATE,
+ /**
+ * Completely replace a record, ONLY if it exists.
+ */
+ REPLACE,
+ /**
+ * Completely replace a record if it exists, otherwise create it.
+ */
+ CREATE_OR_REPLACE
+ }
+ /**
+ * The generation policy specifies how to handle record writes based
+ * on record generation.
+ *
+ * @remarks To use the EQ or GT generation policy
+ * (see below), the generation value to use for the comparison needs to be
+ * specified in the metadata parameter (meta) of the {@link
+ * Client#put} operation.
+ *
+ *
+ * @example Update record, only if generation matches
+ *
+ * const Aerospike = require('aerospike')
+ * const key = new Aerospike.Key('test', 'test', 'myKey')
+ * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
+ * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async (client) => { + * await client.put(key, { foo: 'bar' }) + * + * const record = await client.get(key) + * const gen = record.gen // Current generation of the record. (1 for new record.) + * // Perform some operation using record. Some other process might update the + * // record in the meantime, which would change the generation value. + * if (Math.random() < 0.1) await client.put(key, { foo: 'fox' }) + * + * try { + * // Update record only if generation is still the same. + * const meta = { gen } + * const policy = { gen: Aerospike.policy.gen.EQ } + * await client.put(key, { status: 'updated' }, meta, policy) + * console.log('Record updated successfully.') + * } catch (error) { + * if (error.code == Aerospike.status.ERR_RECORD_GENERATION) { + * console.error('Failed to update record, because generation did not match.') + * } + * } + * + * client.close() + * }) + */ + export enum gen { + /** + * Do not use record generation to restrict writes. + */ + IGNORE, + /** + * Update/delete record if expected generation is equal to + * server generation. Otherwise, fail. + */ + + EQ, + /** + * Update/delete record if expected generation greater than the + * server generation. Otherwise, fail. This is useful for restore after backup. + */ + GT + } + /** + * Specifies the behavior for whether keys or digests should be sent to the + * cluster. + */ + export enum key { + /** + * Send the digest value of the key. This is the recommended + * mode of operation. This calculates the digest and sends the digest to the + * server. The digest is only calculated on the client, and not the server. + */ + DIGEST, + /** + * Send the key, in addition to the digest value. 
If you want + * keys to be returned when scanning or querying, the keys must be stored on + * the server. This policy causes a write operation to store the key. Once the + * key is stored, the server will keep it - there is no need to use this policy + * on subsequent updates of the record. If this policy is used on read or + * delete operations, or on subsequent updates of a record with a stored key, + * the key sent will be compared with the key stored on the server. A mismatch + * will cause ERR_RECORD_KEY_MISMATCH to be returned. + */ + SEND + } + /** + * The {@link policy.queryDuration|aerospike/policy.query_duration} + * module contains a list of query duration enumerations. + * + * {@link policy.queryDuration|aerospike/policy.query_duration} module + */ + export enum queryDuration { + /** + * The query is expected to return more than 100 records per node. The server optimizes for a + * large record set in the following ways: + *
    + *
+     * <ul>
+     * <li>Allow query to be run in multiple threads using the server's query threading configuration.</li>
+     * <li>Do not relax read consistency for AP namespaces.</li>
+     * <li>Add the query to the server's query monitor.</li>
+     * <li>Do not add the overall latency to the server's latency histogram.</li>
+     * <li>Do not allow server timeouts.</li>
+     * </ul>
+ */ + LONG, + /** + * The query is expected to return less than 100 records per node. The server optimizes for a + * small record set in the following ways: + *
    + *
+     * <ul>
+     * <li>Always run the query in one thread and ignore the server's query threading configuration.</li>
+     * <li>Allow query to be inlined directly on the server's service thread.</li>
+     * <li>Relax read consistency for AP namespaces.</li>
+     * <li>Do not add the query to the server's query monitor.</li>
+     * <li>Add the overall latency to the server's latency histogram.</li>
+     * <li>Allow server timeouts. The default server timeout for a short query is 1 second.</li>
+     * </ul>
+ */ + SHORT, + /** + * Treat query as a LONG query, but relax read consistency for AP namespaces. + * This value is treated exactly like LONG for server versions < 7.1. + */ + LONG_RELAX_AP + } + /** + * Read policy for SC (strong consistency) namespaces. + * + * @remarks Determines SC read consistency options. + * + * @property SESSION - Ensures this client will only see an increasing sequence + * of record versions. Server only reads from master. This is the default. + * @property LINEARIZE - Ensures ALL clients will only see an increasing + * sequence of record versions. Server only reads from master. + * @property ALLOW_REPLICA - Server may read from master or any full + * (non-migrating) replica. Increasing sequence of record versions is not + * guaranteed. + * @property ALLOW_UNAVAILABLE - Server may read from master or any full + * (non-migrating) replica or from unavailable partitions. Increasing sequence + * of record versions is not guaranteed. + */ + export enum replica { + /** + * Ensures this client will only see an increasing sequence + * of record versions. Server only reads from master. This is the default. + */ + MASTER, + /** + * Ensures ALL clients will only see an increasing + * sequence of record versions. Server only reads from master. + */ + ANY, + /** + * Server may read from master or any full + * (non-migrating) replica. Increasing sequence of record versions is not + * guaranteed. + */ + SEQUENCE, + /** + * Server may read from master or any full + * (non-migrating) replica or from unavailable partitions. Increasing sequence + * of record versions is not guaranteed. + */ + PREFER_RACK + } + + /** + * Read policy for AP (availability) namespaces. + * + * @remarks How duplicates should be consulted in a read operation. + * Only makes a difference during migrations and only applicable in AP mode. + * + */ + export enum readModeAP { + /** + * Involve a single node in the read operation. 
+ */ + ONE, + /** + * Involve all duplicates in the read operation. + */ + ALL + } + /** + * Read policy for SC (strong consistency) namespaces. + * + * @remarks Determines SC read consistency options. + */ + export enum readModeSC { + /** + * Ensures this client will only see an increasing sequence + * of record versions. Server only reads from master. This is the default. + */ + SESSION, + /** + * Ensures ALL clients will only see an increasing + * sequence of record versions. Server only reads from master. + */ + LINEARIZE, + /** + * Server may read from master or any full + * (non-migrating) replica. Increasing sequence of record versions is not + * guaranteed. + */ + ALLOW_REPLICA, + /** + * Server may read from master or any full + * (non-migrating) replica or from unavailable partitions. Increasing sequence + * of record versions is not guaranteed. + */ + ALLOW_UNAVAILABLE + } + + export function createPolicy(type: string, values: AnyPolicy): AnyPolicy; +} + +/** + * The Aerospike Node.js client enables you to build an application in Node.js or Typescript with an Aerospike cluster as its database. + * The client manages the connections to the cluster and handles the transactions performed against it. + */ +export class Client extends EventEmitter { + /** + * A copy of the configuration with which the client was initialized. + */ + public config: Config; + /** + * Add-on C++ client for internal use only. + */ + private as_client: any; + /** + * Describes connection status. + */ + private connected: boolean; + /** + * + * Set to true to enable capturing of debug stacktraces for + * every database command. + * + * @remarks The client will capture a stacktrace before each database + * command is executed, instead of capturing the stacktrace only when an + * error is raised. This generally results in much more useful stacktraces + * that include stackframes from the calling application issuing the database + * command. 
+ * + * **Note:** Enabling this feature incurs a significant performance overhead for + * every database command. It is recommended to leave this feature disabled + * in production environments. + * + * By default, the client will set this flag to true, if the + * AEROSPIKE_DEBUG_STACKTRACES environment variable is set (to + * any value). + * + * @type {boolean} + * @default true, if + * process.env.AEROSPIKE_DEBUG_STACKTRACES is set; + * false otherwise. + */ + public captureStackTraces: boolean; + /** + * Construct a new Aerospike client instance. + * + * @param config - Configuration used to initialize the client. + */ + constructor(config: ConfigOptions); + /** + * @hidden + */ + private asExec(cmd: string, args?: any): any; + /** + * Returns a list of all cluster nodes known to the client. + * + * @return List of node objects + * + * @since v2.6.0 + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * console.log(client.getNodes()) // [ { name: 'SAMPLEADDRESS', address: 'SAMPLENAME' }, ...] + * client.close() + * }) + * + */ + public getNodes(): Node[]; + /** + * Adds a seed host to the cluster. + * + * @param hostname - Hostname/IP address of the new seed host + * @param port - Port number; defaults to {@link Config#port} or 3000. + * + * @since v2.6.0 + */ + public addSeedHost(hostname: string, port?: number): void; + /** + * Apply UDF (user defined function) on multiple keys. + * + * @remarks + * + * This method allows multiple sub-commands for each key in the batch. + * This method requires server >= 6.0.0. + * + * + * + * @param keys - An array of keys, used to locate the records in the cluster. + * @param udf - Server UDF module/function and argList to apply. + * @param batchPolicy - The Batch Policy to use for this operation. 
+ * @param batchApplyPolicy - UDF policy configuration parameters. + * + * @returns A Promise that resolves to the results of the batch operation. + * + * + * + * @since v5.0.0 + * + * @example Simple batchApply example + * + * const Aerospike = require('aerospike') + * var path = require('path'); + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * const config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * // This must be a path to a UDF file + * const scriptLocation = path.join(__dirname, 'udf-list.lua') + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(new Aerospike.Key('test', 'demo', 'key1'), {example: 30}) + * await client.put(new Aerospike.Key('test', 'demo', 'key2'), {example: 35}) + * await client.udfRegister(scriptLocation) + * + * // Execute the UDF + * let batchResult = await client.batchApply([new Aerospike.Key('test', 'demo', 'key1'), new Aerospike.Key('test', 'demo', 'key2')], + * { + * module: 'udf-list', + * funcname: 'updateRecord', + * args: ['example', 45] + * } + * ); + * + * // Access the records + * batchResult.forEach(result => { + * // Do something + * console.info("New value of example bin is %o \n", result.record.bins.SUCCESS); + * }); + * + * //Additional verfication + * let result = await client.get(new Aerospike.Key('test', 'demo', 'key1')) + * console.log(result.bins) // { example: 45 } + * result = await client.get(new Aerospike.Key('test', 'demo', 'key2')) + * console.log(result.bins) // { example: 45 } + * + * // Close the connection to the server + * await client.close(); + * })(); * + * + * @example Simple lua script to be used in example above + * + * function updateRecord(rec, 
binName, binValue) + * rec[binName] = binValue + * aerospike:update(rec) + * return binValue + * end + */ + public batchApply(keys: KeyOptions[], udf: UDF, batchPolicy?: policy.BatchPolicy | null, batchApplyPolicy?: policy.BatchApplyPolicy | null): Promise; + + /** + * @param keys - An array of keys, used to locate the records in the cluster. + * @param udf - Server UDF module/function and argList to apply. + * @param callback - The function to call when + * the operation completes. Includes the results of the batch operation. + * + */ + public batchApply(keys: KeyOptions[], udf: UDF, callback?: TypedCallback): void; + /** + * @param keys - An array of keys, used to locate the records in the cluster. + * @param udf - Server UDF module/function and argList to apply. + * @param batchPolicy - The Batch Policy to use for this operation. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + * + */ + public batchApply(keys: KeyOptions[], udf: UDF, batchPolicy?: policy.BatchPolicy, callback?: TypedCallback): void; + /** + * @param keys - An array of keys, used to locate the records in the cluster. + * @param udf - Server UDF module/function and argList to apply. + * @param batchPolicy - The Batch Policy to use for this operation. + * @param batchApplyPolicy - UDF policy configuration parameters. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + * + */ + public batchApply(keys: KeyOptions[], udf: UDF, batchPolicy?: policy.BatchPolicy, batchApplyPolicy?: policy.BatchApplyPolicy, callback?: TypedCallback): void; + + /** + * Checks the existence of a batch of records from the database cluster. + * + * @param keys - An array of Keys used to locate the records in the cluster. + * @param policy - The Batch Policy to use for this operation. + * + * @returns A Promise that resolves to the results of the batch operation. 
+ * + * @deprecated since v2.0 - use {@link Client#batchRead} instead. + * + * @example + * + * const Aerospike = require('aerospike') + * const Key = Aerospike.Key + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * var keys = [ + * new Key('test', 'demo', 'key1'), + * new Key('test', 'demo', 'key2'), + * new Key('test', 'demo', 'key3') + * ] + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30}) + * await client.put(keys[1], {example: 35}) + * await client.put(keys[2], {example: 40}) + * + * let results = await client.batchExists(keys) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * + * // Close the connection to the server + * await client.close(); + * })(); + * + * + */ + public batchExists(keys: KeyOptions[], policy?: policy.BatchPolicy | null): Promise; + /** + * @param keys - An array of Keys used to locate the records in the cluster. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchExists(keys: KeyOptions[], callback: TypedCallback): void; + /** + * @param keys - An array of Keys used to locate the records in the cluster. + * @param policy - The Batch Policy to use for this operation. 
+ * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchExists(keys: KeyOptions[], policy: policy.BatchPolicy | null , callback: TypedCallback): void; + + /** + * + * Read multiple records for specified batch keys in one batch call. + * + * @remarks + * + * This method allows different namespaces/bins to be requested for each key in + * the batch. This method requires server >= 3.6.0. + * + * @param records - List of {@link BatchReadRecord} instances which each contain keys and bins to retrieve. + * @param policy - The Batch Policy to use for this operation. + * + * @returns {?Promise} - A Promise that resolves to the results of the batch operation. + * + * @since v2.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const batchType = Aerospike.batchType + * const op = Aerospike.operations + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * var batchRecords = [ + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key1'), bins: ['example'] }, + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key2'), readAllBins: true }, + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key3'), + * ops:[ + * op.read('example') + * ]}, + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key4')} + * ] + * + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(batchRecords[0].key, {example: 30}) + * await client.put(batchRecords[1].key, {example: 35}) + * await client.put(batchRecords[2].key, {example: 40}) + * await client.put(batchRecords[3].key, {example: 45}) + * + * let results = await client.batchRead(batchRecords) + * results.forEach((result) => { + * + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * // Since the fourth record didn't specify bins to read, + * // the fourth record will return no bins, eventhough the batchRead succeeded. + * console.log(result.record.bins) + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * // Close the connection to the server + * await client.close(); + * })(); + */ + public batchRead(records: BatchReadRecord[], policy?: policy.BatchPolicy): Promise; + /** + * @param records - List of {@link BatchReadRecord} instances which each contain keys and bins to retrieve. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. 
+ */ + public batchRead(records: BatchReadRecord[], callback?: TypedCallback): void; + /** + * @param records - List of {@link BatchReadRecord} instances which each contain keys and bins to retrieve. + * @param policy - The Batch Policy to use for this operation. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchRead(records: BatchReadRecord[], policy?: policy.BatchPolicy | null, callback?: TypedCallback): void; + /** + * + * Reads a batch of records from the database cluster. + * + * @param keys - An array of {@link Key | Keys}, used to locate the records in the cluster. + * @param policy - The Batch Policy to use for this operation. + * + * @returns {?Promise} - A Promise that resolves to the results of the batch operation. + * + * @deprecated since v2.0 - use {@link Client#batchRead} instead. + * + * @example + * + * const Aerospike = require('aerospike') + * const Key = Aerospike.Key + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * var keys = [ + * new Key('test', 'demo', 'key1'), + * new Key('test', 'demo', 'key2'), + * new Key('test', 'demo', 'key3') + * ] + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30}) + * await client.put(keys[1], {example: 35}) + * await client.put(keys[2], {example: 40}) + * + * let results = await client.batchGet(keys) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * + * // Close the connection to the server + * await client.close(); + * })(); + * + */ + public batchGet(keys: KeyOptions[], policy?: policy.BatchPolicy | null): Promise; + /** + * + * @param keys - An array of {@link Key | Keys}, used to locate the records in the cluster. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchGet(keys: KeyOptions[], callback: TypedCallback): void; + /** + * + * @param keys - An array of {@link Key | Keys}, used to locate the records in the cluster. + * @param policy - The Batch Policy to use for this operation. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchGet(keys: KeyOptions[], policy: policy.BatchPolicy | null, callback: TypedCallback): void; + /** + * Remove multiple records. + * + * @remarks + * + * This method removes the specified records from the database. + * This method requires server >= 6.0.0. 
+ * + * @param keys - {@link Key} An array of keys, used to locate the records in the cluster. + * @param batchPolicy - The Batch Policy to use for this operation. + * @param batchRemovePolicy Remove policy configuration parameters. + * + * @returns A Promise that resolves to the results of the batch operation. + * + * @since v5.0.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const batchType = Aerospike.batchType + * const exp = Aerospike.exp + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * var keys = [ + * new Aerospike.Key('test', 'demo', 'key1'), + * new Aerospike.Key('test', 'demo', 'key2'), + * new Aerospike.Key('test', 'demo', 'key3') + * ] + * + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30}) + * await client.put(keys[1], {example: 35}) + * + * let results = await client.batchRemove(keys) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record deleted") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * // Close the connection to the server + * await client.close(); + * })(); + */ + public batchRemove(keys: KeyOptions[], batchPolicy?: policy.BatchPolicy | null, batchRemovePolicy?: policy.BatchRemovePolicy | null): Promise; + /** + * @param keys - {@link Key} An array of keys, used to locate the records in the cluster. 
+ * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchRemove(keys: KeyOptions[], callback?: TypedCallback): void; + /** + * @param keys - {@link Key} An array of keys, used to locate the records in the cluster. + * @param batchPolicy - The Batch Policy to use for this operation. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchRemove(keys: KeyOptions[], batchPolicy?: policy.BatchPolicy | null, callback?: TypedCallback): void; + /** + * @param keys - {@link Key} An array of keys, used to locate the records in the cluster. + * @param batchPolicy - The Batch Policy to use for this operation. + * @param batchRemovePolicy Remove policy configuration parameters. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchRemove(keys: KeyOptions[], batchPolicy?: policy.BatchPolicy | null, batchRemovePolicy?: policy.BatchRemovePolicy | null, callback?: TypedCallback): void; + + /** + * + * Reads a subset of bins for a batch of records from the database cluster. + * + * @param keys - An array of keys, used to locate the records in the cluster. + * @param bins - An array of bin names for the bins to be returned for the given keys. + * @param policy - The Batch Policy to use for this operation. + * + * @returns {?Promise} - If no callback function is passed, the function + * returns a Promise that resolves to the results of the batch operation. + * + * @deprecated since v2.0 - use {@link Client#batchRead} instead. + * + * @example + * + * const Aerospike = require('aerospike') + * const batchType = Aerospike.batchType + * const exp = Aerospike.exp + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
+ * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * var keys = [ + * new Aerospike.Key('test', 'demo', 'key1'), + * new Aerospike.Key('test', 'demo', 'key2'), + * new Aerospike.Key('test', 'demo', 'key3') + * ] + * + * var bins = ['example', 'user'] + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30, user: 'Doug', extra: 'unused'}) + * await client.put(keys[1], {example: 35}) + * + * let results = await client.batchSelect(keys, bins) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * // Since the fourth record didn't specify bins to read, + * // the fourth record will return no bins, eventhough the batchRead succeeded. + * console.log(result.record.bins) + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * // Close the connection to the server + * await client.close(); + * })(); + */ + public batchSelect(keys: KeyOptions[], bins: string[], policy?: policy.BatchPolicy): Promise; + /** + * @param keys - An array of keys, used to locate the records in the cluster. + * @param bins - An array of bin names for the bins to be returned for the given keys. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchSelect(keys: KeyOptions[], bins: string[], callback: TypedCallback): void; + /** + * @param keys - An array of keys, used to locate the records in the cluster. 
+ * @param bins - An array of bin names for the bins to be returned for the given keys. + * @param policy - The Batch Policy to use for this operation. + * @param callback - The function to call when + * the operation completes, with the results of the batch operation. + */ + public batchSelect(keys: KeyOptions[], bins: string[], policy: policy.BatchPolicy, callback: TypedCallback): void; + /** + * Read/Write multiple records for specified batch keys in one batch call. + * + * This method allows different sub-commands for each key in the batch. + * This method requires server >= 6.0.0. + * + * @param records - List of {@link BatchWriteRecord} instances which each contain keys and bins to retrieve. + * @param policy - The Batch Policy to use for this operation. + * + * @since v6.0.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const batchType = Aerospike.batchType + * const Key = Aerospike.Key + * const op = Aerospike.operations + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * const batchRecords = [ + * { + * type: batchType.BATCH_REMOVE, + * key: new Key("test", "demo", 'key1') + * }, + * { + * type: batchType.BATCH_WRITE, + * key: new Key("test", "demo", 'key2'), + * ops: [ + * op.write('example', 30), + * op.write('blob', Buffer.from('foo')) + * ], + * policy: new Aerospike.BatchWritePolicy({ + * exists: Aerospike.policy.exists.IGNORE + * }) + * }, + * { + * type: batchType.BATCH_WRITE, + * key: new Key("test", "demo", 'key3'), + * ops: [ + * op.write('example', 35), + * op.write('blob', Buffer.from('bar')) + * ], + * policy: new Aerospike.BatchWritePolicy({ + * exists: Aerospike.policy.exists.IGNORE + * }) + * } + * ] + * + * const batchReadRecords = [ + * { + * type: batchType.BATCH_READ, + * key: new Key("test", "demo", 'key1'), + * readAllBins: true + * }, + * { + * type: batchType.BATCH_READ, + * key: new Key("test", "demo", 'key2'), + * readAllBins: true + * }, + * { + * type: batchType.BATCH_READ, + * key: new Key("test", "demo", 'key3'), + * readAllBins: true + * } + * ] + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place a record for demonstration + * await client.put(new Key("test", "demo", 'key1'), {example: 30, user: 'Doug', extra: 'unused'}) + * + * let results = await client.batchWrite(batchRecords) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * + * results = await client.batchWrite(batchRecords) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case 
Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) + * // Close the connection to the server + * await client.close(); + * })(); + */ + public batchWrite(records: BatchWriteRecord[], policy?: policy.BatchPolicy | null): Promise; + /** + * @param records - List of {@link BatchWriteRecord} instances which each contain keys and bins to retrieve. + * @param callback - The function to call when the operation completes, Includes the results of the batch operation. + */ + public batchWrite(records: BatchWriteRecord[], callback?: TypedCallback): void; + /** + * @param records - List of {@link BatchWriteRecord} instances which each contain keys and bins to retrieve. + * @param policy - The Batch Policy to use for this operation. + * @param callback - The function to call when the operation completes, Includes the results of the batch operation. + */ + public batchWrite(records: BatchWriteRecord[], policy?: policy.BatchPolicy, callback?: TypedCallback): void; + /** + * + * Closes the client connection to the cluster. + * + * @param releaseEventLoop - Whether to release the event loop handle after the client is closed. Default is `false` + * + * @see {@link releaseEventLoop} + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * Aerospike.connect(config) + * .then(client => { + * // client is ready to accept commands + * console.log("Connected. Now Closing Connection.") + * client.close() + * }) + * .catch(error => { + * client.close() + * console.error('Failed to connect to cluster: %s', error.message) + * }) + */ + public close(releaseEventLoop?: boolean): void; + /** + * Establishes the connection to the cluster. 
+ * + * @remarks + * + * Once the client is connected to at least one server node, it will start + * polling each cluster node regularly to discover the current cluster status. + * As new nodes are added to the cluster, or existing nodes are removed, the + * client will establish or close down connections to these nodes. If the + * client gets disconnected from the cluster, it will keep polling the last + * known server endpoints, and will reconnect automatically if the connection + * is reestablished. + * + * @param callback - The function to call once the + * client connection has been established successfully and the client is ready + * to accept commands. + * + * @return {?Promise} If no callback function is passed, the function returns + * a Promise resolving to the connected client. + * + * @throws {AerospikeError} if event loop resources have already been released. + * + * @see {@link Config#connTimeoutMs} - Initial host connection timeout in milliseconds. + * @see {@link connect} + * @see {@link releaseEventLoop} + * + * @example A connection established using callback function. + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * Aerospike.connect(config, (error, client) => { + * if (error) { + * console.error('Failed to connect to cluster: %s', error.message) + * process.exit() + * } else { + * // client is ready to accept commands + * console.log("Connected. Now closing connection.") + * client.close() + * } + * }) + */ + public connect(callback?: TypedCallback): Promise; + /** + * Returns a deserialized CDT Context + * + * @param serializedContext - base64 serialized {@link cdt.Context} + * + * @return Deserialized CDT Context + * + * @see {@link contextToBase64} for a usage example. 
+ * + * @since v5.6.0 + * + */ + public contextFromBase64(serializedContext: string): cdt.Context; + /** + * Returns a serialized CDT Context + * + * @param context - {@link cdt.Context} + * + * @return serialized context - base64 representation of the CDT Context + * + * @since v5.6.0 + * + * @example How to use CDT context serialization + * + * const Aerospike = require('aerospike'); + * const Context = Aerospike.cdt.Context + * // Define host configuration + * let config = { + * hosts: '192.168.33.10:3000', + * policies: { + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * + * Aerospike.connect(config, async (error, client) => { + * // Create a context + * let context = new Context().addMapKey('nested') + * + * // Create keys for records to be written + * let recordKey = new Aerospike.Key('test', 'demo', 'record') + * let contextKey = new Aerospike.Key('test', 'demo', 'context') + * + * // Put record with a CDT + * await client.put(recordKey, {exampleBin: {nested: {food: 'blueberry', drink: 'koolaid'}}}) + * + * // Test the context with client.operate() + * var ops = [ + * Aerospike.maps.getByKey('exampleBin', 'food', Aerospike.maps.returnType.KEY_VALUE).withContext(context) + * ] + * let results = await client.operate(recordKey, ops) + * console.log(results.bins.exampleBin) // [ 'food', 'blueberry' ] + * + * // Serialize CDT Context + * let serializedContext = client.contextToBase64(context) + * + * // Put record with bin containing the serialized record + * await client.put(contextKey, {context: serializedContext}) + * + * // Get context when needed for operation + * let contextRecord = await client.get(contextKey) + * + * // Deserialize CDT Context + * context = client.contextFromBase64(contextRecord.bins.context) + * + * // Test the context with 
client.operate() + * ops = [ + * Aerospike.maps.getByKey('exampleBin', 'food', Aerospike.maps.returnType.KEY_VALUE).withContext(context) + * ] + * results = await client.operate(recordKey, ops) + * console.log(results.bins.exampleBin) // [ 'food', 'blueberry' ] + * + * // Close the client + * client.close() + * }) + */ + public contextToBase64(context: cdt.Context): string; + /** + * Creates a blob secondary index. + * + * This is a short-hand for calling {@link Client#createIndex} + * with the datatype option set to Aerospike.indexDataType.BLOB. + * + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * + * @returns {?Promise} - A Promise that will resolve to an {@link IndexJob} instance. + * + * @see {@link Client#createIndex} + * + * @example + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var binName = 'location' + * var indexName = 'locationIndex' + * var options = { ns: 'test', + * set: 'demo', + * bin: binName, + * index: indexName } + * + * client.createBlobIndex(options, function (error) { + * if (error) throw error + * console.info('SI %s on %s was created successfully', indexName, binName) + * client.close() + * }) + * }) + */ + public createBlobIndex(options: IndexOptions, policy?: policy.InfoPolicy | null): Promise; + /** + * @param options - Options for creating the index. + * @param callback - The function to call when the operation completes. + */ + public createBlobIndex(options: IndexOptions, callback: TypedCallback): void; + /** + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the operation completes. 
+ */ + public createBlobIndex(options: IndexOptions, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * + * Creates a secondary index (SI). + * + * @remarks + * + * Calling the createIndex method issues an + * index create command to the Aerospike cluster and returns immediately. To + * verify that the index has been created and populated with all the data use + * the {@link IndexJob} instance returned by the callback. + * + * Aerospike currently supports indexing of strings, integers and geospatial + * information in GeoJSON format. + * + * ##### String Indexes + * + * A string index allows for equality lookups. An equality lookup means that if + * you query for an indexed bin with value "abc", then only records containing + * bins with "abc" will be returned. + * + * ##### Integer Indexes + * + * An integer index allows for either equality or range lookups. An equality + * lookup means that if you query for an indexed bin with value 123, then only + * records containing bins with the value 123 will be returned. A range lookup + * means that if you can query bins within a range. So, if your range is + * (1...100), then all records containing a value in that range will be + * returned. + * + * ##### Geo 2D Sphere Indexes + * + * A geo 2d sphere index allows either "contains" or "within" lookups. A + * "contains" lookup means that if you query for an indexed bin with GeoJSON + * point element, then only records containing bins with a GeoJSON element + * containing that point will be returned. A "within" lookup means that if you + * query for an indexed bin with a GeoJSON polygon element, then all records + * containing bins with a GeoJSON element wholly contained within that polygon + * will be returned. + * + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * + * @returns A Promise that will resolve to an {@link IndexJob} instance. 
+ * + * @see {@link indexType} for enumeration of supported index types. + * @see {@link indexDataType} for enumeration of supported data types. + * @see {@link IndexJob} + * + * @since v2.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const Context = Aerospike.cdt.Context + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * // create index over user's recent locations + * let namespace = 'test' + * let set = 'demo' + * let binName = 'rloc' // recent locations + * let indexName = 'recentLocationsIdx' + * let indexType = Aerospike.indexType.LIST + * let dataType = Aerospike.indexDataType.GEO2DSPHERE + * let context = new Context().addListIndex(0) + * let options = { ns: namespace, + * set: set, + * bin: binName, + * index: indexName, + * type: indexType, + * datatype: dataType, + * context: context } + * + * let policy = new Aerospike.InfoPolicy({ timeout: 100 }) + * + * client.createIndex(options, policy, (error, job) => { + * if (error) throw error + * + * // wait for index creation to complete + * var pollInterval = 100 + * job.waitUntilDone(pollInterval, (error) => { + * if (error) throw error + * console.info('SI %s on %s was created successfully', indexName, binName) + * client.close() + * }) + * }) + * }) + */ + public createIndex(options: IndexOptions, policy?: policy.InfoPolicy | null): Promise; + /** + * @param options - Options for creating the index. + * @param callback - The function to call when the operation completes. + */ + public createIndex(options: IndexOptions, callback: TypedCallback): void; + /** + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the operation completes. 
+ */ + public createIndex(options: IndexOptions, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * Creates a SI of type Integer. + * + * @remarks This is a short-hand for calling {@link Client#createIndex} + * with the datatype option set to Aerospike.indexDataType.NUMERIC. + * + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * + * @returns {?Promise} - A Promise that will resolve to an {@link IndexJob} instance. + * + * @see {@link Client#createIndex} + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var binName = 'age' + * var indexName = 'ageIndex' + * var options = { ns: 'test', + * set: 'demo', + * bin: binName, + * index: indexName } + * + * client.createIntegerIndex(options, function (error) { + * if (error) throw error + * console.info('SI %s on %s was created successfully', indexName, binName) + * client.close() + * }) + * }) + */ + public createIntegerIndex(options: IndexOptions, policy?: policy.InfoPolicy | null): Promise; + /** + * @param options - Options for creating the index. + * @param callback - The function to call when the operation completes. + * + * @returns {?Promise} - A Promise that will resolve to an {@link IndexJob} instance. + */ + public createIntegerIndex(options: IndexOptions, callback: TypedCallback): void; + /** + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the operation completes. + * + * @returns {?Promise} - A Promise that will resolve to an {@link IndexJob} instance. 
+ */ + public createIntegerIndex(options: IndexOptions, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * Creates a SI of type String. + * + * @remarks This is a short-hand for calling {@link Client#createIndex} + * with the datatype option set to Aerospike.indexDataType.STRING. + * + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * + * @returns {?Promise} - A Promise that will resolve to an {@link IndexJob} instance. + * + * @see {@link Client#createIndex} + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var binName = 'name' + * var indexName = 'nameIndex' + * var options = { ns: 'test', + * set: 'demo', + * bin: binName, + * index: indexName } + * + * client.createStringIndex(options, function (error) { + * if (error) throw error + * console.info('SI %s on %s was created successfully', indexName, binName) + * client.close() + * }) + * }) + */ + public createStringIndex(options: IndexOptions, policy?: policy.InfoPolicy): Promise; + /** + * @param options - Options for creating the index. + * @param callback - The function to call when the operation completes. + */ + public createStringIndex(options: IndexOptions, callback: TypedCallback): void; + /** + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the operation completes. + */ + public createStringIndex(options: IndexOptions, policy: policy.InfoPolicy, callback: TypedCallback): void; + /** + * Creates a geospatial secondary index. 
+ * + * @remarks This is a short-hand for calling {@link Client#createIndex} + * with the datatype option set to Aerospike.indexDataType.GEO2DSPHERE. + * + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * + * @returns {?Promise} - A Promise that will resolve to an {@link IndexJob} instance. + * + * @see {@link Client#createIndex} + * + * @example + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var binName = 'location' + * var indexName = 'locationIndex' + * var options = { ns: 'test', + * set: 'demo', + * bin: binName, + * index: indexName } + * + * client.createGeo2DSphereIndex(options, function (error) { + * if (error) throw error + * console.info('SI %s on %s was created successfully', indexName, binName) + * client.close() + * }) + * }) + */ + public createGeo2DSphereIndex(options: IndexOptions, policy?: policy.InfoPolicy): Promise; + /** + * @param options - Options for creating the index. + * @param callback - The function to call when the operation completes. + */ + public createGeo2DSphereIndex(options: IndexOptions, callback: TypedCallback): void; + /** + * @param options - Options for creating the index. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the operation completes. + */ + public createGeo2DSphereIndex(options: IndexOptions, policy: policy.InfoPolicy, callback: TypedCallback): void; + /** + * + * Applies a User Defined Function (UDF) on a record in the database. + * + * @remarks Use this function to apply a + * ⇑Record UDF + * on a single record and return the result of the UDF function call. Record + * UDFs can be used to augment both read and write behavior. 
+ * + * For additional information please refer to the section on + * ⇑Developing Record UDFs + * in the Aerospike technical documentation. + * + * @param key - The key, used to locate the record in the cluster. + * @param udfArgs - Parameters used to specify which UDF function to execute. + * @param policy - The Apply Policy to use for this operation. + * + * @returns {?Promise} A Promise that resolves to the value returned by the UDF. + * + * @since v2.0 + * + * @see {@link Client#udfRegister} to register a UDF module to use with apply(). + * @see {@link Query#background} and {@link Scan#background} to apply a Record UDF function to multiple records instead. + * + * @example + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * const config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * apply : new Aerospike.ApplyPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * var key = new Aerospike.Key('test', 'demo', 'value') + * + * var udfArgs = { + * module: 'my_udf_module', + * funcname: 'my_udf_function', + * args: ['abc', 123, 4.5] + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.apply(key, udfArgs, (error, result) => { + * if (error) throw error + * + * console.log('Result of calling my_udf_function:', result) + * }) + * }) + */ + public apply(key: KeyOptions, udfArgs: UDF, policy?: policy.ApplyPolicy | null): Promise; + /** + * @param key - The key, used to locate the record in the cluster. + * @param udfArgs - Parameters used to specify which UDF function to execute. + * @param callback - This function will be called with the + * result returned by the Record UDF function call. + */ + public apply(key: KeyOptions, udfArgs: UDF, callback: TypedCallback): void; + /** + * @param key - The key, used to locate the record in the cluster. 
+ * @param udfArgs - Parameters used to specify which UDF function to execute. + * @param policy - The Apply Policy to use for this operation. + * @param callback - This function will be called with the + * result returned by the Record UDF function call. + */ + public apply(key: KeyOptions, udfArgs: UDF, policy: policy.ApplyPolicy | null, callback: TypedCallback): void; + /** + * Checks the existance of a record in the database cluster. + * + * @param key - The key of the record to check for existance. + * @param policy - The Read Policy to use for this operation. + * + * @returns {?Promise} A Promise that resolves to true if the record exists or + * false otherwise. + * + * @example + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * let key = new Aerospike.Key('test', 'demo', 'key1') + * Aerospike.connect(config) + * .then(client => { + * return client.exists(key) + * .then(exists => console.info('Key "%s" exists: %s', key.key, exists)) + * .then(() => client.close()) + * .catch(error => { + * console.error('Error checking existance of key:', error) + * client.close() + * }) + * }) + * .catch(error => { + * console.error('Error connecting to cluster:', error) + * }) + */ + public exists(key: KeyOptions, policy?: policy.ReadPolicy | null): Promise; + /** + * @param key - The key of the record to check for existance. + * @param callback - The function to call when the + * operation completes; the passed value is true if the record + * exists or false otherwise. + */ + public exists(key: KeyOptions, callback: TypedCallback): void; + /** + * @param key - The key of the record to check for existance. + * @param policy - The Read Policy to use for this operation. 
+ * @param callback - The function to call when the + * operation completes; the passed value is true if the record + * exists or false otherwise. + */ + public exists(key: KeyOptions, policy: policy.ReadPolicy | null, callback: TypedCallback): void; + /** + * Checks the existance of a record in the database cluster. + * + * @param key - The key of the record to check for existance. + * @param policy - The Read Policy to use for this operation. + * + * @returns A Promise that resolves to an {@link AerospikeRecord} containing no bins and a {@link RecordMetadata} object. + * If the metadata contains data, the record exists. If the metadata contains null values, then the record does not exist. + * + * @example + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * let key = new Aerospike.Key('test', 'demo', 'key1') + * Aerospike.connect(config) + * .then(client => { + * return client.exists(key) + * .then(exists => console.info('Key "%s" exists: %s', key.key, exists)) + * .then(() => client.close()) + * .catch(error => { + * console.error('Error checking existance of key:', error) + * client.close() + * }) + * }) + * .catch(error => { + * console.error('Error connecting to cluster:', error) + * }) + */ + public existsWithMetadata(key: KeyOptions, policy?: policy.ReadPolicy): Promise; + /** + * @param key - The key of the record to check for existance. + * @param callback - The function to call when the + * operation completes; An {@link AerospikeRecord} will be passed to the callback, containing no bins and a {@link RecordMetadata} object. + * If the metadata contains data, the record exists. If the metadata contains null values, then the record does not exist. 
+ */ + public existsWithMetadata(key: KeyOptions, callback: TypedCallback): void; + /** + * @param key - The key of the record to check for existance. + * @param policy - The Read Policy to use for this operation. + * @param callback - The function to call when the + * operation completes; An {@link AerospikeRecord} will be passed to the callback, containing no bins and a {@link RecordMetadata} object. + * If the metadata contains data, the record exists. If the metadata contains null values, then the record does not exist. + */ + public existsWithMetadata(key: KeyOptions, policy: policy.ReadPolicy, callback: TypedCallback): void; + /** + * Using the key provided, reads a record from the database cluster. + * + * @param key - The key used to locate the record in the cluster. + * @param policy - The Read Policy to use for this operation. + * + * @returns A Promise that resolves to a {@link Record}. + * + * @example + * const Aerospike = require('aerospike') + * var key = new Aerospike.Key('test', 'demo', 'key1') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record) + * client.close() + * }) + * }) + * + */ + public get(key: KeyOptions, policy?: policy.ReadPolicy): Promise; + /** + * @param key - The key used to locate the record in the cluster. + * @param callback - The function to call when the + * operation completes with the results of the operation; if no callback + * function is provided, the method returns a Promise instead. + */ + public get(key: KeyOptions, callback: TypedCallback): void; + /** + * @param key - The key used to locate the record in the cluster. 
+ * @param policy - The Read Policy to use for this operation. + * @param callback - The function to call when the + * operation completes with the results of the operation; if no callback + * function is provided, the method returns a Promise instead. + */ + public get(key: KeyOptions, policy: policy.ReadPolicy, callback: TypedCallback): void; + /** + * Removes the specified index. + * + * @param namespace - The namespace on which the index was created. + * @param index - The name of the index. + * @param policy - The Info Policy to use for this operation. + * + * @returns {?Promise} A Promise that resolves once the operation completes. + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * Aerospike.connect(config, (error, client) => { + * client.indexRemove('location', 'locationIndex', (error) => { + * if (error) throw error + * client.close() + * }) + * }) + */ + public indexRemove(namespace: string, index: string, policy?: policy.InfoPolicy | null): Promise; + /** + * @param namespace - The namespace on which the index was created. + * @param index - The name of the index. + * @param callback - The function to call when the + * operation completes with the result of the operation. + */ + public indexRemove(namespace: string, index: string, callback: TypedCallback): void; + /** + * @param namespace - The namespace on which the index was created. + * @param index - The name of the index. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the + * operation completes with the result of the operation. + */ + public indexRemove(namespace: string, index: string, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * Sends an info query to a specific cluster node. 
+ * + * @remarks The request parameter is a string representing an + * info request. If it is not specified, a default set of info values will be + * returned. + * + * Please refer to the + * Info Command Reference + * for a list of all available info commands. + * + * @param request - The info request to send. + * @param host - See {@link Host}. The address of the cluster host to send the request to. + * @param policy - The Info Policy to use for this operation. + * + * @returns A Promise that resolves to an info result string. + + * @see ⇑Info Command Reference + * + * @deprecated since v3.11.0 - use {@link Client#infoNode} or {@link Client#infoAny} instead. + * + * @example Sending a 'statistics' info query to a single host + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.info('statistics', {addr: '192.168.33.10', port: 3000}, (error, response) => { + * if (error) throw error + * console.log(response) + * client.close() + * }) + * }) + * + */ + public info(request: string, host: Host | string, policy?: policy.InfoPolicy | null): Promise; + /** + * @param request - The info request to send. + * @param host - See {@link Host}. The address of the cluster host to send the request to. + * @param callback - The function to call when an info response from a cluster host is received. + */ + public info(request: string | undefined, host: Host | string, callback: TypedCallback): void; + /** + * @param request - The info request to send. + * @param host - See {@link Host}. The address of the cluster host to send the request to. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when an info response from a cluster host is received. 
+ */ + public info(request: string | undefined, host: Host | string, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * Sends an info query to a single, randomly selected cluster node. + * + * @remarks The request parameter is a string representing an + * info request. If it is not specified, a default set of info values will be + * returned. + * + * @param request - The info request to send. + * @param policy - The Info Policy to use for this operation. + * + * @returns A Promise that resolves to an info result string. + * + * @see ⇑Info Command Reference + * + * @since v2.4.0 + * + * @example Sending 'statistics' info command to random cluster node + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.infoAny('statistics', (error, response) => { + * if (error) throw error + * console.log(response) + * client.close() + * }) + * }) + * + */ + public infoAny(request?: string | undefined, policy?: policy.InfoPolicy | null): Promise; + /** + * @param callback - The function to call once the node + * returns the response to the info command; if no callback function is + * provided, the method returns a Promise instead. + */ + public infoAny(callback: TypedCallback): void; + /** + * @param request - The info request to send. + * @param callback - The function to call once the node + * returns the response to the info command; if no callback function is + * provided, the method returns a Promise instead. + */ + public infoAny(request?: string | undefined, callback?: TypedCallback): void; + /** + * @param request - The info request to send. + * @param policy - The Info Policy to use for this operation. 
+ * @param callback - The function to call once the node + * returns the response to the info command; if no callback function is + * provided, the method returns a Promise instead. + */ + public infoAny(request?: string | undefined, policy?: policy.InfoPolicy | null, callback?: TypedCallback): void; + /** + * + * Sends an info query to all nodes in the cluster and collects the + * results. + * + * @remarks The request parameter is a string representing an + * info request. If it is not specified, a default set of info values will be + * returned. + * + * @param request - The info request to send. + * @param policy - The Info Policy to use for this operation. + * + * @returns A Promise that resolves to an {@link InfoAllResponse}. + * + * @see ⇑Info Command Reference + * + * @since v2.3.0 + * + * @example Sending info command to whole cluster + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.infoAll('statistics', (error, response) => { + * if (error) throw error + * console.log(response) + * client.close() + * }) + * }) + * + */ + public infoAll(request?: string | undefined, policy?: policy.InfoPolicy | null): Promise; + /** + * @param request - The info request to send. + * @param callback - The function to call once all nodes have + * returned a response to the info command; if no callback function is + * provided, the method returns a Promise instead. + */ + public infoAll(request?: string | undefined, callback?: TypedCallback): void; + /** + * @param request - The info request to send. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call once all nodes have + * returned a response to the info command; if no callback function is + * provided, the method returns a Promise instead. 
+ */ + public infoAll(request?: string | undefined, policy?: policy.InfoPolicy | null, callback?: TypedCallback): void; + /** + * Sends an info query to a single node in the cluster. + * + * @remarks The request parameter is a string representing an + * info request. If it is not specified, a default set of info values will be + * returned. + * + * @param request - The info request to send. + * @param node - The node to send the request to. See {@link InfoNodeParam}. + * @param policy - The Info Policy to use for this operation. + * + * @returns A Promise that resolves to an info result string. + * + * @see ⇑Info Command Reference + * + * @since v3.11.0 + * + * @example Sending 'statistics' info command to specific cluster node + * + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * const node = client.getNodes().pop() + * client.infoNode('statistics', node, (error, response) => { + * if (error) throw error + * console.log(response) + * client.close() + * }) + * }) + * + */ + public infoNode(request: string | undefined, node: InfoNodeParam, policy?: policy.InfoPolicy | null): Promise; + /** + * @param request - The info request to send. + * @param node - The node to send the request to. See {@link InfoNodeParam}. + * @param callback - The function to call once the node + * returns the response to the info command; if no callback function is + * provided, the method returns a Promise instead. + */ + public infoNode(request: string | undefined, node: InfoNodeParam, callback: TypedCallback): void; + /** + * @param request - The info request to send. + * @param node - The node to send the request to. See {@link InfoNodeParam}. + * @param policy - The Info Policy to use for this operation. 
+ * @param callback - The function to call once the node + * returns the response to the info command; if no callback function is + * provided, the method returns a Promise instead. + */ + public infoNode(request: string | undefined, node: InfoNodeParam, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * Is client connected to any server nodes. + * + * @param checkTenderErrors - Whether to consider a server + * node connection that has had 5 consecutive info request failures during + * cluster tender. Default is true. + * + * @returns {boolean} true if the client is currently connected to any server nodes. + * + * @since v2.0 + */ + public isConnected(checkTenderErrors?: boolean): boolean; + /** + * Performs multiple operations on a single record. + * + * @remarks Operations can be created using the methods in one of the + * following modules: + * * {@link operations} - General operations on all types. + * * {@link lists} - Operations on CDT List values. + * * {@link maps} - Operations on CDT Map values. + * * {@link bitwise} - Operations on Bytes values. + * + * @param key - The key of the record. + * @param operations - List of {@link operations.Operation | Operations} to perform on the record. + * @param metadata - Meta data. + * @param policy - The Operate Policy to use for this operation. + * + * @example + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * var ops = [ + * op.append('a', 'xyz'), + * op.incr('b', 10), + * op.read('b') + * ] + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.put(key, { a: 'abc', b: 42 }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, record) => { + * if (error) throw error + * console.log(record.bins) // => { b: 52 } + * client.close() + * }) + * }) + * }) + * + */ + public operate(key: KeyOptions, operations: operations.Operation[], metadata?: RecordMetadata | null, policy?: policy.OperatePolicy | null): Promise; + /** + * @param key - The key of the record. + * @param operations - List of {@link operations.Operation | Operations} to perform on the record. + * @param callback - The function to call when the + * operation completes with the results of the operation; if no callback + * function is provided, the method returns a Promise instead. + */ + public operate(key: KeyOptions, operations: operations.Operation[], callback: TypedCallback): void; + /** + * @param key - The key of the record. + * @param operations - List of {@link operations.Operation | Operations} to perform on the record. + * @param metadata - Meta data. + * @param callback - The function to call when the + * operation completes with the results of the operation; if no callback + * function is provided, the method returns a Promise instead. + */ + public operate(key: KeyOptions, operations: operations.Operation[], metadata: RecordMetadata, callback: TypedCallback): void; + /** + * @param key - The key of the record. + * @param operations - List of {@link operations.Operation | Operations} to perform on the record. + * @param metadata - Meta data. + * @param policy - The Operate Policy to use for this operation. 
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation; if no callback
+   * function is provided, the method returns a Promise instead.
+   */
+  public operate(key: KeyOptions, operations: operations.Operation[], metadata: RecordMetadata | null, policy: policy.OperatePolicy | null, callback: TypedCallback): void;
+  /**
+   * Shortcut for applying the {@link
+   * operations.append} operation to one or more record bins.
+   *
+   * @remarks This function works on bins of type string or bytes; to append
+   * a new value (of any type) to a bin containing a list of existing values, use
+   * the {@link lists.append} operation instead.
+   *
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the
+   * corresponding values to append to the bin value. The bins must contain
+   * either string or byte array values and the values to append must be of the
+   * same type.
+   * @param metadata - Meta data.
+   * @param policy - The Operate Policy to use for this operation.
+   *
+   * @returns A Promise that resolves to the results of the operation.
+   *
+   * @see {@link Client#operate}
+   * @see {@link operations.append}
+   */
+  public append(key: KeyOptions, bins: AerospikeBins, metadata?: RecordMetadata | null, policy?: policy.OperatePolicy | null): Promise;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the
+   * corresponding values to append to the bin value. The bins must contain
+   * either string or byte array values and the values to append must be of the
+   * same type.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+   */
+  public append(key: KeyOptions, bins: AerospikeBins, callback: TypedCallback): void;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the
+   * corresponding values to append to the bin value. 
The bins must contain
+   * either string or byte array values and the values to append must be of the
+   * same type.
+   * @param metadata - Meta data.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+   */
+  public append(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, callback: TypedCallback): void;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the
+   * corresponding values to append to the bin value. The bins must contain
+   * either string or byte array values and the values to append must be of the
+   * same type.
+   * @param metadata - Meta data.
+   * @param policy - The Operate Policy to use for this operation.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+   */
+  public append(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, policy: policy.OperatePolicy | null, callback: TypedCallback): void;
+  /**
+   *
+   * Shortcut for applying the {@link operations.prepend} operation to one or more record bins.
+   *
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the corresponding values to prepend to the bin value.
+   * @param metadata - Meta data.
+   * @param policy - The Operate Policy to use for this operation.
+   *
+   * @returns A Promise that resolves to the results of the operation.
+   *
+   * @see {@link Client#operate}
+   * @see {@link operations.prepend}
+   */
+  public prepend(key: KeyOptions, bins: AerospikeBins, metadata?: RecordMetadata | null, policy?: policy.OperatePolicy | null): Promise;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the corresponding values to prepend to the bin value.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+   */
+  public prepend(key: KeyOptions, bins: AerospikeBins, callback: TypedCallback): void;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the corresponding values to prepend to the bin value.
+   * @param metadata - Meta data.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+   */
+  public prepend(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, callback: TypedCallback): void;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the corresponding values to prepend to the bin value.
+   * @param metadata - Meta data.
+   * @param policy - The Operate Policy to use for this operation.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+   */
+  public prepend(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, policy: policy.OperatePolicy | null, callback: TypedCallback): void;
+  /**
+   * Shortcut for applying the {@link operations.add} operation to one or more record bins.
+   *
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with.
+   * @param metadata - Meta data.
+   * @param policy - The Operate Policy to use for this operation.
+   *
+   * @returns A Promise that resolves to the results of the operation.
+   *
+   * @since v2.0
+   *
+   * @see {@link Client#operate}
+   * @see {@link operations.incr}
+   */
+  public add(key: KeyOptions, bins: AerospikeBins, metadata?: RecordMetadata | null, policy?: policy.OperatePolicy | null): Promise;
+  /**
+   * @param key - The key of the record.
+   * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation.
+ */ + public add(key: KeyOptions, bins: AerospikeBins, callback: TypedCallback): void; + /** + * @param key - The key of the record. + * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with. + * @param callback - The function to call when the + * operation completes with the results of the operation. + */ + public add(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, callback: TypedCallback): void; + /** + * @param key - The key of the record. + * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with. + * @param metadata - Meta data. + * @param callback - The function to call when the + * operation completes with the results of the operation. + */ + public add(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, policy: policy.OperatePolicy | null, callback: TypedCallback): void; + /** + * + * Alias for {@link Client#add}. + * + * @param key - The key of the record. + * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with. + * @param metadata - Meta data. + * @param policy - The Operate Policy to use for this operation. + * + * @returns A Promise that resolves to the results of the opertion. + */ + public incr(key: KeyOptions, bins: AerospikeBins, metadata?: RecordMetadata, policy?: policy.OperatePolicy): Promise; + /** + * + * Alias for {@link Client#add}. + * + * @param key - The key of the record. + * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with. + * @param callback - The function to call when the + * operation completes with the results of the operation. + * + */ + public incr(key: KeyOptions, bins: AerospikeBins, callback: TypedCallback): void; + /** + * + * Alias for {@link Client#add}. + * + * @param key - The key of the record. 
+ * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with. + * @param metadata - Meta data. + * @param callback - The function to call when the + * operation completes with the results of the operation. + * + */ + public incr(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, callback: TypedCallback): void; + /** + * + * Alias for {@link Client#add}. + * + * @param key - The key of the record. + * @param bins - The key-value mapping of bin names and the corresponding values to use to increment the bin values with. + * @param metadata - Meta data. + * @param policy - The Operate Policy to use for this operation. + * @param callback - The function to call when the + * operation completes with the results of the operation. + * + */ + public incr(key: KeyOptions, bins: AerospikeBins, metadata: RecordMetadata | null, policy: policy.OperatePolicy | null, callback: TypedCallback): void; + /** + * Writes a record to the database cluster. + * + * @remarks + * If the record exists, it modifies the record with bins provided. + * To remove a bin, set its value to null. + * + * __Note:__ The client does not perform any automatic data type conversions. + * Attempting to write an unsupported data type (e.g. boolean) into a record + * bin will cause an error to be returned. Setting an undefined + * value will also cause an error. + * + * @param key - The key of the record. + * @param bins - A record object used for specifying the fields to store. + * @param meta - Meta data. + * @param policy - The Write Policy to use for this operation. + * + * @returns A Promise that resolves to a {@link Record}. + + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * const Key = Aerospike.Key + * + * var key = new Key('test', 'demo', 'key1') + * var bins = { + * a: 'xyz', + * b: 123 + * } + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.put(key, bins, (error) => { + * if (error) throw error + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record) + * client.close() + * }) + * }) + * }) + */ + public put(key: KeyOptions, bins: AerospikeBins | Map | Bin | AerospikeRecord, meta?: RecordMetadata | null, policy?: policy.WritePolicy | null): Promise; + /** + * @param key - The key of the record. + * @param bins - A record object used for specifying the fields to store. + * @param callback - The function to call when the operation completes with the result of the operation. + */ + public put(key: KeyOptions, bins: AerospikeBins | Map | Bin | AerospikeRecord, callback: TypedCallback): void; + /** + * @param key - The key of the record. + * @param bins - A record object used for specifying the fields to store. + * @param meta - Meta data. + * @param callback - The function to call when the operation completes with the result of the operation. + */ + public put(key: KeyOptions, bins: AerospikeBins | Map | Bin | AerospikeRecord, meta: RecordMetadata | null, callback: TypedCallback): void; + /** + * @param key - The key of the record. + * @param bins - A record object used for specifying the fields to store. + * @param meta - Meta data. + * @param policy - The Write Policy to use for this operation. + * @param callback - The function to call when the operation completes with the result of the operation. 
+   */
+  public put(key: KeyOptions, bins: AerospikeBins | Map | Bin | AerospikeRecord, meta: RecordMetadata | null, policy: policy.WritePolicy | null, callback: TypedCallback): void;
+  /**
+   * Creates a new {@link Query} instance, which is used to define query
+   * in the database.
+   *
+   * @param ns - The namespace to be queried.
+   * @param options - Query parameters. See {@link Query} constructor for details.
+   *
+   * @returns A {@link Query} instance.
+
+   * @see {@link Query}
+   *
+   * @example
+   *
+   * const filter = Aerospike.filter
+   *
+   * var statement = {}
+   * statement.filters = [filter.equal('color', 'blue')]
+   *
+   * var query = client.query(ns, set, statement)
+   * var stream = query.execute()
+   */
+  public query(ns: string, options?: QueryOptions): Query;
+  /**
+   * @param ns - The namespace to be queried.
+   * @param set - The set on which the query is to be executed.
+   * @param options - Query parameters. See {@link Query} constructor for details.
+   */
+  public query(ns: string, set: string | null, options?: QueryOptions): Query;
+  /**
+   *
+   * Removes a record with the specified key from the database cluster.
+   *
+   * @param key - The key of the record.
+   * @param policy - The Remove Policy to use for this operation.
+   *
+   * @returns A Promise that resolves to the {@link Key} of the removed record.
+
+   * @example
+   *
+   * const Aerospike = require('aerospike')
+   *
+   * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE!
+   * var config = {
+   *   hosts: '192.168.33.10:3000',
+   *   // Timeouts disabled, latency dependent on server location. Configure as needed.
+   *   policies: {
+   *     remove : new Aerospike.RemovePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *     write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   }
+   * }
+   *
+   * const Key = Aerospike.Key
+   *
+   * var key = new Key('test', 'demo', 'key1')
+   * var bins = {
+   *   a: 'xyz',
+   *   b: 123
+   * }
+   * Aerospike.connect(config, (error, client) => {
+   *   if (error) throw error
+   *   client.put(key, bins, (error) => {
+   *     if (error) throw error
+   *     client.remove(key, (error) => {
+   *       if (error) throw error
+   *       console.log("Record removed")
+   *       client.close()
+   *     })
+   *   })
+   * })
+   */
+  public remove(key: KeyOptions, policy?: policy.RemovePolicy | null): Promise;
+  /**
+   * @param key - The key of the record.
+   * @param callback - The function to call when the operation completes with the results of the operation.
+   */
+  public remove(key: KeyOptions, callback: TypedCallback): void;
+  /**
+   * @param key - The key of the record.
+   * @param policy - The Remove Policy to use for this operation.
+   * @param callback - The function to call when the operation completes with the results of the operation.
+   */
+  public remove(key: KeyOptions, policy: policy.RemovePolicy | null, callback: TypedCallback): void;
+  /**
+   *
+   * Removes a seed host from the cluster.
+   *
+   * @param hostname - Hostname/IP address of the seed host
+   * @param port - Port number; defaults to {@link Config#port} or 3000.
+   *
+   * @since v2.6.0
+   */
+  public removeSeedHost(hostname: string, port?: number): void;
+  /**
+   * Creates a new {@link Scan} instance in order to execute a database
+   * scan using the Scan API.
+   *
+   * @see {@link Scan} constructor for options that can be used to initialize a
+   * new instance.
+   *
+   * @param ns - The namespace.
+   * @param options - Scan parameters. See {@link Scan} constructor for details.
+   *
+   * @returns A {@link Scan} instance.
+   *
+   * @since v2.0
+   */
+  public scan(ns: string, options?: ScanOptions): Scan;
+  /**
+   * @param ns - The namespace.
+   * @param set - The name of a set.
+   * @param options - Scan parameters. See {@link Scan} constructor for details.
+   *
+   * @returns A {@link Scan} instance.
+   */
+  public scan(ns: string, set?: string, options?: ScanOptions): Scan;
+  /**
+   *
+   * Retrieves selected bins for a record of given key from the database cluster.
+   *
+   * @param key - The key of the record.
+   * @param bins - A list of bin names for the bins to be returned.
+   * @param policy - The Read Policy to use for this operation.
+   *
+   * @returns A Promise that resolves to a {@link AerospikeRecord}.
+   *
+   * @example
+   *
+   * const Aerospike = require('aerospike')
+   *
+   * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE!
+   * var config = {
+   *   hosts: '192.168.33.10:3000',
+   *   // Timeouts disabled, latency dependent on server location. Configure as needed.
+   *   policies: {
+   *     read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}),
+   *     write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   }
+   * }
+   *
+   * const Key = Aerospike.Key
+   *
+   * var key = new Key('test', 'demo', 'key1')
+   *
+   * var bins = {
+   *   a: 'xyz',
+   *   b: 123
+   * }
+   *
+   * Aerospike.connect(config, (error, client) => {
+   *   if (error) throw error
+   *   client.put(key, bins, (error) => {
+   *     if (error) throw error
+   *     client.select(key, ['a', 'b'], (error, record) => {
+   *       if (error) throw error
+   *       console.log(record)
+   *       client.close()
+   *     })
+   *   })
+   * })
+   *
+   */
+  public select(key: KeyOptions, bins: string[], policy?: policy.ReadPolicy | null): Promise;
+  /**
+   * @param key - The key of the record.
+   * @param bins - A list of bin names for the bins to be returned.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation; if no callback
+   * function is provided, the method returns a Promise instead.
+   */
+  public select(key: KeyOptions, bins: string[], callback: TypedCallback): void;
+  /**
+   * @param key - The key of the record.
+   * @param bins - A list of bin names for the bins to be returned.
+   * @param policy - The Read Policy to use for this operation.
+   * @param callback - The function to call when the
+   * operation completes with the results of the operation; if no callback
+   * function is provided, the method returns a Promise instead.
+   */
+  public select(key: KeyOptions, bins: string[], policy: policy.ReadPolicy | null, callback: TypedCallback): void;
+  /**
+   * Removes records in specified namespace/set efficiently.
+   *
+   * @remarks This method is many orders of magnitude faster than deleting
+   * records one at a time. It requires server 3.12 or later.
+   *
+   * @param ns - Required namespace.
+   * @param set - Optional set name. Set to null to delete
+   * all sets in namespace.
+   * @param beforeNanos - Optionally delete records before given last
+   * update time. Units are in nanoseconds since unix epoch (1970-01-01). If
+   * specified, the value must be before the current time. Pass in 0 to delete
+   * all records in namespace/set regardless of last update time.
+   * @param policy - The Info Policy to use for this operation.
+   *
+   * @returns A Promise that resolves when the truncate is complete.
+   *
+   * @see https://www.aerospike.com/docs/reference/info#truncate
+   */
+  public truncate(ns: string, set: string | null, beforeNanos: number, policy?: policy.InfoPolicy | null): Promise;
+
+  /**
+   * @param ns - Required namespace.
+   */
+  public truncate(ns: string, callback: TypedCallback): void;
+  /**
+   * @param ns - Required namespace.
+   * @param set - Optional set name. Set to null to delete
+   * all sets in namespace.
+   */
+  public truncate(ns: string, set: string | null, callback: TypedCallback): void;
+  /**
+   * @param ns - Required namespace.
+   * @param set - Optional set name. Set to null to delete
+   * all sets in namespace.
+   * @param beforeNanos - Optionally delete records before given last
+   * update time. Units are in nanoseconds since unix epoch (1970-01-01). If
+   * specified, the value must be before the current time. Pass in 0 to delete
+   * all records in namespace/set regardless of last update time.
+   */
+  public truncate(ns: string, set: string | null, beforeNanos: number, callback: TypedCallback): void;
+  /**
+   * @param ns - Required namespace.
+   * @param set - Optional set name. Set to null to delete
+   * all sets in namespace.
+   * @param beforeNanos - Optionally delete records before given last
+   * update time. Units are in nanoseconds since unix epoch (1970-01-01). If
+   * specified, the value must be before the current time. Pass in 0 to delete
+   * all records in namespace/set regardless of last update time.
+   * @param policy - The Info Policy to use for this operation.
+   */
+  public truncate(ns: string, set: string | null, beforeNanos: number, policy: policy.InfoPolicy | null, callback: TypedCallback): void;
+  /**
+   * Registers a UDF module with the database cluster.
+   *
+   * @remarks This method loads a Lua script from the local filesystem into
+   * the Aerospike database cluster and registers it for use as a UDF module. The
+   * client uploads the module to a single cluster node. It then gets distributed
+   * within the whole cluster automatically. The callback function is called once
+   * the initial upload into the cluster has completed (or if an error occurred
+   * during the upload). One of the callback parameters is a {@link UdfJob}
+   * instance that can be used to verify that the module has been registered
+   * successfully on the entire cluster.
+   *
+   * @param udfPath - The file path to the Lua script to load into the server.
+   * @param udfType - Language of the UDF script. Lua is the default
+   * and only supported scripting language for UDF modules at the moment; ref.
+   * {@link language}.
+   * @param policy - The Info Policy to use for this operation.
+ * + * @returns A Promise that resolves to a {@link Job} instance. + * + * @example + * + * const Aerospike = require('aerospike') + * + * Aerospike.connect((error, client) => { + * if (error) throw error + * + * var path = './udf/my_module.lua' + * client.udfRegister(path, (error, job) => { + * if (error) throw error + * + * job.waitUntilDone(100, (error) => { + * if (error) throw error + * + * // UDF module was successfully registered on all cluster nodes + * + * client.close() + * }) + * }) + * }) + */ + public udfRegister(udfPath: string, udfType?: language | null, policy?: policy.InfoPolicy | null): Promise; + /** + * @param udfPath - The file path to the Lua script to load into the server. + * @param policy - The Info Policy to use for this operation. + * + * @returns A Promise that resolves to a {@link Job} instance. + */ + public udfRegister(udfPath: string, policy?: policy.InfoPolicy | null): Promise; + /** + * @param udfPath - The file path to the Lua script to load into the server. + * @param callback - The function to call when the + * operation completes with the result of the operation. + */ + + public udfRegister(udfPath: string, callback: TypedCallback): void; + /** + * @param udfPath - The file path to the Lua script to load into the server. + * @param udfType - Language of the UDF script. Lua is the default + * and only supported scripting language for UDF modules at the moment; ref. + * {@link language}. + * @param callback - The function to call when the + * operation completes with the result of the operation. + */ + public udfRegister(udfPath: string, udfType: language | null, callback: TypedCallback): void; + /** + * @param udfPath - The file path to the Lua script to load into the server. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the + * operation completes with the result of the operation. 
+ */ + public udfRegister(udfPath: string, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * @param udfPath - The file path to the Lua script to load into the server. + * @param udfType - Language of the UDF script. Lua is the default + * and only supported scripting language for UDF modules at the moment; ref. + * {@link language}. + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call when the + * operation completes with the result of the operation. + */ + public udfRegister(udfPath: string, udfType: language | null, policy: policy.InfoPolicy | null, callback: TypedCallback): void; + /** + * Returns runtime stats about the client instance. + * + * @returns {@link Stats | Client Stats} + * + * @since v3.8.0 + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * const stats = client.stats() + * console.info(stats) // => { commands: { inFlight: 0, queued: 0 }, + * // nodes: + * // [ { name: 'BB94DC08D270008', + * // syncConnections: { inPool: 1, inUse: 0 }, + * // asyncConnections: { inPool: 0, inUse: 0 } }, + * // { name: 'C1D4DC08D270008', + * // syncConnections: { inPool: 0, inUse: 0 }, + * // asyncConnections: { inPool: 0, inUse: 0 } } ] } + * client.close() + * }) + * + */ + public stats(): Stats; + /** + * Removes a UDF module from the cluster. + * + * @remarks The info command to deregister the UDF module is sent to a + * single cluster node by the client. It then gets distributed within the whole + * cluster automatically. The callback function is called once the initial info + * command has succeeded (or if an error occurred). 
One of the callback
+   * parameters is a {@link UdfJob} instance that can be used to verify that the
+   * module has been removed successfully from the entire cluster.
+   *
+   * For server versions 4.5.0 and before, trying to delete an UDF module that
+   * does not exist on the server, will return an error. Starting with server
+   * version 4.5.1, the server no longer returns an error and the command will
+   * succeed.
+   *
+   * @param udfModule - The basename of the UDF module, without the
+   * local pathname but including the file extension (".lua").
+   * @param policy - The Info Policy to use for this operation.
+   *
+   * @example
+   *
+   * const Aerospike = require('aerospike')
+   *
+   * Aerospike.connect((error, client) => {
+   *   if (error) throw error
+   *
+   *   var module = 'my_module.lua'
+   *   client.udfRemove(module, (error, job) => {
+   *     if (error) throw error
+   *
+   *     job.waitUntilDone(100, (error) => {
+   *       if (error) throw error
+   *
+   *       // UDF module was successfully removed from all cluster nodes
+   *
+   *       client.close()
+   *     })
+   *   })
+   * })
+   */
+  public udfRemove(udfModule: string, policy?: policy.InfoPolicy | null): Promise;
+  /**
+   * @param udfModule - The basename of the UDF module, without the
+   * local pathname but including the file extension (".lua").
+   * @param callback - The function to call when the
+   * operation completes with the result of the operation.
+   *
+   */
+  public udfRemove(udfModule: string, callback: TypedCallback): void;
+  /**
+   * @param udfModule - The basename of the UDF module, without the
+   * local pathname but including the file extension (".lua").
+   * @param policy - The Info Policy to use for this operation.
+   * @param callback - The function to call when the
+   * operation completes with the result of the operation.
+   *
+   */
+  public udfRemove(udfModule: string, policy: policy.InfoPolicy, callback: TypedCallback): void;
+  /**
+   * Updates log settings for the client.
+ * + * @param logConfig - A {@link Log} instance containing a log level and/or a file descriptor. For more info, see {@link Log} + */ + public updateLogging(logConfig: Log): void; + /** + * Client#changePassword + * + * Change a user's password. + * + * @param user - User name for the password change. + * @param password - User password in clear-text format. + * @param policy - Optional {@link AdminPolicy}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * // User must be created before password is changed. See {@link Client#createUser} for an example. + * client.changePassword("khob", "TryTiger7!", ["Engineer"]) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public changePassword(user: string, password: string, policy?: policy.AdminPolicy | null): void; + /** + * Create user with password and roles. Clear-text password will be hashed using bcrypt before sending to server. + * + * @param user - User name for the new user. + * @param password - User password in clear-text format. + * @param roles - Optional array of role names. For more information on roles, see {@link admin.Role}. + * @param policy - Optional {@link policy.AdminPolicy}. 
+ * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * client.createUser("khob", "MightyMice55!", ["Engineer"]) + * // Must wait a short length of time of the user to be fully created. + * await wait(5) + * const user = await client.queryUser("khob", null) + * console.log(user) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public createUser(user: string, password: string, roles?: Array | null, policy?: policy.AdminPolicy | null): void; + /** + * Create user defined role with optional privileges, whitelist and read/write quotas. + * Quotas require server security configuration "enable-quotas" to be set to true. + * + * @param roleName - role name + * @param privileges - List of privileges assigned to a role. + * @param policy - Optional {@link AdminPolicy}. + * @param whitelist - Optional list of allowable IP addresses assigned to role. IP addresses can contain wildcards (ie. 10.1.2.0/24). + * @param readQuota - Optional maximum reads per second limit, pass in zero for no limit. + * @param writeQuota - Optional maximum writes per second limit, pass in zero for no limit. 
+ * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configs can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * client.createRole("Engineer", [new Aerospike.admin.Privilege(Aerospike.privilegeCode.READ_WRITE), new Aerospike.admin.Privilege(Aerospike.privilegeCode.TRUNCATE)], null) + * // Must wait a short length of time of the role to be fully created. + * await wait(5) + * const role = await client.queryRole("Engineer", null) + * console.log(role) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public createRole(roleName: string, privileges: Array, policy?: policy.AdminPolicy | null, whitelist?: Array | null, readQuota?: number | null, writeQuota?: number | null ): void; + /** + * Drop user defined role. + * + * @param roleName - role name + * @param policy - Optional {@link AdminPolicy}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * // A role must be created before a role can be dropped. See {@link Client#createRole} for an example. 
+ * client.dropRole("Engineer")
+ * // Must wait a short length of time for the role to be fully dropped.
+ * await wait(5)
+ * let roles = await client.queryRoles()
+ * // 'Engineer' should no longer appear in the logged list of roles
+ * console.log(roles)
+ * } catch (error) {
+ * console.error('Error:', error)
+ * process.exit(1)
+ * } finally {
+ * if (client) client.close()
+ * }
+ * })()
+ */
+ public dropRole(roleName: string, policy?: policy.AdminPolicy | null): void;
+ /**
+ *
+ * Remove a user from the cluster.
+ *
+ * @param user - User name to be dropped.
+ * @param policy - Optional {@link AdminPolicy}.
+ *
+ * @example
+ *
+ * const Aerospike = require('aerospike')
+ *
+ * function wait (ms) {
+ * return new Promise(resolve => setTimeout(resolve, ms))
+ * }
+ *
+ * ;(async function () {
+ * let client
+ * try {
+ * client = await Aerospike.connect({
+ * hosts: '192.168.33.10:3000',
+ * policies: {
+ * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}),
+ * },
+ * // Must have security enabled in server configuration before user and password configurations can be used.
+ * user: 'admin',
+ * password: 'admin'
+ * })
+ *
+ * // A user must be created before a user can be dropped. See {@link Client#createUser} for an example.
+ * client.dropUser("khob")
+ * // Must wait a short length of time for the user to be fully dropped.
+ * await wait(5)
+ * let users = await client.queryUsers()
+ * // 'khob' should no longer appear in the logged list of users
+ * console.log(users)
+ * } catch (error) {
+ * console.error('Error:', error)
+ * process.exit(1)
+ * } finally {
+ * if (client) client.close()
+ * }
+ * })()
+ */
+ public dropUser(user: string, policy?: policy.AdminPolicy | null): void;
+ /**
+ * Grant privileges to a user-defined role.
+ *
+ * @param roleName - role name
+ * @param privileges - list of privileges assigned to a role.
+ * @param policy - Optional {@link AdminPolicy}.
+ *
+ * @example
+ *
+ * const Aerospike = require('aerospike')
+ *
+ * function wait (ms) {
+ * return new Promise(resolve => setTimeout(resolve, ms))
+ * }
+ *
+ * ;(async function () {
+ * let client
+ * try {
+ * client = await Aerospike.connect({
+ * hosts: '192.168.33.10:3000',
+ * policies: {
+ * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}),
+ * },
+ * // Must have security enabled in server configuration before user and password configurations can be used.
+ * user: 'admin',
+ * password: 'admin'
+ * })
+ *
+ * // A role must be created before privileges can be granted. See {@link Client#createRole} for an example.
+ * client.grantPrivileges("Engineer", [new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN)])
+ * // Must wait a short length of time for the privilege to be granted.
+ * await wait(5)
+ * let role = await client.queryRole("Engineer")
+ * console.log(role)
+ * } catch (error) {
+ * console.error('Error:', error)
+ * process.exit(1)
+ * } finally {
+ * if (client) client.close()
+ * }
+ * })()
+ */
+ public grantPrivileges(roleName: string, privileges: Array, policy?: policy.AdminPolicy | null): void;
+ /**
+ *
+ * Grant roles to a user.
+ *
+ * @param user - User name for granted roles
+ * @param roles - Optional array of role names. For more information on roles, see {@link admin.Role}.
+ * @param policy - Optional {@link AdminPolicy}.
+ *
+ * @example
+ *
+ * const Aerospike = require('aerospike')
+ *
+ * function wait (ms) {
+ * return new Promise(resolve => setTimeout(resolve, ms))
+ * }
+ *
+ * ;(async function () {
+ * let client
+ * try {
+ * client = await Aerospike.connect({
+ * hosts: '192.168.33.10:3000',
+ * policies: {
+ * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}),
+ * },
+ * // Must have security enabled in server configuration before user and password configurations can be used.
+ * user: 'admin', + * password: 'admin' + * }) + * + * // A user must be created before roles can be granted. See {@link Client#createUser} for an example. + * client.grantRoles("khob", ["Engineer"]) + * // Must wait a short length of time for the role to be granted + * await wait(5) + * let user = await client.queryUser("khob") + * console.log(user) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + */ + public grantRoles(user: string, roles: Array, policy?: policy.AdminPolicy | null): void; + /** + * + * Retrieves an {@link admin.Role} from the database. + * + * @param {String} roleName - role name filter. + * @param {Object} policy - Optional {@link AdminPolicy}. + * + * @returns An instance of {@link admin.Role}. For more information on roles, see {@link admin.Role}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * // A role must be created before a role can be queried. See {@link Client#createRole} for an example. + * let role = await client.queryRole("Engineer") + * console.log(role) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public queryRole(roleName: string, policy?: policy.AdminPolicy | null): admin.Role; + /** + * + * Retrieve all roles and role information from the database. + * + * @param policy - Optional {@link AdminPolicy}. 
+ * + * @returns An list of {@link admin.Role} instances. For more information on roles, see {@link admin.Role}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * let roles = await client.queryRoles() + * console.log(roles) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public queryRoles(policy?: policy.AdminPolicy | null): Array; + /** + * Retrieves an {@link admin.User} from the database. + * + * @param user - User name filter. + * @param policy - Optional {@link AdminPolicy}. + * + * @returns An instance of {@link admin.User}. For more information on roles, see {@link admin.User}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * + * // A user must be created before a user can be queried. See {@link Client#createUser} for an example. 
+ * let user = await client.queryUser("khob")
+ * console.log(user)
+ * } catch (error) {
+ * console.error('Error:', error)
+ * process.exit(1)
+ * } finally {
+ * if (client) client.close()
+ * }
+ * })()
+ */
+ public queryUser(user: string, policy?: policy.AdminPolicy | null): admin.User;
+ /**
+ *
+ * Retrieves all users and user information from the database.
+ *
+ * @param policy - Optional {@link AdminPolicy}.
+ *
+ * @returns A list of {@link admin.User} instances. For more information on users, see {@link admin.User}.
+ *
+ * @example
+ *
+ * const Aerospike = require('aerospike')
+ *
+ * function wait (ms) {
+ * return new Promise(resolve => setTimeout(resolve, ms))
+ * }
+ *
+ * ;(async function () {
+ * let client
+ * try {
+ * client = await Aerospike.connect({
+ * hosts: '192.168.33.10:3000',
+ * policies: {
+ * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}),
+ * },
+ * // Must have security enabled in server configuration before user and password configurations can be used.
+ * user: 'admin',
+ * password: 'admin'
+ * })
+ *
+ * let users = await client.queryUsers()
+ * console.log(users)
+ * } catch (error) {
+ * console.error('Error:', error)
+ * process.exit(1)
+ * } finally {
+ * if (client) client.close()
+ * }
+ * })()
+ */
+ public queryUsers(policy?: policy.AdminPolicy | null): Array;
+ /**
+ *
+ * Revoke privileges from a user-defined role.
+ *
+ * @param roleName - role name
+ * @param privileges - List of privileges assigned to a role.
+ * @param policy - Optional {@link AdminPolicy}.
+ * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * // A role must be created before privileges can be revoked. See {@link Client#createRole} for an example. + * client.revokePrivileges("Engineer", [new Aerospike.admin.Privilege(Aerospike.privilegeCode.SINDEX_ADMIN)]) + * // Must wait a short length of time for the privilege to be granted. + * await wait(5) + * let users = await client.queryRole("Engineer") + * console.log(users) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public revokePrivileges(roleName: string, privileges: Array, policy?: policy.AdminPolicy | null): void; + /** + * Remove roles from user's list of roles. + * + * @param user - User name for revoked roles. + * @param roles - Optional array of role names. For more information on roles, see {@link admin.Role}. + * @param policy - Optional {@link AdminPolicy}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. 
+ * user: 'admin', + * password: 'admin' + * }) + * // A user must be created before roles can be revoked. See {@link Client#createUser} for an example. + * client.revokeRoles("khob", ["Engineer"]) + * // Must wait a short length of time for the privilege to be granted. + * await wait(5) + * let user = await client.queryUser("khob") + * console.log(user) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + */ + public revokeRoles(user: string, roles: Array, policy?: policy.AdminPolicy | null): void; + /** + * Set maximum reads/writes per second limits for a role. If a quota is zero, the limit is removed. + * Quotas require server security configuration "enable-quotas" to be set to true. + * + * @param roleName - role name + * @param readQuota - maximum reads per second limit, pass in zero for no limit. + * @param writeQuota - maximum writes per second limit, pass in zero for no limit. + * @param policy - Optional {@link AdminPolicy}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * // Quotas must be enabled in the server configurations for quotas to be set. + * client.setQuotas("Engineer", 200, 300) + * // Must wait a short length of time for the privilegee to be granted. 
+ * await wait(5) + * let role = await client.queryRole("Engineer") + * console.log(role) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + */ + public setQuotas(roleName: string, readQuota: number, writeQuota: number, policy?: policy.AdminPolicy | null): void; + /** + * Set IP address whitelist for a role. If whitelist is null or empty, remove existing whitelist from role. + * + * @param roleName - role name + * @param whitelist - Optional list of allowable IP addresses assigned to role. + * IP addresses can contain wildcards (ie. 10.1.2.0/24). + * @param policy - Optional {@link AdminPolicy}. + * + * @example + * + * const Aerospike = require('aerospike') + * + * function wait (ms) { + * return new Promise(resolve => setTimeout(resolve, ms)) + * } + * + * ;(async function () { + * let client + * try { + * client = await Aerospike.connect({ + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), + * }, + * // Must have security enabled in server configuration before user and password configurations can be used. + * user: 'admin', + * password: 'admin' + * }) + * // Quotas must be enabled in the server configurations for quotas to be set. + * client.setWhitelist("Engineer", ["172.17.0.2"]) + * // Must wait a short length of time for the privilegee to be granted. + * await wait(5) + * let role = await client.queryRole("Engineer") + * console.log(role) + * } catch (error) { + * console.error('Error:', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + */ + public setWhitelist(roleName: string, whitelist: Array | null, policy?: policy.AdminPolicy | null): void; +} + +/** + * The Config class contains the settings for an Aerospike client + * instance, including the list of seed hosts, default policies, and other + * settings. 
+ * + * @throws {TypeError} If invalid config values are passed. + * + * @example + * + * const Aerospike = require('aerospike') + * + * let config = { + * hosts: '192.168.1.10,192.168.1.11', + * user: process.env.DATABASE_USER, + * password: process.env.DATABASE_PASSWORD, + * policies: { + * read: new Aerospike.ReadPolicy({ + * totalTimeout: 0 + * }) + * }, + * log: { + * level: Aerospike.log.INFO, + * file: 2 // log to stderr + * } + * } + * + * Aerospike.connect(config) + * .then(client => { + * // client is ready to accept commands + * client.close() + * }) + * .catch(error => { + * console.error('Failed to connect to cluster: %s', error.message) + * }) + * + * + * // Initializes a new client configuration from the given config values. + * + */ +export class Config { + /** + * Authentication mode used when user/password is defined. + * + * One of the auth modes defined in {@link auth}. + */ + public authMode?: auth; + /** + * Initial host connection timeout in milliseconds. + * + * The client observes this timeout when opening a connection to + * the cluster for the first time. + * + * @default 1000 + */ + public connTimeoutMs?: number; + /** + * Expected Cluster Name. + * + * If not null, server nodes must return this + * cluster name in order to join the client's view of the cluster. Should + * only be set when connecting to servers that support the "cluster-name" + * info command. + * + * @since v2.4 + */ + public clusterName?: string; + /** + * + * The number of cluster tend iterations that defines the window for {@link maxErrorRate} to be surpassed. One tend iteration is defined + * as {@link tenderInterval} plus the time to tend all nodes. At the end of the window, the error count is reset to zero and backoff state is removed on all nodes. + * + * @type {number} + * + * @default 1 + */ + public errorRateWindow?: number; + /** + * List of hosts with which the client should attempt to connect. 
+ * + * If not specified, the client attempts to read the host list + * from the AEROSPIKE_HOSTS environment variable or else falls + * back to use a default value of "localhost". + * + * @example Setting hosts using a string: + * + * const Aerospike = require('aerospike') + * + * const hosts = '192.168.0.1:3000,192.168.0.2:3000' + * const client = await Aerospike.connect({ hosts }) + * + * @example Setting hosts using an array of hostname/port tuples: + * + * const Aerospike = require('aerospike') + * + * const hosts = [ + * { addr: '192.168.0.1', port: 3000 }, + * { addr: '192.168.0.2', port: 3000 } + * ] + * const client = await Aerospike.connect({ hosts }) + * + * @example Setting hosts with TLS name using a string: + * + * const Aerospike = require('aerospike') + * + * const hosts = '192.168.0.1:example.com:3000,192.168.0.2:example.com:3000' + * const client = await Aerospike.connect({ hosts }) + * + * @example Setting hosts using an array of hostname/port/tlsname tuples: + * + * const Aerospike = require('aerospike') + * + * const hosts = [ + * { addr: '192.168.0.1', port: 3000, tlsname: 'example.com' }, + * { addr: '192.168.0.2', port: 3000, tlsname: 'example.com' } + * ] + * const client = await Aerospike.connect({ hosts }) + */ + public hosts: Host[] | string; + + /** + * Configuration for logging done by the client. + * + + * + * @example Enabling debug logging to a separate log file + * + * const Aerospike = require('aerospike') + * + * const fs = require('fs') + * + * var debuglog = fs.openSync('./debug.log', 'w') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * log: { + * level: Aerospike.log.DEBUG, + * file: debuglog + * } + * } + * Aerospike.connect(config, (err, client) => { + * if (err) throw err + * console.log("Connected. Now closing connection.") + * client.close() + * }) + */ + public log?: Log; + /** + * Node login timeout in milliseconds. 
+ * + * @type {number} + * @default 5000 + */ + public loginTimeoutMs?: number; + /** + * Maximum number of asynchronous connections allowed per server node. + * + * New transactions will be rejected with an {@link + * status.ERR_NO_MORE_CONNECTIONS | ERR_NO_MORE_CONNECTIONS} + * error if the limit would be exceeded. + * * + * @default 100 + */ + public maxConnsPerNode?: number; + /** + * Maximum number of errors allowed per node per error_rate_window before backoff algorithm returns + * `AEROSPIKE_MAX_ERROR_RATE` for database commands to that node. If max_error_rate is zero, there is no error limit. + * The counted error types are any error that causes the connection to close (socket errors and client timeouts), + * server device overload and server timeouts. + * + * The application should backoff or reduce the transaction load until `AEROSPIKE_MAX_ERROR_RATE` stops being returned. + * + * If the backoff algorithm has been activated, transactions will fail with {@link + * status.AEROSPIKE_MAX_ERROR_RATE | AEROSPIKE_MAX_ERROR_RATE} until the {@link errorRateWindow} has passed and the + * error count has been reset. + * + * @default 100 + */ + public maxErrorRate?: number; + /** + * Maximum socket idle time in seconds. + * + * Connection pools will discard sockets that have been idle + * longer than the maximum. The value is limited to 24 hours (86400). + * + * It's important to set this value to a few seconds less than the server's + * proto-fd-idle-ms (default 60000 milliseconds or 1 minute), + * so the client does not attempt to use a socket that has already been + * reaped by the server. + * + * Connection pools are now implemented by a LIFO stack. Connections at the + * tail of the stack will always be the least used. These connections are + * checked for maxSocketIdle once every 30 tend iterations + * (usually 30 seconds). 
+ * + * + * @default 0 seconds + */ + public maxSocketIdle?: number; + /** + * Minimum number of asynchronous connections allowed per server node. + * + * Preallocate min connections on client node creation. The + * client will periodically allocate new connections if count falls below + * min connections. + * + * Server proto-fd-idle-ms may also need to be increased + * substantially if min connections are defined. The + * proto-fd-idle-ms default directs the server to close + * connections that are idle for 60 seconds which can defeat the purpose of + * keeping connections in reserve for a future burst of activity. + * + * If server proto-fd-idle-ms is changed, client {@link + * Config#maxSocketIdle} should also be changed to be a few seconds less + * than proto-fd-idle-ms. + * + * @default 0 + */ + public minConnsPerNode?: number; + /** + * Configuration values for the mod-lua user path. + * + * If you are using user-defined functions (UDF) for processing + * query results (i.e. aggregations), then you will find it useful to set + * the modlua settings. Of particular importance is the + * modelua.userPath, which allows you to define a path to where + * the client library will look for Lua files for processing. + * + */ + public modlua: ModLua; + + /** + * The password to use when authenticating to the cluster. + */ + public password?: string; + + /** + * Global client policies. + * + * The configuration defines default policies for the + * application. Policies define the behavior of the client, which can be + * global for all uses of a single type of operation, or local to a single + * use of an operation. + * + * Each database operation accepts a policy for that operation as an + * argument. This is considered a local policy, and is a single use policy. + * This local policy supersedes any global policy defined. + * + * If a value of the policy is not defined, then the rule is to fallback to + * the global policy for that operation. 
If the global policy for that + * operation is undefined, then the global default value will be used. + * + * If you find that you have behavior that you want every use of an + * operation to utilize, then you can specify the default policy as + * {@link Config#policies}. + * + * For example, the {@link Client#put} operation takes a {@link + * WritePolicy} parameter. If you find yourself setting the {@link + * WritePolicy#key} policy value for every call to {@link Client.put}, then + * you may find it beneficial to set the global {@link WritePolicy} in + * {@link Config#policies}, which all operations will use. + * * + * @example Setting a default key policy for all write operations + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * write: new Aerospike.WritePolicy({ + * key: Aerospike.policy.key.SEND, + * socketTimeout : 0, + * totalTimeout : 0 + * }) + * } + * } + * + * let key = new Aerospike.Key('test', 'demo', 123) + * + * Aerospike.connect(config) + * .then(client => { + * return client.put(key, {int: 42}) + * .then(() => client.close()) + * .catch(error => { + * throw error + * client.close() + * }) + * }) + * .catch(console.error) + */ + public policies: ConfigPolicies; + /** + * Default port to use for any host address, that does not + * explicitly specify a port number. Default is 3000. + * * + * @since v2.4 + */ + public port: number; + /** + * Track server rack data. + * + * This field is useful when directing read commands to the + * server node that contains the key and exists on the same rack as the + * client. This serves to lower cloud provider costs when nodes are + * distributed across different racks/data centers. + * + * {@link rackId} config, {@link + * policy.replica.PREFER_RACK} replica policy, and server + * rack configuration must also be set to enable this functionality. 
+ * + * @default false + * + * @since 3.8.0 + */ + public rackAware?: boolean; + /** + * Rack where this client instance resides. + * + * {@link rackAware} config, {@link policy.replica.PREFER_RACK} replica policy, and server + * rack configuration must also be set to enable this functionality. + * + * @default 0 + * + * @since 3.8.0 + */ + public rackId?: number; + /** + * Shared memory configuration. + * + * This allows multiple client instances running in separate + * processes on the same machine to share cluster status, including nodes and + * data partition maps. Each shared memory segment contains state for one + * Aerospike cluster. If there are multiple Aerospike clusters, a different + * key must be defined for each cluster. + * + * @see {@link http://www.aerospike.com/docs/client/c/usage/shm.html#operational-notes|Operational Notes} + * + * @example Using shared memory in a clustered setup + * + * const Aerospike = require('aerospike') + * const cluster = require('cluster') + * + * const config = { + * sharedMemory: { + * key: 0xa5000000 + * } + * } + * const client = Aerospike.client(config) + * const noWorkers = 4 + * + * if (cluster.isMaster) { + * // spawn new worker processes + * for (var i = 0; i < noWorkers; i++) { + * cluster.fork() + * } + * } else { + * // connect to Aerospike cluster in each worker process + * client.connect((err) => { if (err) throw err }) + * + * // handle incoming HTTP requests, etc. + * // http.createServer((request, response) => { ... }) + * + * // close DB connection on shutdown + * client.close() + * } + */ + public sharedMemory?: SharedMemory; + + /** + * Polling interval in milliseconds for cluster tender. + * + * @default 1000 + */ + public tenderInterval?: number; + /** + * Configure Transport Layer Security (TLS) parameters for secure + * connections to the database cluster. TLS connections are not supported as + * of Aerospike Server v3.9 and depend on a future server release. 
+ * + * @since v2.4 + */ + public tls?: TLSInfo; + /** + * Whether the client should use the server's + * alternate-access-address instead of the + * access-address. + * + * @default false + * + * @since v3.7.1 + */ + public useAlternateAccessAddress: boolean; + /** + * The user name to use when authenticating to the cluster. + * + * Leave empty for clusters running without access management. + * (Security features are available in the Aerospike Database Enterprise + * Edition.) + * + */ + public user?: string; + + /** + * Construct an instance of the Config class. + */ + constructor(config?: ConfigOptions); + + /** + * Set default policies from the given policy values. + * + * @param policies - one or more default policies + * @throws {TypeError} if any of the properties of the policies object is not + * a valid policy type + */ + public setDefaultPolicies(policies?: ConfigPolicies): void; +} + +/** + * All the decimal values with valid fractions (e.g. 123.45) will be + * stored as double data type in Aerospike. To store decimal values with 0 + * fraction as double, the value needs to be wrapped in a `Double` class + * instance + * + * @example + * + * const Aerospike = require('aerospike') + * const Double = Aerospike.Double + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * // (1.0) must be wrapped with "Double" in order to be added to another double. + * // (6.283) does not need to be wrapped, but it may be wrapped if convenient. 
+ * ops = [Aerospike.operations.incr('d', 6.283), + * Aerospike.operations.incr('d', new Double(1.0))] + * const key = new Aerospike.Key('test', 'demo', 'myDouble') + * var record = { d: 3.1415 } + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * client.put(key, record, (error) => { + * if (error) throw error + * client.operate(key, ops, (error) => { + * if (error) throw error + * client.get(key, (error, record) => { + * console.log(record.bins.d) // => 10.4245 + * client.close() + * }) + * }) + * }) + * }) + */ +export class Double { + /** + * Creates a new Double instance. + * + * @param value - The value of the double. + */ + constructor(value: number); + /** + * Value represented as a Double. + */ + public Double: number; + /** + * Returns Double value to user. + */ + public value(): number; +} + +/** + * + * Error raised by the client when execution of a database command fails. This + * may be either due to an error status code returned by the server, or caused + * by an error condition that occured on the client side. + * + * @example Expected output: "Error: 127.0.0.1:3000 Record does not exist in database. May be returned by read, or write with policy Aerospike.policy.exists.UPDATE [2]" + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * let key = new Aerospike.Key('test', 'key', 'does_not_exist') + * Aerospike.connect() + * .then(client => { + * client.get(key) + * .then(record => console.info(record)) + * .catch(error => console.error(`Error: ${error.message} [${error.code}]`)) + * .then(() => client.close()) + * }) + */ +export class AerospikeError extends Error { + /** + * Numeric status code returned by the server or the client. + * * + * @see {@link statusNamespace} contains the full list of possible status codes. + */ + readonly code: typeof statusNamespace[keyof typeof statusNamespace]; + /** + * Command during which the error occurred. + */ + readonly command: any | null; + /** + * C/C++ function name in which the error occurred. + */ + readonly func?: string | null; + /** + * File name of the C/C++ source file in which the error occurred. + */ + readonly file?: string | null; + /** + * Line number in the C/C++ source file in which the error occurred. + * + * @type {?number} + */ + readonly line?: number | null; + /** + * It is possible that a write transaction completed even though the client + * returned this error. This may be the case when a client error occurs + * (like timeout) after the command was sent to the server. + */ + readonly inDoubt?: boolean; + /** + * Constructs a new instace of AerospikeError. + */ + constructor(message?: string, command?: any); + private static fromASError(asError: AerospikeError | Error | null, command?: AerospikeError): AerospikeError; + private static copyASErrorProperties(target: AerospikeError, source: Error): void; + private static formatMessage(message: string, code: typeof statusNamespace[keyof typeof statusNamespace]): string; + private setStackTrace(stack: string): void; + /** + * Indicates whether the error originated on the database server. 
+ * + * @returns true if the server raised the error, false otherwise. + */ + public isServerError(): boolean; + /** + * The {@link Client} instance associated with this error, if any. + * + * @since v3.7.0 + * + * @example Closing the client connection, when an error occurs: + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * Aerospike.connect(config).then(async client => { + * await client.put(new Aerospike.Key('demo', 'test', 'foo'), { 'foo': 'bar' }) + * client.close() + * }).catch(error => { + * console.error('Error: %s [%i]', error.message, error.code) + * if (error.client) error.client.close() + * }) + */ + get client(): Client | void; +} + +/** + * Representation of a GeoJSON value. Since GeoJSON values are JSON + * objects they need to be wrapped in the GeoJSON class so that + * the client can distinguish them from other types of objects. + * + * For more information, please refer to the section on + * ⇑Geospatial Data Type + * in the Aerospike technical documentation. + * + * @example + * + * const Aerospike = require('aerospike') + * const GeoJSON = Aerospike.GeoJSON + * const Key = Aerospike.Key + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * + * } + * } + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * let key = new Key('test', 'demo', 'bob') + * let location = new GeoJSON({type: 'Point', coordinates: [103.913, 1.308]}) + * client.put(key, {loc: location}, (error) => { + * if (error) throw error + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record.bins.loc) // => {"type":"Point","coordinates":[103.913,1.308]} + * client.close() + * }) + * }) + * }) + * + */ +export class GeoJSON { + /** + * Creates a new GeoJSON instance. + * + * @param json - GeoJSON value; the constructor accepts + * either a string representation of the JSON object, or a JS object. + */ + constructor(json: string | object); + /** + * + * Helper function to create a new GeoJSON geometry object + * representing a circle with the given coordinates and radius. + * + * @param lng - Longitude of the center point. + * @param lat - Latitude of the center point. + * @param radius - Radius in meters. + * @returns a GeoJSON representation of the circle. + * + * @see [Aerospike GeoJSON Extension]{@link https://www.aerospike.com/docs/guide/geospatial.html#aerospike-geojson-extension} + * + * @example + * + * const Aerospike = require('aerospike') + * const GeoJSON = Aerospike.GeoJSON + * + * let point = GeoJSON.Circle(103.913, 1.308, 5000) + */ + static Circle: { + new (lng: number, lat: number, radius: number); + }; + /** + * + * Helper function to create a new GeoJSON object representing the + * point with the given coordinates. 
+ * + * @param lng - Longitude + * @param lat - Latitude + * + * @returns a GeoJSON representation of the point + * + * @example + * + * const Aerospike = require('aerospike') + * const GeoJSON = Aerospike.GeoJSON + * + * let point = GeoJSON.Point(103.913, 1.308) + */ + static Point: { + new (lng: number, lat: number); + }; + /** + * + * Helper function to create a new GeoJSON object representing the + * polygon with the given coordinates. + * + * @param coordinates - one or more coordinate pairs (lng, lat) + * describing the polygon. + * + * @returns a GeoJSON representation of the polygon. + * + * @example + * + * const Aerospike = require('aerospike') + * const GeoJSON = Aerospike.GeoJSON + * + * let polygon = GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) + */ + static Polygon: { + new (...coordinates: number[][]); + }; + public str?: string; + /** + * Returns the GeoJSON value as a JS object. + * + */ + public toJSON(): GeoJSONType; + /** + * Returns the GeoJSON value as a string + * + */ + public toString(): string; + /** + * Alias for {@link GeoJSON#toJSON}. Returns the GeoJSON value as a JS object. + * + * @return {Object} + */ + public value(): GeoJSONType; + +} +/** + * Return type of {@link UdfJob.info} and {@link IndexJob.info} + */ +export interface SindexInfo { + /** + * Percentage indicating the progress of secondary index populate phase. + */ + load_pct: number; +} + +/** + * Job class for waiting for UDF module registration/deregistration + * to complete across an entire Aerospike cluster. 
+ * + * + * @see {@link Client#udfRegister} + * @see {@link Client#udfRemove} + * + * @example + * + * const Aerospike = require('aerospike') + * + * let path = './udf/my_module.lua' + * + * Aerospike.connect() + * .then(client => { + * client.udfRegister(path) + * .then(job => job.wait()) + * .then(() => { + * console.info('UDF module %s was registered successfully', path) + * client.close() + * }) + * .catch(error => { + * console.error('Error registering UDF module:', error) + * client.close() + * }) + * }) + * .catch(error => console.error('Error connecting to cluster:', error)) + */ +export class UdfJob extends Job { + /** + * Path to UDF. + */ + udfModule: string; + /** + * UDF Command type. Acceptable values are {@link REGISTER} and {@link UNREGISTER} + */ + command: string; + /** + * UDF Register command code. + */ + static REGISTER: string; + /** + * UDF un-register command code. + */ + static UNREGISTER: string; + /** + * Constructs a new UdfJob instance. + */ + constructor(client: Client, namespace: string, indexName: string); +} + +/** + * Potentially long-running index creation job. + * + * @see {@link Client#createIndex} + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
+ * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * let binName = 'food' + * let indexName = 'foodIndex' + * let options = { + * ns: 'test', + * set: 'demo', + * bin: binName, + * index: indexName, + * datatype: Aerospike.indexDataType.STRING + * } + * + * Aerospike.connect(config) + * .then(client => { + * client.put(new Aerospike.Key('test', 'demo', 'mykey1'), {location: "Kale"}) + * .then((result) => { + * client.createIndex(options) + * .then(job => job.wait()) + * .then(() => { + * console.info('secondary index (SI) %s on %s was created successfully', indexName, binName) + * client.indexRemove('test', indexName) + * .then(() => { + * client.close() + * }) + * .catch(error => { + * console.error('Error removing index:', error) + * client.close() + * }) + * }) + * .catch(error => { + * console.error('Error creating index:', error) + * client.close() + * }) + * }) + * .catch(error => { + * console.error('Error writing record:', error) + * client.close() + * }) + * }) + * .catch(error => console.error('Error connecting to cluster:', error)) + */ +export class IndexJob extends Job { + /** + * Namespace for the Job. + */ + public namespace: string; + /** + * Name of the Secondary Index. + */ + public indexName: string; + /** + * Constructs + */ + constructor(client: Client, namespace: string, indexName: string); +} + +/** + * Details the progress of a {@link Job}. + */ +export interface JobInfoResponse { + /** + * progress percentage of the job. + */ + progressPct: number; + /** + * number of scanned records. + */ + recordsRead: number; + /** + * current completion status. See {@link jobStatus} for possible values. + */ + status: jobStatus; +} + +/** + * Potentially long-running background job. 
+ * + * @see {@link Scan#background} + * @see {@link Query#background} + */ +export class Job { + /** + * Client instance managing the {@link Job} + */; + public client: Client; + /** + * Identification number asssociated with the Job. + */ + public jobID: number; + /** + * Database operation associated with the Job. `query` and `scan` are the possible values` + */ + public module: string; + /** + * Constructs a new Job instance + */ + constructor(client: Client, jobID: number, module: string); + /** + * For internal use only. + */ + private static safeRandomJobID(): number; + /** + * Repeatedly execute the given status function until it either indicates that + * the job has completed or returns an error. + * + */ + private static pollUntilDone(statusFunction: () => Promise, pollInterval?: number): Promise; + /** + * For internal use only. + */ + private hasCompleted(info: JobInfoResponse): boolean; + /** + * Fetch job info once to check if the job has completed. + */ + private checkStatus(): Promise; + /** + * + * Check the progress of a background job running on the database. + * + * @param policy - The Info Policy to use for this operation. + * + * @return A Promise that resolves to the job info. + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * scan : new Aerospike.ScanPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var scan = client.scan('test', 'demo') + * scan.background('myUdfModule', 'myUdfFunction', (error, job) => { + * if (error) throw error + * var timer = setInterval(() => { + * job.info((error, info) => { + * if (error) throw error + * console.info('scan status: %d (%d%% complete, %d records scanned)', info.status, info.progressPct, info.recordsRead) + * if (info.status === Aerospike.jobStatus.COMPLETED) { + * console.info('scan completed!') + * clearInterval(timer) + * client.close() + * } + * }) + * }, 1000) + * }) + * }) + */ + public info(policy?: policy.InfoPolicy): Promise; + /** + * @param callback - The function to call with the job info response. + */ + public info(callback: TypedCallback): void; + /** + * @param policy - The Info Policy to use for this operation. + * @param callback - The function to call with the job info response. + */ + public info(policy: policy.InfoPolicy, callback: TypedCallback): void; + /** + * + * Wait until the task has been completed. + * + * @param pollInterval - Interval in milliseconds to use when polling the cluster nodes. Default is 1000 (ms) + * + * @return A Promise that resolves once the job is completed. + */ + public wait(pollInterval?: number): Promise; + /** + * @param callback - The function to call when the task has completed. + */ + public wait(callback: TypedCallback): void; + /** + * @param pollInterval - Interval in milliseconds to use when polling the cluster nodes. Default is 1000 (ms) + * @param callback - The function to call when the task has completed. + */ + public wait(pollInterval: number, callback: TypedCallback): void; + /** + * + * Alias for {@link wait}. See {@link wait} for usage examples and more. + * + * @param pollInterval - Interval in milliseconds to use when polling the cluster nodes. 
Default is 1000 (ms) + * + * @return A Promise that resolves to the job info. + */ + public waitUntilDone(pollInterval?: number): Promise; + /** + * @param callback - The function to call with the job info response. + */ + public waitUntilDone(callback: TypedCallback): void; + /** + * @param pollInterval - Interval in milliseconds to use when polling the cluster nodes. Default is 1000 (ms) + * @param callback - The function to call with the job info response. + */ + public waitUntilDone(pollInterval: number, callback: TypedCallback): void; +} + +/** + * @class Key + * + * A key uniquely identifies a record in the Aerospike database within a given namespace. + * + * @remarks + * + * ###### Key Digests + * In your application, you must specify the namespace, set and the key itself + * to read and write records. When a key is sent to the database, the key value + * and its set are hashed into a 160-bit digest. When a database operation + * returns a key (e.g. Query or Scan operations) it might contain either the + * set and key value, or just the digest. + * + * @param ns - The Namespace to which the key belongs. + * @param set - The Set to which the key belongs. + * @param key - The unique key value. Keys can be + * strings, integers or an instance of the Buffer class. + * @param digest - The digest value of the key. + * + * @example Creating a new {@link Key} instance + * + * const Aerospike = require('aerospike') + * const Key = Aerospike.Key + * + * var key1 = new Key('test', 'demo', 12345) + * var key2 = new Key('test', 'demo', 'abcde') + * var key3 = new Key('test', 'demo', Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72])) + */ +export class Key implements KeyOptions { + /** + * The Namespace to which the key belongs. + */ + public ns: string; + /** + * he Set to which the key belongs. + */ + public set: string; + /** + * The unique key value. Keys can be + * strings, integers or an instance of the Buffer class. 
+ */ + public key: string | number | Buffer; + /** + * The digest value of the key. + */ + public digest: Buffer | undefined; + /** + * Constructs a new Key instance. + * + * @param ns - The Namespace to which the key belongs. + * @param set - The Set to which the key belongs. + * @param key - The unique key value. Keys can be + * strings, integers or an instance of the Buffer class. + * @param digest - The digest value of the key. + */ + constructor(ns?: string | null, set?: string | null, key?: string | number | Buffer | BigInt | null, digest?: Buffer | null); + private static fromASKey(keyObj: KeyOptions): Key; + /** + * Compare the equality of two keys. + * + * @param other - {#Key} or {@link KeyOptions} Object for comparison. + */ + public equals(other: KeyOptions): boolean; +} + + +export interface RecordMetadata { + /** + * The record's remaining time-to-live in seconds, before the record will + * expire and be removed by the server. + */ + ttl?: number; + /** + * Record modification count. + */ + gen?: number; +} + +/** + * Stream of database records (full or partial) returned by {@link Query} or {@link Scan} operations. + * + * @remarks *Note:* Record stream currently does not support Node.js' + * Stream#pause and Stream#resume methods, i.e. it + * always operates in flowing mode. That means data is read from the Aerospike + * database and provided to your application as fast as possible. If no data + * event handlers are attached, then data will be lost. + * + * #### Aborting a Query/Scan + * + * A query or scan operation can be aborted by calling the {@link + * RecordStream#abort} method at any time. It is no possible to continue a + * record stream, once aborted. + * + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * scan : new Aerospike.ScanPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * var recordsSeen = 0 + * var scan = client.scan('test', 'demo') + * var stream = scan.foreach() + * + * stream.on('error', (error) => { + * console.error(error) + * throw error + * }) + * stream.on('data', (record) => { + * recordsSeen++ + * console.log(record) + * if (recordsSeen > 1000) { + * stream.abort() // We've seen enough! + * } + * }) + * stream.on('end', () => { + * console.info(stream.aborted ? 'scan aborted' : 'scan completed') + * client.close() + * }) + * }) + */ +export class RecordStream extends Stream { + /** + * true if the scan has been aborted by the user; false otherwise. + * @see {@link RecordStream#abort} + */ + public aborted: boolean; + /** + * A {@link Client} instance. + */ + public client: Client; + public writable: false; + public readable: true; + public _read(): void; + /** + * Aborts the query/scan operation. + * + * Once aborted, it is not possible to resume the stream. + * + * @since v2.0 + */ + public abort(): void; + /** + * @event 'data' + * @param listener - Function executed when data is received. + * Aerospike record incl. bins, key and meta data. + * Depending on the operation, all, some or no bin values will be returned. + */ + public on(event: 'data', listener: (record: AerospikeRecord) => void): this; + /** + * @event 'error' + * @param listener - Function executed upon receipt of an error. + */ + public on(event: 'error', listener: (error: AerospikeError) => void): this; + /** + * @event 'end' + * @param listener - Function executed upon query end. + * If set to a valid serialized query, calling {@link Query.foreach} will allow the + * next page of records to be queried while preserving the progress + * of the previous query. If set to null, calling {@link Query.foreach} will begin a new query. 
+ */ + public on(event: 'end', listener: (state: number[]) => void): this; +} + +// scan.js +export interface ScanOptions { + /** + * List of bin names to select. See + * {@link Scan#select}. + */ + select?: string[]; + /** + * Whether only meta data should be + * returned. See {@link Scan#nobins}. + */ + nobins?: boolean; + /** + * Whether all cluster nodes + * should be scanned concurrently. See {@link Scan#concurrent}. + */ + concurrent?: boolean; + /** + * The time-to-live (expiration) of the record in seconds. + * See {@link Scan#ttl}. + */ + ttl?: number; + /** + * Enables pagination. + */ + paginate?: boolean; + /** + * If set to a valid serialized scan, calling {@link Scan#foreach} will allow the next page of records to be queried while preserving the progress + * of the previous scan. If set to null, calling {@link Scan#foreach} will begin a new scan. + */ + scanState?: number[]; + +} +/** + * + * @deprecated since server 6.0 + * + * @remarks The scan object created by calling {@link Client#scan} is used + * for executing record scans on the specified namespace and set (optional). + * Scans can return a set of records as a {@link RecordStream} or apply an + * Aerospike UDF (user-defined function) on each of the records on the server. + * + * #### Scan is obsolete in server 6.0 + * Use query methods implemented by {@link Client#query}. + * For more information, please refer to the section on + * ⇑Historical evolution of scan features + * in the Aerospike technical documentation. + * + * #### Selecting Bins + * + * Using {@link Scan#select} it is possible to select a subset of bins which + * should be returned by the query. If no bins are selected, then the whole + * record will be returned. If the {@link Scan#nobins} property is set to + * true the only the record meta data (ttl, generation, etc.) will + * be returned. + * + * #### Executing a Scan + * + * A scan is executed using {@link Scan#foreach}. 
The method returns a {@link + * RecordStream} which emits a data event for each record returned + * by the scan. The scan can be aborted at any time by calling + * {@link RecordStream#abort}. + * + * #### Executing Record UDFs using Background Scans + * + * Record UDFs perform operations on a single record such as updating records + * based on a set of parameters. Using {@link Scan#background} you can run a + * Record UDF on the result set of a scan. Scans using Records UDFs are run + * in the background on the server and do not return the records to the client. + * + * For additional information please refer to the section on + * ⇑Record UDFs + * in the Aerospike technical documentation. + * + * #### Scan pagination + * + * Scan pagination allows for queries return records in pages rather than all at once. + * To enable scan pagination, the scan property {@link Scan#paginate} must be true + * and the previously stated scan policy {@link ScanPolicy#maxRecords} must be set to a + * nonzero positive integer in order to specify a maximum page size. + * + * When a page is complete, {@link RecordStream} event {@link RecordStream#on 'error'} will + * emit a {@link Scan#scanState} object containing a serialized version of the scan. + * This serialized scan, if be assigned back to {@link Scan#scanState}, allows the scan + * to retrieve the next page of records in the scan upon calling {@link Scan#foreach}. + * If {@link RecordStream#on 'error'} emits an undefined object, either {@link Scan#paginate} + * is not true, or the scan has successfully returned all the specified records. + * + * For additional information and examples, please refer to the {@link Scan#paginate} section + * below. + * + * @param {Client} client - A client instance. + * @param {string} ns - The namescape. + * @param {string} set - The name of a set. + * @param {object} [options] - Scan parameters. + * @param {Array} [options.select] - List of bin names to select. See + * {@link Scan#select}. 
+ * @param {boolean} [options.nobins=false] - Whether only meta data should be + * returned. See {@link Scan#nobins}. + * @param {boolean} [options.concurrent=false] - Whether all cluster nodes + * should be scanned concurrently. See {@link Scan#concurrent}. + * @param {boolean} [options.ttl=0] - The time-to-live (expiration) of the record in seconds. + * See {@link Scan#ttl}. + * + * + * @see {@link Client#scan} to create new instances of this class. + * + * @since v2.0 + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * scan : new Aerospike.ScanPolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * const scan = client.scan('test', 'demo') + * let recordsSeen = 0 + * const stream = scan.foreach() + * stream.on('error', (error) => { throw error }) + * stream.on('end', () => client.close()) + * stream.on('data', (record) => { + * console.log(record) + * recordsSeen++ + * if (recordsSeen > 100) stream.abort() // We've seen enough! + * }) + * }) + */ +export class Scan { + /** + * Client instance. + */ + public client: Client; + /** + * Namespace to scan. + */ + public ns: string; + /** + * Name of the set to scan. + */ + public set: string; + /** + * List of bin names to be selected by the scan. If a scan specifies bins to + * be selected, then only those bins will be returned. If no bins are + * selected, then all bins will be returned (unless {@link Scan#nobins} is + * set to true). + * + * @see Use {@link Scan#select} to specify the bins to select. + */ + public selected?: string[]; + /** + * If set to true, the scan will return only meta data, and exclude bins. 
+ * + */ + public nobins?: boolean; + /** + * If set to true, all cluster nodes will be scanned in parallel. + * + */ + public concurrent?: boolean; + /** + * + */ + public udf?: UDF; + /** + * + */ + public ops?: operations.Operation[]; + /** + * + */ + public ttl?: number; + /** + * If set to true, paginated queries are enabled. In order to receive paginated + * results, the {@link ScanPolicy#maxRecords} property must assign a nonzero integer value. + * + * + * @example Asynchronous pagination over a set of thirty records with {@link Scan#foreach}. + * + * const Aerospike = require('./lib/aerospike'); + * // Define host configuration + * let config = { + * hosts: '34.213.88.142:3000', + * policies: { + * batchWrite : new Aerospike.BatchWritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * }; + * + * var batchRecords = [] + * for(let i = 0; i < 30; i++){ + * batchRecords.push({ + * type: Aerospike.batchType.BATCH_WRITE, + * key: new Aerospike.Key('test', 'demo', 'key' + i), + * ops:[Aerospike.operations.write('exampleBin', i)] + * }) + * } + * + * ;(async function() { + * try { + * client = await Aerospike.connect(config) + * await client.truncate('test', 'demo', 0) + * await client.batchWrite(batchRecords, {socketTimeout : 0, totalTimeout : 0}) + * + * const scan = client.scan('test', 'demo', {paginate: true}) + * do { + * const stream = scan.foreach({maxRecords: 11}) + * stream.on('error', (error) => { throw error }) + * stream.on('data', (record) => { + * console.log(record.bins) + * }) + * await new Promise(resolve => { + * stream.on('end', (scanState) => { + * scan.nextPage(scanState) + * console.log(scan.scanState) + * resolve() + * }) + * }) + * } while (scan.hasNextPage()) + * + * } catch (error) { + * console.error('An error occurred at some point.', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + * @example Asynchronous pagination over a set of thirty records with {@link Scan#foreach}. 
+ * + * const Aerospike = require('./lib/aerospike'); + * // Define host configuration + * let config = { + * hosts: '34.213.88.142:3000', + * policies: { + * batchWrite : new Aerospike.BatchWritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * }; + * + * var batchRecords = [] + * for(let i = 0; i < 30; i++){ + * batchRecords.push({ + * type: Aerospike.batchType.BATCH_WRITE, + * key: new Aerospike.Key('test', 'demo', 'key' + i), + * ops:[Aerospike.operations.write('exampleBin', i)] + * }) + * } + * + * ;(async function() { + * try { + * client = await Aerospike.connect(config) + * await client.truncate('test', 'demo', 0) + * await client.batchWrite(batchRecords, {socketTimeout : 0, totalTimeout : 0}) + * + * const scan = client.scan('test', 'demo', {paginate: true}) + * let allResults = [] + * let results = await scan.results({maxRecords: 11}) + * allResults = [...allResults, ...results] + * + * + * results = await scan.results({maxRecords: 11}) + * allResults = [...allResults, ...results] + * + * results = await scan.results({maxRecords: 11}) + * allResults = [...allResults, ...results] + * + * console.log("Records returned in total: " + allResults.length) // Should be 30 records + * + * } catch (error) { + * console.error('An error occurred at some point.', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + */ + public paginate?: boolean; + /** + * If set to true, the scan will return only those belongs to partitions. + * + */ + private pfEnabled?: boolean; + /** + * If set to a valid serialized scan, calling {@link Scan#foreach} will allow the next page of records to be queried while preserving the progress + * of the previous scan. If set to null, calling {@link Scan#foreach} will begin a new scan. + */ + public scanState?: number[]; + /** + * Constructs a new Scan instance. 
+ */ + constructor(client: Client, ns: string, set: string, options?: ScanOptions); + /** + * Checks compiliation status of a paginated scan. + * + * @remarks If false is returned, there are no more records left in the scan, and the scan is complete. + * If true is returned, calling {@link Scan#foreach} will continue from the state specified by {@link Scan#scanState}. + * + * @returns `True` if a next page exists, `false` otherwise + */ + public hasNextPage(): boolean; + /** + * Sets {@link Scan#scanState} to the value specified by the state argument. + * + * @remarks setter function for the {@link Scan#scanState} member variable. + * + * @param state - serialized scan emitted from the {@link RecordStream#on 'error'} event. + */ + public nextPage(state: number[]): void; + /** + * + * Specify the begin and count of the partitions + * to be scanned by the scan foreach op. + * + * @remarks If a scan specifies partitions begin and count, + * then only those partitons will be scanned and returned. + * If no partitions are specified, + * then all partitions will be scanned and returned. + * + * @param begin - Start partition number to scan. + * @param count - Number of partitions from the start to scan. + * @param digest - Start from this digest if it is specified. + */ + public partitions(begin: number, count: number, digest?: Buffer) + /** + * + * Specify the names of bins to be selected by the scan. + * + * @remarks If a scan specifies bins to be selected, then only those bins + * will be returned. If no bins are selected, then all bins will be returned. + * (Unless {@link Scan#nobins} is set to true.) + * + * @param bins - List of bin names to return. + */ + public select(bins: string[]): void; + /** + * Specify the names of bins to be selected by the scan. + * + * @remarks If a scan specifies bins to be selected, then only those bins + * will be returned. If no bins are selected, then all bins will be returned. + * (Unless {@link Scan#nobins} is set to true.) 
+ * + * @param bins - Spread of bin names to return. + */ + public select(...bins: string[]): void; + /** + * Perform a read-write background scan and apply a Lua user-defined + * function (UDF) to each record. + * + * @remarks When a background scan is initiated, the client will not wait + * for results from the database. Instead a {@link Job} instance will be + * returned, which can be used to query the scan status on the database. + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Scan Policy to use for this operation. + * @param scanID - Job ID to use for the scan; will be assigned + * randomly if zero or undefined. + * + * @returns A Promise that resolves to a Job instance. + */ + public background(udfModule: string, udfFunction: string, udfArgs?: AerospikeBinValue[], policy?: policy.ScanPolicy, scanID?: number): Promise; + /** + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param callback - The function to call when the operation completes. + */ + public background(udfModule: string, udfFunction: string, callback: TypedCallback): void; + /** + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param callback - The function to call when the operation completes. + */ + public background(udfModule: string, udfFunction: string, udfArgs: AerospikeBinValue[], callback: TypedCallback): void; + /** + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Scan Policy to use for this operation. + * @param callback - The function to call when the operation completes. 
+ */ + public background(udfModule: string, udfFunction: string, udfArgs: AerospikeBinValue[], policy: policy.ScanPolicy, callback: TypedCallback): void; + /** + * + * @param udfModule - UDF module name. + * @param udfFunction - UDF function name. + * @param udfArgs - Arguments for the function. + * @param policy - The Scan Policy to use for this operation. + * @param scanID - Job ID to use for the scan; will be assigned + * randomly if zero or undefined. + * @param callback - The function to call when the operation completes. + */ + public background(udfModule: string, udfFunction: string, udfArgs: AerospikeBinValue[], policy: policy.ScanPolicy, scanID: number, callback: TypedCallback): void; + /** + * Applies write operations to all matching records. + * + * @remarks Performs a background scan and applies one or more write + * operations to all records. Neither the records nor the results of the + * operations are returned to the client. Instead a {@link Job} instance will + * be returned, which can be used to query the scan status. + * + * This method requires server >= 3.7.0. + * + * @param operations - List of write + * operations to perform on the matching records. + * @param policy - The Scan Policy to use for this operation. + * @param scanID - Job ID to use for the scan; will be assigned + * randomly if zero or undefined. + * + * @returns A Promise that resolves to a Job instance. + * + * @since v3.14.0 + * + * @example Increment count bin on all records in set using a background scan + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * scan : new Aerospike.ScanPolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * Aerospike.connect(config).then(async (client) => { + * const scan = client.scan('namespace', 'set') + * const ops = [Aerospike.operations.incr('count', 1)] + * const job = await scan.operate(ops) + * await job.waitUntilDone() + * client.close() + * }) + */ + public operate(operations: operations.Operation[], policy?: policy.ScanPolicy, scanID?: number): Promise; + /** + * @param operations - List of write + * operations to perform on the matching records. + * @param policy - The Scan Policy to use for this operation. + * @param scanID - Job ID to use for the scan; will be assigned + * randomly if zero or undefined. + * @param callback - The function to call when the operation completes. + */ + public operate(operations: operations.Operation[], policy: policy.ScanPolicy, scanID: number, callback: TypedCallback): void; + /** + * + * Performs a read-only scan on each node in the cluster. As the scan + * iterates through each partition, it returns the current version of each + * record to the client. + * + * @param policy - The Scan Policy to use for this operation. + * @param dataCb - The function to call when the + * operation completes with the results of the operation; if no callback + * function is provided, the method returns a Promise instead. + * @param errorCb - Callback function called when there is an error. + * @param endCb - Callback function called when an operation has completed. + */ + public foreach(policy?: policy.ScanPolicy | null, dataCb?: (data: AerospikeRecord) => void, errorCb?: (error: Error) => void, endCb?: () => void): RecordStream; +} + +/** + * + * The info protocol provides access to configuration and + * statistics for the Aerospike server. This module provides the {@link + * info.parse | parse} utility function for parsing the info + * data returned by the Aerospike server. 
+ * + * @see {@link Client#info} + * @see ⇑Info Command Reference + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var cmd = 'build\nfeatures' + * client.infoAny(cmd, (err, infoStr) => { + * if (err) { + * console.error('error retrieving info for cmd "%s": %s [%d]', + * cmd, err.message, err.code) + * } else { + * var info = Aerospike.info.parse(infoStr) + * console.log(info) // => { build: '3.12.0', + * // features: [ + * // 'cdt-list', + * // 'pipelining', + * // 'geo', + * // ..., + * // 'udf' ] } + * } + * client.close() + * }) + * }) + */ +export namespace info { + /** + * + * Parses the info string returned from a cluster node into key-value pairs. + * + * @param info - The info string returned by the cluster node. + * + * @returns key-value pairs in an {@link Record} + * + * @since v2.6 + */ + export function parse(info: string): Record; +} + +/** + * The {@link features} module contains a list of the + * feature strings used by the Aerospike server. + * + */ +export namespace features { + /** + * CDT_MAP feature string. + */ + export const CDT_MAP: 'cdt-map'; + /** + * CDT_LIST feature string. + */ + export const CDT_LIST: 'cdt-list'; + /** + * BLOB_BITS feature string. + */ + export const BLOB_BITS: 'blob-bits'; +} + +export const Record: typeof AerospikeRecord; +export function print(err: Error, result: any): void; +/** + * + * Release event loop resources held by the module, which could keep + * the Node.js event loop from shutting down properly. + * + * @remarks This method releases some event loop resources held by the + * Aerospike module and the Aerospike C client library, such as libuv handles + * and timers. If not released, these handles will prevent the Node.js event + * loop from shutting down, i.e. 
it will keep your application from + * terminating. + * + * The Aerospike module keeps an internal counter of active {@link Client} + * instances, i.e. instances which have not been close()'d yet. If + * a client is closed and the counter reaches zero, this method will be called + * automatically, unless {@link Client#close} is called with + * releaseEventLoop set to false. (The default is + * true.) + * + * If an application needs to create multiple client instance, i.e. to connect + * to multiple, different clusters, the event loop resources will be managed + * automatically, as long as at least once client instance is active at any + * given time, until the application terminates. + * + * If, however, there could be one or more intermittent time periods, during + * which no client is active (i.e. the internal client counter reaches zero), + * then the clients need to be closed with releaseEventLoop set + * to false and the event loop needs to be released explicitly by + * calling releaseEventLoop(). + */ +export function releaseEventLoop(): void; +/** + * Creates a new {@link Client} instance. + * + * @param {Config} [config] - The configuration for the client. + */ +export function client(config?: ConfigOptions): Client; +/** + * Creates a new {@link Client} instance and connects to the Aerospike cluster. + * + * @param config - The configuration for the client. + * + * @return A Promise resolving to the connected client. + * + * @example Connection can be established using the {@link module:aerospike|aerospike} module. + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (err, client) => { + * console.log("Connected. Closing now.") + * client.close() + * }) + * + * @example Connection can also be established using the {@link Client} module. 
+ * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * const client = Aerospike.client(config) + * client.connect((err) => { + * console.log("Connected. Closing now.") + * client.close() + * }) + * + * @example A connection established using callback function. + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * Aerospike.connect(config, (error, client) => { + * if (error) { + * console.error('Failed to connect to cluster: %s', error.message) + * process.exit() + * } else { + * // client is ready to accept commands + * console.log("Connected. Now closing connection.") + * client.close() + * } + * }) + * + * @example A connection established by returning a Promise. + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * Aerospike.connect(config) + * .then(client => { + * // client is ready to accept commands + * console.log("Connected. Now Closing Connection.") + * client.close() + * }) + * .catch(error => { + * console.error('Failed to connect to cluster: %s', error.message) + * }) + */ +export function connect(config?: ConfigOptions): Promise; +/** + * @param callback - The function to call, once the client is connected to the cluster successfully. + */ +export function connect(callback: TypedCallback): Client; +/** + * @param config - The configuration for the client. + * @param callback - The function to call, once the client is connected to the cluster successfully. + */ +export function connect(config: ConfigOptions, callback: TypedCallback): Client; +/** + * Sets the global, default log level and destination. 
The default log settings + * are used for all new client instances, unless different log settings are + * supplied in the client's configuration. + * + * The global log settings are also used to control the logging of the Aerospike + * C client SDK which is included in the aerospike native add-on. + * The C client SDK log settings are global and cannot be set separately per + * {@link Client} instance. + * + * @param {Object} logInfo - {@link Log} object containing a {@link Log.level} and a {@link Log.file}. + * + * @since v3.1.0 + * + * @example + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.setDefaultLogging({ + * level: Aerospike.log.TRACE + * }) + * + * Aerospike.connect(config, (error, client) => { + * if (error) { + * console.error('Failed to connect to cluster: %s', error.message) + * process.exit() + * } else { + * // client is ready to accept commands + * console.log("Connected. Now closing connection.") + * client.close() + * } + * }) + */ +export function setDefaultLogging(logInfo: Log): void; +/** + * Configures the global command queue. (Disabled by default.) + * + * @remarks Note that there is only one instance of the command queue that + * is shared by all client instances, even client instances connected to + * different Aerospike clusters. The setupGlobalCommandQueue + * method must be called before any client instances are connected. + * + * @param policy - Set of policy values governing the + * behaviour of the global command queue. + * + * @see {@link CommandQueuePolicy} for more information about the use of the + * command queue. + */ +export function setupGlobalCommandQueue(policy: policy.CommandQueuePolicy): void; + +/* INTERFACES */ + +/** + * Option specification for {@link AdminPolicy} class values.
+ */ +export interface AdminPolicyOptions extends BasePolicyOptions { + /** + * Maximum time in milliseconds to wait for the operation to complete. + * + * @type number + */ + timeout?: number; +} + +/** + * Option specification for {@link ApplyPolicy} class values. + */ +export interface ApplyPolicyOptions extends BasePolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; + /** + * The time-to-live (expiration) of the record in seconds. + * + */ + ttl?: number; +} +/** + * Option specification for {@link BasePolicy} class values. + */ +export interface BasePolicyOptions { + /** + * Use zlib compression on write or batch read commands when the command + * buffer size is greater than 128 bytes. In addition, tell the server to + * compress its response on read commands. The server response compression + * threshold is also 128 bytes. + * + * This option will increase cpu and memory usage (for extra compressed + * buffers), but decrease the size of data sent over the network. + * + * Requires Enterprise Server version >= 4.8. + * + * @default: false + * @since v3.14.0 + */ + compress?: boolean; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases.
+ * + * expression filters can only be applied to the following commands: + * * {@link Client.apply} + * * {@link Client.batchExists} + * * {@link Client.batchGet} + * * {@link Client.batchRead} + * * {@link Client.batchSelect} + * * {@link Client.exists} + * * {@link Client.get} + * * {@link Client.operate} + * * {@link Client.put} + * * {@link Client.remove} + * * {@link Client.select} + */ + filterExpression?: AerospikeExp; + /** + * Maximum number of retries before aborting the current transaction. + * The initial attempt is not counted as a retry. + * + * If maxRetries is exceeded, the transaction will return + * error {@link statusNamespace.ERR_TIMEOUT|ERR_TIMEOUT}. + * + * WARNING: Database writes that are not idempotent (such as "add") + * should not be retried because the write operation may be performed + * multiple times if the client timed out previous transaction attempts. + * It is important to use a distinct write policy for non-idempotent + * writes which sets maxRetries to zero. + * + * @default: 2 (initial attempt + 2 retries = 3 attempts) + */ + maxRetries?: number; + /** + * Socket idle timeout in milliseconds when processing a database command. + * + * If socketTimeout is not zero and the socket has been idle + * for at least socketTimeout, both maxRetries + * and totalTimeout are checked. If maxRetries + * and totalTimeout are not exceeded, the transaction is + * retried. + * + * If both socketTimeout and totalTimeout are + * non-zero and socketTimeout > totalTimeout, + * then socketTimeout will be set to + * totalTimeout. If socketTimeout is zero, there + * will be no socket idle limit. + * + * @default 0 (no socket idle time limit). + */ + socketTimeout?: number; + /** + * Total transaction timeout in milliseconds. + * + * The totalTimeout is tracked on the client and sent to the + * server along with the transaction in the wire protocol. 
The client will + * most likely timeout first, but the server also has the capability to + * timeout the transaction. + * + * If totalTimeout is not zero and totalTimeout + * is reached before the transaction completes, the transaction will return + * error {@link statusNamespace.ERR_TIMEOUT|ERR_TIMEOUT}. + * If totalTimeout is zero, there will be no total time limit. + * + * @default 1000 + */ + totalTimeout?: number; + +} + +/** + * Option specification for {@link BatchApplyPolicy} class values. + */ +export interface BatchApplyPolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + */ + filterExpression?: AerospikeExp; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; + + /** + * The time-to-live (expiration) of the record in seconds. + */ + ttl?: number; +} + + +/** + * Option specification for {@link BatchPolicy} class values. + */ +export interface BatchPolicyOptions extends BasePolicyOptions { + /** + * Allow batch to be processed immediately in the server's receiving thread for in-memory + * namespaces. If false, the batch will always be processed in separate service threads. + * + * @default true + */ + allowInline?: boolean; + /** + * Allow batch to be processed immediately in the server's receiving thread for SSD + * namespaces.
If false, the batch will always be processed in separate service threads. + * Server versions < 6.0 ignore this field. + * + * Inline processing can introduce the possibility of unfairness because the server + * can process the entire batch before moving onto the next command. + * + * @default false + */ + allowInlineSSD?: boolean; + /** + * Determine if batch commands to each server are run in parallel threads. + * + * Values: + * false: Issue batch commands sequentially. This mode has a performance advantage for small + * to medium sized batch sizes because commands can be issued in the main transaction thread. + * This is the default. + * true: Issue batch commands in parallel threads. This mode has a performance + * advantage for large batch sizes because each node can process the command immediately. + * The downside is extra threads will need to be created (or taken from + * a thread pool). + * + * @default false + */ + concurrent?: boolean; + /** + * Should CDT data types (Lists / Maps) be deserialized to JS data types + * (Arrays / Objects) or returned as raw bytes (Buffer). + * + * @default true + * @since v3.7.0 + */ + deserialize?: boolean; + /** + * Read policy for AP (availability) namespaces. + * + * @default {@link policy.readModeAP.ONE} + * @see {@link policy.readModeAP} for supported policy values. + */ + readModeAP?: policy.readModeAP; + /** + * Read policy for SC (strong consistency) namespaces. + * + * @default {@link policy.readModeSC.SESSION} + * @see {@link policy.readModeSC} for supported policy values. + */ + readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. 
+ * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * + * @default 0 + */ + readTouchTtlPercent?: number; + /** + * Algorithm used to determine target node. + * + * @default {@link policy.replica.MASTER} + * @see {@link policy.replica} for supported policy values. + */ + replica?: policy.replica; + /** + * Should all batch keys be attempted regardless of errors. This field is used on both + * the client and server. The client handles node specific errors and the server handles + * key specific errors. + * + * If true, every batch key is attempted regardless of previous key specific errors. + * Node specific errors such as timeouts stop keys to that node, but keys directed at + * other nodes will continue to be processed. + * + * If false, the server will stop the batch to its node on most key specific errors. + * The exceptions are AEROSPIKE_ERR_RECORD_NOT_FOUND and AEROSPIKE_FILTERED_OUT + * which never stop the batch. The client will stop the entire batch on node specific + * errors for sync commands that are run in sequence (concurrent == false). The client + * will not stop the entire batch for async commands or sync commands run in parallel. + * + * Server versions < 6.0 do not support this field and treat this value as false + * for key specific errors. + * + * @default true + */ + respondAllKeys?: boolean; + /** + * Send set name field to server for every key in the batch. This is only + * necessary when authentication is enabled and security roles are defined + * on a per-set basis. + * + * @default false + */ + sendSetName?: boolean; +} +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface BatchReadPolicyOptions { + /** + * Optional expression filter. 
If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + */ + filterExpression?: AerospikeExp; + /** + * Read policy for AP (availability) namespaces. + * + * @default {@link policy.readModeAP.ONE} + * @see {@link policy.readModeAP} for supported policy values. + */ + readModeAP?: policy.readModeAP; + /** + * Read policy for SC (strong consistency) namespaces. + * + * @default {@link policy.readModeSC.SESSION} + * @see {@link policy.readModeSC} for supported policy values. + */ + readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. + * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * + * @default 0 + */ + readTouchTtlPercent?: number; +} + +/** + * Interface used for creating BatchRead record objects. + */ +export interface BatchReadRecord { + /** + * List of bins to retrieve. + */ + bins?: string[]; + /** + * A {@link key} uniquely identifies a record in the Aerospike database within a given namespace. + */ + key: Key; + /** + * List of {@link operations|operations} + */ + ops?: operations.Operation[] + /** + * The Batch Policy to use for this operation. + */ + policy?: BatchPolicyOptions; + /** + * Whether to retrieve all bins or + * just the meta data of the record. 
If true, ignore bins and read + * all bins; if false and bins is specified, read specified bins; + * if false and bins is not specified, read only record meta data + * (generation, expiration, etc.) + */ + readAllBins?: boolean; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface BatchRemovePolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + * + */ + filterExpression?: AerospikeExp; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + gen?: policy.gen; + /** + * The generation of the record. + */ + generation?: number; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface BatchWritePolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. 
+ */ + commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Specifies the behavior for the existence of the record. + * + * @see {@link policy.exists} for supported policy values. + */ + exists?: policy.exists; + /** + * Optional expression filter. If filter exp exists and evaluates to false, the + * transaction is ignored. This can be used to eliminate a client/server roundtrip + * in some cases. + */ + filterExpression?: AerospikeExp; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + gen?: policy.gen; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; + /** + * The time-to-live (expiration) of the record in seconds. + */ + ttl?: number; +} + +/** + * Interface used for creating BatchWrite record objects. + */ +export interface BatchWriteRecord { + /** + * Type of Batch operation + */ + type: batchType; + /** + * A {@link key} uniquely identifies a record in the Aerospike database within a given namespace. + */ + key: Key; + /** + * List of bins to retrieve. + */ + bins?: string[]; + /** + * Whether to retrieve all bins or + * just the meta data of the record. If true, ignore bins and read + * all bins; if false and bins is specified, read specified bins; + * if false and bins is not specified, read only record meta data + * (generation, expiration, etc.) + */ + readAllBins?: boolean; + /** + * List of {@link operations|operations} + */ + ops?: operations.Operation[] + /** + * The Batch Policy to use for this operation. 
+ */ + policy?: BatchWritePolicyOptions; +} + + +/** + * Interface used for creating BatchSelect record objects. + */ +export interface BatchSelectRecord { + status: typeof statusNamespace[keyof typeof statusNamespace]; + key: KeyOptions; + meta?: RecordMetadata; + bins?: AerospikeBins; +} + + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface BitwisePolicyOptions extends BatchPolicyOptions { + /** + * Specifies the behavior when writing byte values. + * + * @default bitwise.writeFlags.DEFAULT + * @see {@link bitwise.writeFlags} for supported policy values. + */ + writeFlags: bitwise.writeFlags; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface CommandQueuePolicyOptions extends BasePolicyOptions { + /** + * Maximum number of commands that can be processed at any point in time. + * Each executing command requires a socket connection. Consuming too many + * sockets can negatively affect application reliability and performance. + * If you do not limit command count in your application, this setting + * should be used to enforce a limit internally in the client. + * + * If this limit is reached, the next command will be placed on the + * client's command queue for later execution. If this limit is zero, all + * commands will be executed immediately and the command queue will not be + * used. (Note: {@link Config#maxConnsPerNode} may still limit number of + * connections per cluster node.) + * + * If defined, a reasonable value is 40. The optimal value will depend on + * the CPU speed and network bandwidth. + * + * @default 0 (execute all commands immediately) + */ + maxCommandsInProcess?: number; + /** + * Maximum number of commands that can be stored in the global command + * queue for later execution. Queued commands consume memory, but they do + * not consume sockets. 
This limit should be defined when it's possible + * that the application executes so many commands that memory could be + * exhausted. + * + * If this limit is reached, the next command will be rejected with error + * code ERR_ASYNC_QUEUE_FULL. If this limit is zero, all + * commands will be accepted into the delay queue. + * + * The optimal value will depend on the application's magnitude of command + * bursts and the amount of memory available to store commands. + * + * @default 0 (no command queue limit) + */ + maxCommandsInQueue?: number; + /** + * Initial capacity of the command queue. The command queue can resize + * beyond this initial capacity. + * + * @default 256 (if command queue is used) + */ + queueInitialCapacity?: number; +} + +export interface ConfigOptions { + /** + * Authentication mode used when user/password is defined. + * + * One of the auth modes defined in {@link auth}. + */ + authMode?: auth; + /** + * Initial host connection timeout in milliseconds. + * + * The client observes this timeout when opening a connection to + * the cluster for the first time. + * + * @default 1000 + */ + connTimeoutMs?: number; + /** + * Expected Cluster Name. + * + * If not null, server nodes must return this + * cluster name in order to join the client's view of the cluster. Should + * only be set when connecting to servers that support the "cluster-name" + * info command. + * + * @since v2.4 + */ + clusterName?: string; + /** + * + * The number of cluster tend iterations that defines the window for {@link maxErrorRate} to be surpassed. One tend iteration is defined + * as {@link tenderInterval} plus the time to tend all nodes. At the end of the window, the error count is reset to zero and backoff state is removed on all nodes. + * + * @type {number} + * + * @default 1 + */ + errorRateWindow?: number; + /** + * List of hosts with which the client should attempt to connect. 
+ * + * If not specified, the client attempts to read the host list + * from the AEROSPIKE_HOSTS environment variable or else falls + * back to use a default value of "localhost". + * + * @example Setting hosts using a string: + * + * const Aerospike = require('aerospike') + * + * const hosts = '192.168.0.1:3000,192.168.0.2:3000' + * const client = await Aerospike.connect({ hosts }) + * + * @example Setting hosts using an array of hostname/port tuples: + * + * const Aerospike = require('aerospike') + * + * const hosts = [ + * { addr: '192.168.0.1', port: 3000 }, + * { addr: '192.168.0.2', port: 3000 } + * ] + * const client = await Aerospike.connect({ hosts }) + * + * @example Setting hosts with TLS name using a string: + * + * const Aerospike = require('aerospike') + * + * const hosts = '192.168.0.1:example.com:3000,192.168.0.2:example.com:3000' + * const client = await Aerospike.connect({ hosts }) + * + * @example Setting hosts using an array of hostname/port/tlsname tuples: + * + * const Aerospike = require('aerospike') + * + * const hosts = [ + * { addr: '192.168.0.1', port: 3000, tlsname: 'example.com' }, + * { addr: '192.168.0.2', port: 3000, tlsname: 'example.com' } + * ] + * const client = await Aerospike.connect({ hosts }) + */ + hosts?: Host[] | string; + + /** + * Configuration for logging done by the client. + * + + * + * @example Enabling debug logging to a separate log file + * + * const Aerospike = require('aerospike') + * + * const fs = require('fs') + * + * var debuglog = fs.openSync('./debug.log', 'w') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * log: { + * level: Aerospike.log.DEBUG, + * file: debuglog + * } + * } + * Aerospike.connect(config, (err, client) => { + * if (err) throw err + * console.log("Connected. Now closing connection.") + * client.close() + * }) + */ + log?: Log; + /** + * Node login timeout in milliseconds. 
+ * + * @type {number} + * @default 5000 + */ + loginTimeoutMs?: number; + /** + * Maximum number of asynchronous connections allowed per server node. + * + * New transactions will be rejected with an {@link + * status.ERR_NO_MORE_CONNECTIONS | ERR_NO_MORE_CONNECTIONS} + * error if the limit would be exceeded. + * * + * @default 100 + */ + maxConnsPerNode?: number; + /** + * Maximum number of errors allowed per node per error_rate_window before backoff algorithm returns + * `AEROSPIKE_MAX_ERROR_RATE` for database commands to that node. If max_error_rate is zero, there is no error limit. + * The counted error types are any error that causes the connection to close (socket errors and client timeouts), + * server device overload and server timeouts. + * + * The application should backoff or reduce the transaction load until `AEROSPIKE_MAX_ERROR_RATE` stops being returned. + * + * If the backoff algorithm has been activated, transactions will fail with {@link + * status.AEROSPIKE_MAX_ERROR_RATE | AEROSPIKE_MAX_ERROR_RATE} until the {@link errorRateWindow} has passed and the + * error count has been reset. + * + * @default 100 + */ + maxErrorRate?: number; + /** + * Maximum socket idle time in seconds. + * + * Connection pools will discard sockets that have been idle + * longer than the maximum. The value is limited to 24 hours (86400). + * + * It's important to set this value to a few seconds less than the server's + * proto-fd-idle-ms (default 60000 milliseconds or 1 minute), + * so the client does not attempt to use a socket that has already been + * reaped by the server. + * + * Connection pools are now implemented by a LIFO stack. Connections at the + * tail of the stack will always be the least used. These connections are + * checked for maxSocketIdle once every 30 tend iterations + * (usually 30 seconds). + * + * + * @default 0 seconds + */ + maxSocketIdle?: number; + /** + * Minimum number of asynchronous connections allowed per server node. 
+ * + * Preallocate min connections on client node creation. The + * client will periodically allocate new connections if count falls below + * min connections. + * + * Server proto-fd-idle-ms may also need to be increased + * substantially if min connections are defined. The + * proto-fd-idle-ms default directs the server to close + * connections that are idle for 60 seconds which can defeat the purpose of + * keeping connections in reserve for a future burst of activity. + * + * If server proto-fd-idle-ms is changed, client {@link + * Config#maxSocketIdle} should also be changed to be a few seconds less + * than proto-fd-idle-ms. + * + * @default 0 + */ + minConnsPerNode?: number; + /** + * Configuration values for the mod-lua user path. + * + * If you are using user-defined functions (UDF) for processing + * query results (i.e. aggregations), then you will find it useful to set + * the modlua settings. Of particular importance is the + * modelua.userPath, which allows you to define a path to where + * the client library will look for Lua files for processing. + * + */ + modlua?: ModLua; + + /** + * The password to use when authenticating to the cluster. + */ + password?: string; + + /** + * Global client policies. + * + * The configuration defines default policies for the + * application. Policies define the behavior of the client, which can be + * global for all uses of a single type of operation, or local to a single + * use of an operation. + * + * Each database operation accepts a policy for that operation as an + * argument. This is considered a local policy, and is a single use policy. + * This local policy supersedes any global policy defined. + * + * If a value of the policy is not defined, then the rule is to fallback to + * the global policy for that operation. If the global policy for that + * operation is undefined, then the global default value will be used. 
+ * + * If you find that you have behavior that you want every use of an + * operation to utilize, then you can specify the default policy as + * {@link Config#policies}. + * + * For example, the {@link Client#put} operation takes a {@link + * WritePolicy} parameter. If you find yourself setting the {@link + * WritePolicy#key} policy value for every call to {@link Client.put}, then + * you may find it beneficial to set the global {@link WritePolicy} in + * {@link Config#policies}, which all operations will use. + * * + * @example Setting a default key policy for all write operations + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * write: new Aerospike.WritePolicy({ + * key: Aerospike.policy.key.SEND, + * socketTimeout : 0, + * totalTimeout : 0 + * }) + * } + * } + * + * let key = new Aerospike.Key('test', 'demo', 123) + * + * Aerospike.connect(config) + * .then(client => { + * return client.put(key, {int: 42}) + * .then(() => client.close()) + * .catch(error => { + * throw error + * client.close() + * }) + * }) + * .catch(console.error) + */ + policies?: ConfigPolicies; + /** + * Default port to use for any host address, that does not + * explicitly specify a port number. Default is 3000. + * * + * @since v2.4 + */ + port?: number; + /** + * Track server rack data. + * + * This field is useful when directing read commands to the + * server node that contains the key and exists on the same rack as the + * client. This serves to lower cloud provider costs when nodes are + * distributed across different racks/data centers. + * + * {@link rackId} config, {@link + * policy.replica.PREFER_RACK} replica policy, and server + * rack configuration must also be set to enable this functionality. + * + * @default false + * + * @since 3.8.0 + */ + rackAware?: boolean; + /** + * Rack where this client instance resides. 
+ * + * {@link rackAware} config, {@link policy.replica.PREFER_RACK} replica policy, and server + * rack configuration must also be set to enable this functionality. + * + * @default 0 + * + * @since 3.8.0 + */ + rackId?: number; + /** + * Shared memory configuration. + * + * This allows multiple client instances running in separate + * processes on the same machine to share cluster status, including nodes and + * data partition maps. Each shared memory segment contains state for one + * Aerospike cluster. If there are multiple Aerospike clusters, a different + * key must be defined for each cluster. + * + * @see {@link http://www.aerospike.com/docs/client/c/usage/shm.html#operational-notes|Operational Notes} + * + * @example Using shared memory in a clustered setup + * + * const Aerospike = require('aerospike') + * const cluster = require('cluster') + * + * const config = { + * sharedMemory: { + * key: 0xa5000000 + * } + * } + * const client = Aerospike.client(config) + * const noWorkers = 4 + * + * if (cluster.isMaster) { + * // spawn new worker processes + * for (var i = 0; i < noWorkers; i++) { + * cluster.fork() + * } + * } else { + * // connect to Aerospike cluster in each worker process + * client.connect((err) => { if (err) throw err }) + * + * // handle incoming HTTP requests, etc. + * // http.createServer((request, response) => { ... }) + * + * // close DB connection on shutdown + * client.close() + * } + */ + sharedMemory?: SharedMemory; + + /** + * Polling interval in milliseconds for cluster tender. + * + * @default 1000 + */ + tenderInterval?: number; + /** + * Configure Transport Layer Security (TLS) parameters for secure + * connections to the database cluster. TLS connections are not supported as + * of Aerospike Server v3.9 and depend on a future server release. + * + * @since v2.4 + */ + tls?: TLSInfo; + /** + * Whether the client should use the server's + * alternate-access-address instead of the + * access-address. 
+ * + * @default false + * + * @since v3.7.1 + */ + useAlternateAccessAddress?: boolean; + /** + * The user name to use when authenticating to the cluster. + * + * Leave empty for clusters running without access management. + * (Security features are available in the Aerospike Database Enterprise + * Edition.) + * + */ + user?: string; + +} + +/** + * Global client policies. + * + * @remarks The configuration defines default policies for the + * application. Policies define the behavior of the client, which can be + * global for all uses of a single type of operation, or local to a single + * use of an operation. + * + * Each database operation accepts a policy for that operation as an + * argument. This is considered a local policy, and is a single use policy. + * This local policy supersedes any global policy defined. + * + * If a value of the policy is not defined, then the rule is to fallback to + * the global policy for that operation. If the global policy for that + * operation is undefined, then the global default value will be used. + * + * If you find that you have behavior that you want every use of an + * operation to utilize, then you can specify the default policy as + * {@link Config#policies}. + * + * For example, the {@link Client#put} operation takes a {@link + * WritePolicy} parameter. If you find yourself setting the {@link + * WritePolicy#key} policy value for every call to {@link Client.put}, then + * you may find it beneficial to set the global {@link WritePolicy} in + * {@link Config#policies}, which all operations will use. + * + * + * @example Setting a default key policy for all write operations + * + * const Aerospike = require('aerospike') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
+ * var config = { + * hosts: '192.168.33.10:3000', + * policies: { + * write: new Aerospike.WritePolicy({ + * key: Aerospike.policy.key.SEND, + * socketTimeout : 0, + * totalTimeout : 0 + * }) + * } + * } + * + * let key = new Aerospike.Key('test', 'demo', 123) + * + * Aerospike.connect(config) + * .then(client => { + * return client.put(key, {int: 42}) + * .then(() => client.close()) + * .catch(error => { + * throw error + * client.close() + * }) + * }) + * .catch(console.error) + */ +export interface ConfigPolicies { + /** + * Apply policy. For more information, see {@link policy.ApplyPolicy | ApplyPolicy} + */ + apply?: policy.ApplyPolicy; + /** + * Batch policy. For more information, see {@link policy.BasePolicy | BasePolicy} + */ + batch?: policy.BasePolicy; + /** + * Batch parent write policy. For more information, see {@link policy.BatchPolicy | BatchPolicy} + */ + batchParentWrite?: policy.BatchPolicy; + /** + * Info policy. For more information, see {@link policy.InfoPolicy | InfoPolicy} + */ + info?: policy.InfoPolicy; + /** + * Operate policy. For more information, see {@link policy.OperatePolicy | OperatePolicy} + */ + operate?: policy.OperatePolicy; + /** + * Read policy. For more information, see {@link policy.ReadPolicy | ReadPolicy} + */ + read?: policy.ReadPolicy; + /** + * Remove policy. For more information, see {@link policy.RemovePolicy | RemovePolicy} + */ + remove?: policy.RemovePolicy; + /** + * Scan policy. For more information, see {@link policy.ScanPolicy | ScanPolicy} + */ + scan?: policy.ScanPolicy; + /** + * Query policy. For more information, see {@link policy.QueryPolicy | QueryPolicy} + */ + query?: policy.QueryPolicy; + /** + * Write policy. For more information, see {@link policy.WritePolicy | WritePolicy} + */ + write?: policy.WritePolicy; + +} + + +export interface ConnectionStats { + /** + * Connections residing in + * pool(s) for this node. There can be multiple pools per node. 
+ * This value is a summary of those pools for this node. + */ + inPool: number; + /** + * Connections actively being + * used in database transactions for this node. + */ + inUse: number; + /** + * Total number of node connections opened since node creation. + */ + opened: number; + /** + * Total number of node connections closed since node creation. + */ + closed: number; +} + + +export interface EventLoopStats { + /** + * Approximate number of commands + * actively being proccessed. + */ + inFlight: number; + /** + * Approximate number of commands queued + * on the global command queue, that have not yet been started. + */ + queued: number; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface HLLPolicyOptions extends BasePolicyOptions { + /** + * Specifies the behavior when writing byte values. + * + * @default hll.writeFlags.DEFAULT + * @see {@link hll.writeFlags} for supported policy values. + */ + writeFlags: hll.writeFlags; +} +/** + * The address of the cluster host to send a request to. + */ +export interface Host { + /** + * The IP address or host name of the host. + */ + addr: string; + /** + * The port of the host. + */ + port?: number; + /** + * name to use when verifying the TLS certificate for TLS encrypted server connections. + */ + tlsname?: string; +} + +/** + * A key uniquely identifies a record in the Aerospike database within a given namespace. + * + * ###### Key Digests + * In your application, you must specify the namespace, set and the key itself + * to read and write records. When a key is sent to the database, the key value + * and its set are hashed into a 160-bit digest. When a database operation + * returns a key (e.g. Query or Scan operations) it might contain either the + * set and key value, or just the digest. 
+ * + * @example Creating a new {@link Key} instance + * + * const Aerospike = require('aerospike') + * const Key = Aerospike.Key + * + * var key1 = new Key('test', 'demo', 12345) + * var key2 = new Key('test', 'demo', 'abcde') + * var key3 = new Key('test', 'demo', Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72])) */ +export interface KeyOptions { + /** + * The Namespace to which the key belongs. + */ + ns: string; + /** + * The Set to which the key belongs. + */ + set: string; + /** + * The unique key value. Keys can be + * strings, integers or an instance of the Buffer class. + */ + key?: string | number | Buffer; + /** + * The digest value of the key. + */ + digest?: Buffer; +} + +/** + * Options for creating an index. + */ +export interface IndexOptions { + /** + * The name of the bin which values are to be indexed. + */ + bin: string; + /** + * The namespace on which the index is to be created. + */ + ns: string; + /** + * The set on which the index is to be created. + */ + set: string; + /** + * The name of the index to be created. + */ + index: string; + /** + * Type of index to be + * created based on the type of values stored in the bin. This option needs to + * be specified if the bin to be indexed contains list or map values and the + * individual entries of the list or keys/values of the map should be indexed. + * + * See {@link indexType} for accepted values. + */ + type?: indexType; + /** + * The data type of the index to be created, e.g. Numeric, String or Geo. Not necessary to specify when using APIs + * such as {@link Client#createIntegerIndex}, {@link Client#createStringIndex}, or {@link Client#createBlobIndex}. + */ + datatype?: indexDataType; + /** + * The {@link cdt.Context} on which the index is to be created. 
+ */ + context?: cdt.Context; +} + +/** + * Response to {@link Client.infoAll} command + */ +export interface InfoAllResponse { + /** + * The node that send the info response + */ + host: InfoNode; + /** + * The response string with the requested info. + */ + info: string; +} + +/** + * Representation of Node from {@link Client.infoAll} + */ +export interface InfoNode { + /** + * The name of the node. + */ + node_id: string; +} + +/** + * Defines node parameter type for use in {@link Client.infoNode} + */ +export interface InfoNodeParam { + /** + * The name of the node. + */ + name: string; +} + +/** + * Option specification for {@link policy.AdminPolicy} class values. + */ +export interface InfoPolicyOptions extends BasePolicyOptions { + /** + * Ensure the request is within allowable size limits. + */ + checkBounds?: boolean; + /** + * Send request without any further processing. + */ + sendAsIs?: boolean; + /** + * Maximum time in milliseconds to wait for the operation to complete. + */ + timeout?: number +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface ListPolicyOptions extends BasePolicyOptions { + /** + * Sort order for the list. + * + * @type number + * @default {@ link lists.order.UNORDERED} + * @see {@link lists.order} for supported policy values. + */ + order?: lists.order; + /** + * Specifies the behavior when replacing or inserting list items. + * + * @type number + * @default {@link lists.writeFlags.DEFAULT} + * @see {@link lists.writeFlags} for supported policy values. + */ + writeFlags?: lists.writeFlags; +} +/** + * Configuration for logging done by the client. + */ +export interface Log { + /** + * Log level; see {@link log} for details. + */ + level?: log; + /** + * File descriptor returned by + * fs.open() or one of process.stdout.fd or + * process.stderr.fd. + */ + file?: number; +} + +/** + * Option specification for {@ link AdminPolicy} class values. 
+export interface MapPolicyOptions extends BasePolicyOptions {
+  /**
+   * Sort order for the map.
+   *
+   * @default {@link maps.order.UNORDERED}
+   * @see {@link maps.order} for supported policy values.
+   * @since v3.5
+   */
+  order?: maps.order;
+ */ + userPath?: string; +} + +export interface Node { + name: string; + address: string; +} + +export interface NodeStats { + name: string; + syncConnections: ConnectionStats; + asyncConnections: ConnectionStats; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface OperatePolicyOptions extends BasePolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + commitLevel?: policy.commitLevel; + /** + * Should CDT data types (Lists / Maps) be deserialized to JS data types + * (Arrays / Objects) or returned as raw bytes (Buffer). + * + * @default true + * @since v3.7.0 + */ + deserialize?: boolean; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Specifies the behavior for the existence of the record. + * + * @see {@link policy.exists} for supported policy values. + */ + exists?: policy.exists; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + gen?: policy.gen; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; + /** + * Read policy for AP (availability) namespaces. + * + * @default Aerospike.policy.readModeAP.ONE + * @see {@link policy.readModeAP} for supported policy values. + */ + readModeAP?: policy.readModeAP; + /** + * Read policy for SC (strong consistency) namespaces. + * + * @default Aerospike.policy.readModeSC.SESSION + * @see {@link policy.readModeSC} for supported policy values. 
+ */ + readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. + * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * + * @default 0 + */ + readTouchTtlPercent?: number; + /** + * Specifies the replica to be consulted for the read operation. + * + * @see {@link policy.replica} for supported policy values. + */ + replica?: policy.replica; +} + +/** + * Interface used to specify which partition a query starts on and how many partitions to query. + */ +export interface PartFilter { + /** + * Parition in which the query will begin. + */ + begin?: number; + /** + * Number of partitions to filter. + */ + count?: number; + /** + * Digest in which the query will begin from. + */ + digest?: Buffer; +} + +/** + * Options when defining a privilege. + */ +export interface PrivilegeOptions { + /** + * Namespace scope. Apply permission to this null terminated namespace only. + * If string length is zero, the privilege applies to all namespaces. + */ + namespace?: string; + /** + * Set name scope. Apply permission to this null terminated set within namespace only. + * If string length is zero, the privilege applies to all sets within namespace. + */ + set?: string; +} + +/** + * Interface used for providing options to a new {@link Query} class instance. 
+ */ +export interface QueryOptions { + /** + * User-defined function parameters to be applied to the query executed using + * {@link Query#foreach}. + */ + udf?: UDF; + /** + * Filters to apply to the query. + * + * *Note:* Currently, a single index filter is supported. To do more + * advanced filtering, you need to use a user-defined function (UDF) to + * process the result set on the server. + */ + filters?: filter.SindexFilterPredicate[]; + /** + * List of bin names to be selected by the query. If a query specifies bins to + * be selected, then only those bins will be returned. If no bins are + * selected, then all bins will be returned (unless {@link Query#nobins} is + * set to `true`). + */ + select?: string[]; + /** + * If set to `true`, the query will return only meta data, and exclude bins. + */ + nobins?: boolean; + /** + * Approximate number of records to return to client. + * + * When {@link paginate} is true, + * then maxRecords will be the page size if there are enough records remaining in the query to fill the page size. + * + * When {@link paginate} is false, this number is divided by the number of nodes involved in the scan, + * and actual number of records returned may be less than maxRecords if node record counts are small and unbalanced across nodes. + */ + maxRecords?: number; + /** + * If set to true, paginated queries are enabled. In order to receive paginated + * results, the {@link maxRecords} property must assign a nonzero integer value. + * + * @example Asynchronous pagination over a set of thirty records with {@link Query#foreach}. 
+ * + * const Aerospike = require('./lib/aerospike'); + * // Define host configuration + * let config = { + * hosts: '34.213.88.142:3000', + * policies: { + * batchWrite : new Aerospike.BatchWritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * }; + * + * var batchRecords = [] + * for(let i = 0; i < 30; i++){ + * batchRecords.push({ + * type: Aerospike.batchType;.BATCH_WRITE, + * key: new Aerospike.Key('test', 'demo', 'key' + i), + * ops:[Aerospike.operations.write('exampleBin', i)] + * }) + * } + * + * ;(async function() { + * try { + * client = await Aerospike.connect(config) + * await client.truncate('test', 'demo', 0) + * await client.batchWrite(batchRecords, {socketTimeout : 0, totalTimeout : 0}) + * + * const query = client.query('test', 'demo', { paginate: true, maxRecords: 10}) + * do { + * const stream = query.foreach() + * stream.on('error', (error) => { throw error }) + * stream.on('data', (record) => { + * console.log(record.bins) + * }) + * await new Promise(resolve => { + * stream.on('end', (queryState) => { + * query.queryState = queryState + * resolve() + * }) + * }) + * } while (query.queryState !== undefined) + * + * } catch (error) { + * console.error('An error occurred at some point.', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + * @example Asynchronous pagination over a set of thirty records with {@link Query#results} + * + * + * const Aerospike = require('./lib/aerospike'); + * // Define host configuration + * let config = { + * hosts: '34.213.88.142:3000', + * policies: { + * batchWrite : new Aerospike.BatchWritePolicy({socketTimeout : 0, totalTimeout : 0}), + * } + * }; + * + * var batchRecords = [] + * for(let i = 0; i < 30; i++){ + * batchRecords.push({ + * type: Aerospike.batchType.BATCH_WRITE, + * key: new Aerospike.Key('test', 'demo', 'key' + i), + * ops:[Aerospike.operations.write('exampleBin', i)] + * }) + * } + * + * + * ;(async function() { + * try { + * client = await 
Aerospike.connect(config) + * await client.truncate('test', 'demo', 0) + * await client.batchWrite(batchRecords, {socketTimeout : 0, totalTimeout : 0}) + * + * const query = client.query('test', 'demo', { paginate: true, maxRecords: 11}) + * + * let allResults = [] + * let results = await query.results() + * allResults = [...allResults, ...results] + * + * + * results = await query.results() + * allResults = [...allResults, ...results] + * + * results = await query.results() + * allResults = [...allResults, ...results] + * + * console.log("Records returned in total: " + allResults.length) // Should be 30 records + * } catch (error) { + * console.error('An error occurred at some point.', error) + * process.exit(1) + * } finally { + * if (client) client.close() + * } + * })() + * + */ + paginate?: boolean; + /** + * The time-to-live (expiration) of the record in seconds. + * + * There are also special values that can be set in the record TTL For details + * + * Note that the TTL value will be employed ONLY on background query writes. + */ + ttl?: number; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface QueryPolicyOptions extends BasePolicyOptions { + /** + * Should CDT data types (Lists / Maps) be deserialized to JS data types + * (Arrays / Objects) or returned as raw bytes (Buffer). + * + * @default true + * @since v3.7.0 + */ + deserialize?: boolean; + /** + * Expected query duration. The server treats the query in different ways depending on the expected duration. + * This field is ignored for aggregation queries, background queries and server versions < 6.0. + * + * @see {@link policy.queryDuration} for supported policy values. + * @default {@link policy.queryDuration.LONG} + */ + expectedDuration?: policy.queryDuration; + /** + * Terminate the query if the cluster is in migration state. If the query's + * "where" clause is not defined (scan), this field is ignored. 
+ * + * Requires Aerospike Server version 4.2.0.2 or later. + * + * @default false + * @since v3.4.0 + */ + failOnClusterChange?: boolean; + /** + * Timeout in milliseconds used when the client sends info commands to + * check for cluster changes before and after the query. This timeout is + * only used when {@link + * QueryPolicy.failOnClusterChange | failOnClusterChange} is true and the + * query's "where" clause is defined. + * + * @default 10000 ms + * @since v3.16.5 + */ + infoTimeout?: number; + /** + * Specifies the replica to be consulted for the query operation. + * + * @see {@link policy.replica} for supported policy values. + */ + replica?: policy.replica; + /** + * Total transaction timeout in milliseconds. + * + * The totalTimeout is tracked on the client and sent to the + * server along with the transaction in the wire protocol. The client will + * most likely timeout first, but the server also has the capability to + * timeout the transaction. + * + * If totalTimeout is not zero and totalTimeout + * is reached before the transaction completes, the transaction will return + * error {@link status.ERR_TIMEOUT | ERR_TIMEOUT}. + * If totalTimeout is zero, there will be no total time limit. + * + * @default 0 + * @override + */ + totalTimeout?: number; +} +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface ReadPolicyOptions extends BasePolicyOptions { + /** + * Should CDT data types (Lists / Maps) be deserialized to JS data types + * (Arrays / Objects) or returned as raw bytes (Buffer). + * + * @type boolean + * @default true + * @since v3.7.0 + */ + deserialize?: boolean; + /** + * Specifies the behavior for the key. + * + * @type number + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; + /** + * Read policy for AP (availability) namespaces. + * + * @type number + * @default Aerospike.policy.readModeAP.ONE + * @see {@link policy.readModeAP} for supported policy values. 
+ */ + readModeAP?: policy.readModeAP; + + /** + * Read policy for SC (strong consistency) namespaces. + * + * @type number + * @default Aerospike.policy.readModeSC.SESSION + * @see {@link policy.readModeSC} for supported policy values. + */ + readModeSC?: policy.readModeSC; + /** + * Determine how record TTL (time to live) is affected on reads. When enabled, the server can + * efficiently operate as a read-based LRU cache where the least recently used records are expired. + * The value is expressed as a percentage of the TTL sent on the most recent write such that a read + * within this interval of the record’s end of life will generate a touch. + * + * For example, if the most recent write had a TTL of 10 hours and read_touch_ttl_percent is set to + * 80, the next read within 8 hours of the record's end of life (equivalent to 2 hours after the most + * recent write) will result in a touch, resetting the TTL to another 10 hours. + * * + * @type number + * @default 0 + */ + readTouchTtlPercent?: number; + /** + * Specifies the replica to be consulted for the read operation. + * + * @type number + * @see {@link policy.replica} for supported policy values. + */ + replica?: policy.replica; +} +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface RemovePolicyOptions extends BasePolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + commitLevel?: policy.commitLevel; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Specifies the behavior for the generation value. 
+ * + * @see {@link policy.gen} for supported policy values. + */ + gen?: policy.gen; + /** + * The generation of the record. + */ + generation?: number; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; +} + +/** + * Role defintion. + */ +export interface RoleOptions { + /** + * Role name + */ + name: string; + /** + * Maximum reads per second limit. + */ + readQuota: number; + /** + * Maximum writes per second limit. + */ + writeQuota: number; + /** + * Array of allowable IP address strings. + */ + whitelist: number[]; + /** + * Length of privileges array. + */ + privileges: admin.Privilege[]; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface ScanPolicyOptions extends BasePolicyOptions { + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Approximate number of records to return to client. This number is + * divided by the number of nodes involved in the scan. The actual number + * of records returned may be less than maxRecords if node record counts + * are small and unbalanced across nodes. + * + * Requires server >= 4.9. + * + * @default 0 (do not limit record count) + * + * @since v3.16.0 + */ + maxRecords?: number; + /** + * Limit returned records per second (RPS) rate for each server. Do not + * apply RPS limit if recordsPerSecond is zero. + * + * Requires server >= 4.7. + * + * @default 0 + * + * @since v3.14.0 + */ + recordsPerSecond?: number; + /** + * Specifies the replica to be consulted for the scan operation. + * + * @see {@link policy.replica} for supported policy values. + */ + replica?: policy.replica; + /** + * Total transaction timeout in milliseconds. 
+ * + * The totalTimeout is tracked on the client and sent to the + * server along with the transaction in the wire protocol. The client will + * most likely timeout first, but the server also has the capability to + * timeout the transaction. + * + * If totalTimeout is not zero and totalTimeout + * is reached before the transaction completes, the transaction will return + * error {@link status.ERR_TIMEOUT | ERR_TIMEOUT}. + * If totalTimeout is zero, there will be no total time limit. + * + * @default 0 + * @override + */ + totalTimeout?: number; + +} +/** + * Interface used to configure shared memory. + */ +export interface SharedMemory { + /** + * Whether to enable/disable usage of + * shared memory. + */ + enable?: boolean; + /** + * Identifier for the shared memory segment + * associated with the target Aerospike cluster; the same key needs to be + * used on all client instances connecting to the same cluster. + */ + key: number; + /** + * Sets the max. number of server nodes in the cluster - this value is required to size the shared + * memory segment. Ensure that you leave a cushion between actual server node + * cound and maxNodes so that you can add new nodes without + * rebooting the client. + */ + maxNodes?: number; + /** + * Sets the max. number of namespaces used in the cluster - this value is required to size the shared + * memory segment. Ensure that you leave a cushion between actual namespace + * count and maxNamespaces so that you can add new namespaces + * without rebooking the client. + */ + maxNamespaces?: number; + /** + * Expiration time in seconds for the lock on the shared memory segment; if the cluster + * status has not been updated after this many seconds another client instance + * will take over the shared memory cluster tending. 
+ */ + takeoverThresholdSeconds?: number; +} + +export interface Stats { + commands: EventLoopStats; + nodes: NodeStats[]; +} + +/** + * Configure Transport Layer Security (TLS) parameters for secure + * connections to the database cluster. TLS connections are not supported as + * of Aerospike Server v3.9 and depend on a future server release. + * + * @since v2.4 + * + */ +export interface TLSInfo + { + /** + * Enable TLS for socket connections to + * cluster nodes. By default TLS is enabled only if the client configuration + * includes a tls section. + */ + enable?: boolean; + /** + * Path to a trusted CA certificate file. By + * default TLS will use system standard trusted CA certificates. + */ + cafile?: string; + /** + * Path to a directory of trusted certificates. + * See the OpenSSL SSL_CTX_load_verify_locations manual page for more + * information about the format of the directory. + */ + capath?: string; + /** + * Specifies enabled protocols. The format is + * the same as Apache's SSLProtocol documented at + * https://httpd.apache.org/docs/current/mod/mod_ssl.html#sslprotocol. If not + * specified, the client will use "-all +TLSv1.2". If you are not sure what + * protocols to select this option is best left unspecified. + */ + protocols?: string; + /** + * Specifies enabled cipher suites. The + * format is the same as OpenSSL's Cipher List Format documented at + * https://www.openssl.org/docs/manmaster/apps/ciphers.html. If not specified + * the OpenSSL default cipher suite described in the ciphers documentation + * will be used. If you are not sure what cipher suite to select this option + * is best left unspecified. + */ + cipherSuite?: string; + /** + * Path to a certificate blacklist file. + * The file should contain one line for each blacklisted certificate. Each + * line starts with the certificate serial number expressed in hex. Each + * entry may optionally specify the issuer name of the certificate. 
(Serial
+ * numbers are only required to be unique per issuer.) Example records:
+ * 867EC87482B2 /C=US/ST=CA/O=Acme/OU=Engineering/CN=Test Chain CA
+ * E2D4B0E570F9EF8E885C065899886461
+ */ + certBlacklist?: string; + /** + * Path to the client's key for mutual + * authentication. By default, mutual authentication is disabled. + */ + keyfile?: string; + /** + * Decryption password for the + * client's key for mutual authentication. By default, the key is assumed + * not to be encrypted. + */ + keyfilePassword?: string; + /** + * Path to the client's certificate chain + * file for mutual authentication. By default, mutual authentication is + * disabled. + */ + certfile?: string; + /** + * Enable CRL checking for the + * certificate chain leaf certificate. An error occurs if a suitable CRL + * cannot be found. By default CRL checking is disabled. + */ + crlCheck?: boolean; + /** + * Enable CRL checking for the + * entire certificate chain. An error occurs if a suitable CRL cannot be + * found. By default CRL checking is disabled. + */ + crlCheckAll?: boolean; + /** + * Log session information for + * each connection. + */ + logSessionInfo?: boolean; + /** + * Use TLS connections only for login authentication. All other communication with the server will be done + * with non-TLS connections. Default: false (Use TLS connections for all + * communication with the server.) + */ + forLoginOnly?: boolean; +} + +/** + * Parameters used to specify which UDF function to execute. + */ +export interface UDF { + /** + * The name of the UDF module that was registered with the cluster. + */ + module: string; + /** + * The name of the UDF function within the module. + */ + funcname: string; + /** + * List of arguments to pass to the UDF function. + */ + args?: AerospikeBinValue[]; +} + +/** + * Contains user roles and other user related information + */ +export interface UserOptions { + /** + * Number of currently open connections. + */ + connsInUse: number; + /** + * Name of the {@link admin.User}. + */ + name: string; + /** + * Array of read statistics. Array may be null. + * Current statistics by offset are: + *
+ * <ul>
+ * <li>0: read quota in records per second</li>
+ * <li>1: single record read transaction rate (TPS)</li>
+ * <li>2: read scan/query record per second rate (RPS)</li>
+ * <li>3: number of limitless read scans/queries</li>
+ * </ul>
+ * Future server releases may add additional statistics. + */ + readInfo: number[]; + /** + * Array of write statistics. Array may be null. + * Current statistics by offset are: + *
+ * <ul>
+ * <li>0: write quota in records per second</li>
+ * <li>1: single record write transaction rate (TPS)</li>
+ * <li>2: write scan/query record per second rate (RPS)</li>
+ * <li>3: number of limitless write scans/queries</li>
+ * </ul>
+ * Future server releases may add additional statistics. + */ + writeInfo: number[]; + /** + * Array of assigned role names. + */ + roles: string[]; +} + +/** + * Option specification for {@ link AdminPolicy} class values. + */ +export interface WritePolicyOptions extends BasePolicyOptions { + /** + * Specifies the number of replicas required to be committed successfully + * when writing before returning transaction succeeded. + * + * @see {@link policy.commitLevel} for supported policy values. + */ + commitLevel?: policy.commitLevel; + + /** + * Minimum record size beyond which it is compressed and sent to the + * server. + */ + compressionThreshold?: number; + /** + * Specifies whether a {@link + * http://www.aerospike.com/docs/guide/durable_deletes.html|tombstone} + * should be written in place of a record that gets deleted as a result of + * this operation. + * + * @default false (do not tombstone deleted records) + */ + durableDelete?: boolean; + /** + * Specifies the behavior for the existence of the record. + * + * @see {@link policy.exists} for supported policy values. + */ + exists?: policy.exists; + /** + * Specifies the behavior for the generation value. + * + * @see {@link policy.gen} for supported policy values. + */ + gen?: policy.gen; + /** + * Specifies the behavior for the key. + * + * @see {@link policy.key} for supported policy values. + */ + key?: policy.key; +} + +/* ENUMS */ + + +/** + * Authentication mode when user/password is defined. + * + * Note: The Node.js client's TLS support is currently limited to Linux, and + * therefore secure, external authentication (e.g. LDAP) is only supported on + * Linux as well. External authentication can be used on macOS or Windows but + * it will _not_ be secure! + * + * @example Using external authentication mode, e.g. 
to use LDAP authentication + * + * const Aerospike = require('aerospike') + * + * const config = { + * user: process.env.ADMIN_USER, + * password: process.env.ADMIN_PASSWORD, + * authMode: Aerospike.auth.EXTERNAL + * } + * + * + * @example Using PKI authentication mode + * + * const Aerospike = require('aerospike') + * + * const config = { + * hosts: [ + * { addr: 'bob-cluster-a', port: process.env.PORT} + * ], + * tls: { + * cafile: process.env.CAFILE, + * keyfile: process.env.KEYFILE, + * certfile: process.env.CERT, + * } + * authMode: Aerospike.auth.AUTH_PKI, + * } + * + * + * Aerospike.connect(config).then(async (client) => { + * const info = await client.infoAny().then(Aerospike.info.parse) + * console.info(info) + * client.close() + * }) + */ +export enum auth { + /** + * Use internal authentication only. Hashed password is stored on the server. Do not send clear password. This is the + * default. + */ + INTERNAL, + /** + * Use external authentication (like LDAP). + * Specific external authentication is configured on server. If TLS is enabled, + * send clear password on node login via TLS. Throws exception, if TLS is not + * enabled. + */ + EXTERNAL, + /** + * Use external authentication (like + * LDAP). Specific external authentication is configured on server. Send + * clear password on node login whether or not TLS is enabled. This mode + * should only be used for testing purposes because it is not secure + * authentication. + */ + EXTERNAL_INSECURE, + /** + * Use PKI authentication. + * Authentication and authorization is based on a certificate. No user name or + * password needs to be configured. Requires mTLS and a client certificate. + */ + AUTH_PKI +} + +/** + * Identifies batch record type with designated enumerated type + */ +export enum batchType { + /** + * Indicates that a {@link Record} instance is used in a batch for read operations. + */ + BATCH_READ, + /** + * Indicates that a {@link Record} instance is used in a batch for write operations. 
+ */ + BATCH_WRITE, + /** + * Indicates that a {@link Record} instance is used in a batch for applying record. + */ + BATCH_APPLY, + /** + * Indicates that a {@link Record} instance is used in a batch for removal operations. + */ + BATCH_REMOVE } + +/** + * Specifies secondary index data types. + */ +export enum indexDataType { + /* + * Values contained in the SI are strings. + */ + STRING, + /* + * Values contained in the SI are integers. + */ + NUMERIC, + /** + * Values contained in the SI are GeoJSON values (points or polygons). + */ + GEO2DSPHERE, + /* + * Values contained in the SI are blobs (Buffer in Node.js). + */ + BLOB +} + +/** + * Specifies the collection datatype the secondary index should be built on. DEFAULT implies the value is not a collection datatype.z + */ +export enum indexType { + /** + * Default Secondary Index type for bins + * containing scalar values (i.e. integer, string). + */ + DEFAULT, + /** + * Secondary index for bins containing + * ⇑Lists; + * The index will be built over the individual entries of the list. + */ + LIST, + /** + * SI for bins containing + * ⇑Maps; + * The index will be built over the individual keys of the map entries. + */ + MAPKEYS, + /** + * SI for bins containing + * ⇑Maps; + * The index will be built over the individual values of the map entries. + */ + MAPVALUES +} + +/** + * Enumerations represented the completion status of a Job in Aerospike. + */ +export enum jobStatus { + /** + * The job status is undefined. This is likely due to the + * status not being properly checked. + */ + UNDEF, + /** + * The job is currently running. + */ + INPROGRESS, + /** + * The job is completed successfully. + */ + COMPLETED +} + +/** + * Specifies language used in UDFs (User defined Functions). 
+ */ +export enum language { + /** + * Lua (only supported UDF type at the moment) + */ + LUA +} + +/** + * Enumeration of log levels + */ +export enum log { + /** + * Turn off logging + */ + OFF = -1, + /** + * Log messages at ERROR level + */ + ERROR, + /** + * Log messages at WARN level or below + */ + WARN, + /** + * Log messages at INFO level or below + */ + INFO, + /** + * Log messages at DEBUG level or below + */ + DEBUG, + /** + * Log messages at TRACE level or below + */ + TRACE +} + +/** + * Permission codes define the type of permission granted for a user's role. + */ +export enum privilegeCode { + /** + * User can edit/remove other users. + */ + USER_ADMIN, + /** + * User can perform systems administration functions on a database that do not involve user administration. + */ + SYS_ADMIN, + /** + * User can perform UDF and SINDEX administration actions. + */ + DATA_ADMIN, + /** + * User can perform user defined function (UDF) administration actions. + */ + UDF_ADMIN, + /** + * User can perform secondary index administration actions. + */ + SINDEX_ADMIN, + /** + * User can read data. + */ + READ, + /** + * User can read and write data. + */ + READ_WRITE, + /** + * User can read and write data through user defined functions. + */ + READ_WRITE_UDF, + /** + * User can write data. + */ + WRITE, + /** + * User can truncate data only. + */ + TRUNCATE +} + +/** + * POSIX regex compilation flags. + * + */ +export enum regex { + /** + * Use basic regular expression syntax. + */ + BASIC, + /** + * Use extended regular expression syntax. + */ + EXTENDED, + /** + * Ignore case when matching. + * */ + ICASE, + /** + * Anchors do not match at newline characters in the string. + */ + NEWLINE +} + +/** + * Enumeration of special TTL (time-to-live) values. + * + * Instead of specifying a TTL in seconds, you can set the TTL + * to one of these special values when creating or updating a record. 
+ */ +export enum ttl { + /** + * Use the default TTL value specified in {@link policy} for a given operation type. + */ + CLIENT_DEFAULT = -3, + /** + * Use the default TTL value for the + * namespace of the record. + */ + DONT_UPDATE, + /** + * Never expire the record. + */ + NEVER_EXPIRE, + /** + * Update the record without changing the + * record's TTL value. Requires server 3.10.1 or later. + */ + NAMESPACE_DEFAULT +} + +/* NAMESPACES */ + +export namespace admin { + + export class Privilege { + constructor(code: privilegeCode, options?: PrivilegeOptions); + code: privilegeCode; + namespace: string; + set: string; + } + + export class Role { + constructor(options: RoleOptions); + name: string; + readQuota: number; + writeQuota: number; + whitelist: number[]; + privileges: Privilege[]; + } + + export class User { + constructor(options: UserOptions); + connsInUse: number; + name: string; + readInfo: number[]; + writeInfo: number[]; + roles: string[]; + } +} +/** + * Bitwise write flags. + * + * @see {@link policy.BitwisePolicy} + */ +export namespace bitwise { + /** + * Bitwise write flags. + */ + export enum writeFlags { + /** + * Allow create or update. Default. + */ + DEFAULT, + /** + * If the bin already exists, the operation + * will be denied. If the bin does not exist, a new bin will be created. + */ + CREATE_ONLY, + /** + * If the bin already exists, the bin will be + * overwritten. If the bin does not exist, the operation will be denied. + */ + UPDATE_ONLY, + /** + * Do not raise error if operation is denied. + */ + NO_FAIL, + /** + * Allow other valid operations to be committed if + * this operations is denied due to flag constraints. + */ + PARTIAL = 8 + } + /** + * Bitwise resize flags. + */ + export enum resizeFlags { + /** + * Default. + */ + DEFAULT, + /** + * Add/remove bytes from the beginning instead + * of the end. + */ + FROM_FRONT, + /** + * Only allow the bitmap size to increase. 
+ */ + GROW_ONLY, + /** + * Only allow the bitmap size to decrease. + */ + SHRINK_ONLY = 4 + } + /** + * Bitwise overflow action. + * + * @remarks Action to take when a bitwise {@link + * bitwise.add | add}/{@link + * bitwise.subtract | subtract} operation results in + * overflow/underflow. + * + */ + export enum overflow { + /** + * Fail operation with error. Default. + */ + FAIL, + /** + * If add/subtract overflows/underflows, set to + * max/min value. Example: MAXINT + 1 = MAXINT. + */ + SATURATE = 2, + /** + * If add/subtract overflows/underflows, wrap the + * value. Example: MAXINT + 1 = -1. + */ + WRAP = 4 + } + + export class BitwiseOperation extends operations.Operation { + withPolicy(policy: policy.BitwisePolicy): BitwiseOperation; + } + + export class OverflowableBitwiseOp extends BitwiseOperation { + public overflowAction: bitwise.overflow; + public onOverflow(action: bitwise.overflow): OverflowableBitwiseOp; + } + + /** + * Create byte "resize" operation. + * + * @remarks Server resizes bitmap to byte size according to flags. + * Server does not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param size - Number of bytes to resize the byte value to. + * @param flags - Optional {@link bitwise.resizeFlags|resize flags}. Default is {@link bitwise.resizeFlags.DEFAULT}. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function resize(bin: string, size: number, flags?: bitwise.resizeFlags): BitwiseOperation; + /** + * Create byte "insert" operation. + * + * @remarks Server inserts value bytes into bitmap. Server does not return + * a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param byteOffset - Offset in bytes. + * @param value - Bytes to insert. + * @returns Operation that can be passed to the {@link Client#operate} command. 
+ */ + export function insert(bin: string, byteOffset: number, value: Buffer): BitwiseOperation; + /** + * Create byte "remove" operation. + * @remarks Server removes bytes from bitmap. Server does not return a + * value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param byteOffset - Offset in bytes. + * @param byteSize - Number of bytes to remove + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function remove(bin: string, byteOffset: number, byteSize: number): BitwiseOperation; + /** + * Create bit "set" operation. + * + * @remarks Server sets value on bitmap. Server does not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits to set. + * @param value - Value to set. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @throws TypeError unless value is an integer or a Buffer. + */ + export function set(bin: string, bitOffset: number, bitSize: number, value: number | Buffer): BitwiseOperation; + /** + * Create bit "or" operation. + * + * @remarks Server performs bitwise "or" on value and bitmap. Server does + * not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits. + * @param value - Value. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function or(bin: string, bitOffset: number, bitSize: number, value: Buffer): BitwiseOperation; + /** + * Create bit "exclusive or" operation. + * + * @remarks Server performs bitwise "xor" on value and bitmap. Server does + * not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits. + * @param value - Value. 
+ * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function xor(bin: string, bitOffset: number, bitSize: number, value: Buffer): BitwiseOperation; + /** + * Create bit "and" operation. + * + * @remarks Server performs bitwise "and" on value and bitmap. Server does + * not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits. + * @param value - Value. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function and(bin: string, bitOffset: number, bitSize: number, value: Buffer): BitwiseOperation; + /** + * Create bit "not" operation. + * + * @remarks Server negates bitmap. Server does not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function not(bin: string, bitOffset: number, bitSize: number): BitwiseOperation; + /** + * Create bit "left shift" operation. + * @remarks Server shifts left bitmap. Server does not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits to shift. + * @param shift - Distance to shift bits. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function lshift(bin: string, bitOffset: number, bitSize: number, shift: number): BitwiseOperation; + /** + * Create bit "right shift" operation. + * @remarks Server shifts right bitmap. Server does not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits to shift. + * @param shift - Distance to shift bits. 
+ * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function rshift(bin: string, bitOffset: number, bitSize: number, shift: number): BitwiseOperation; + /** + * Create bit "add" operation. + * @remarks Server adds value to bitmap. Server does not return a value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits; must be <= 64. + * @param value - Value to add. + * @param sign - Sign indicates if bits should be treated as a signed + * number. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see {@link bitwise~OverflowableBitwiseOp#onOverflow|OverflowableBitwiseOp#onOverflow} + * can used to control how the operation executes, when the addition results + * in an overflow/underflow. + */ + export function add(bin: string, bitOffset: number, bitSize: number, value: number, sign: boolean): OverflowableBitwiseOp; + /** + * Create bit "subtract" operation. + * @remarks Server subtracts value from bitmap. Server does not return a + * value. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits; must be <= 64. + * @param value - Value to subtract. + * @param sign - Sign indicates if bits should be treated as a signed + * number. + * @returns {OverflowableBitwiseOp} Operation that can be passed to the {@link + * Client#operate} command. + * + * @see {@link bitwise~OverflowableBitwiseOp#onOverflow|OverflowableBitwiseOp#onOverflow} + * can used to control how the operation executes, when the addition results + * in an overflow/underflow. + */ + export function subtract(bin: string, bitOffset: number, bitSize: number, value: number, sign: boolean): OverflowableBitwiseOp; + /** + * Create bit "get" operation. + * @remarks Server returns bits from bitmap. 
+ * + * @param {string} bin - The name of the bin. The bin must contain a byte value. + * @param {number} bitOffset - Offset in bits. + * @param {number} bitSize - Number of bits to return. + * @returns {BitwiseOperation} Operation that can be passed to the {@link + * Client#operate} command. + */ + export function get(bin: string, bitOffset: number, bitSize: number): BitwiseOperation; + /** + * Create bit "get integer" operation. + * @remarks Server returns integer from bitmap. + * + * @param {string} bin - The name of the bin. The bin must contain a byte value. + * @param {number} bitOffset - Offset in bits. + * @param {number} bitSize - Number of bits to return. + * @param {boolean} sign - Sign indicates if bits should be treated as a + * signed. + * @returns {BitwiseOperation} Operation that can be passed to the {@link + * Client#operate} command. + */ + export function getInt(bin: string, bitOffset: number, bitSize: number, sign: boolean): BitwiseOperation; + /** + * Create bit "left scan" operation. + * @remarks Server returns integer bit offset of the first specified value + * bit in bitmap. + * + * @param {string} bin - The name of the bin. The bin must contain a byte value. + * @param {number} bitOffset - Offset in bits. + * @param {number} bitSize - Number of bits. + * @param {boolean} value - value to scan for, "0" (false) or "1" (true). + * @returns {BitwiseOperation} Operation that can be passed to the {@link + * Client#operate} command. + */ + export function lscan(bin: string, bitOffset: number, bitSize: number, value: boolean): BitwiseOperation; + /** + * Create bit "right scan" operation. + * + * @remarks Server returns integer bit offset of the last specified value + * bit in bitmap. + * + * @param bin - The name of the bin. The bin must contain a byte value. + * @param bitOffset - Offset in bits. + * @param bitSize - Number of bits. + * @param value - value to scan for, "0" (false) or "1" (true). 
+ * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function rscan(bin: string, bitOffset: number, bitSize: number, value: boolean): BitwiseOperation; +} +/** + * + * Use the methods in the {@link hll|hll} + * module to create HLL operations for use with the {@link Client#operate} + * command. + */ +export namespace hll { + +/** + * HLL write flags. + * + * @property {number} DEFAULT - Allow create or update. Default. + * @property {number} CREATE_ONLY - If the bin already exists, the operation + * will be denied. If the bin does not exist, a new bin will be created. + * @property {number} UPDATE_ONLY - If the bin already exists, the bin will be + * overwritten. If the bin does not exist, the operation will be denied. + * @property {number} NO_FAIL - Do not raise error if operation is denied. + * @property {number} ALLOW_FOLD - Allow the resulting set to be the minimum of + * provided index bits. For {@link + * hll.getIntersectCount | getIntersectCount} and {@link + * hll.getSimilarity |getSimilarity }, allow the usage of less + * precise HLL algorithms when min hash bits of all participating sets do not + * match. + * + * @see {@link HLLPolicy} + */ + export enum writeFlags { + /** + * DEFAULT - Allow create or update. Default. + */ + DEFAULT, + /** + * If the bin already exists, the operation + * will be denied. If the bin does not exist, a new bin will be created. + */ + CREATE_ONLY, + /** + * If the bin already exists, the bin will be + * overwritten. If the bin does not exist, the operation will be denied. + */ + UPDATE_ONLY, + /** + * Do not raise error if operation is denied. + */ + NO_FAIL = 4, + /** + * Allow the resulting set to be the minimum of + * provided index bits. For {@link + * hll.getIntersectCount | getIntersectCount} and {@link + * hll.getSimilarity|getSimilarity}, allow the usage of less + * precise HLL algorithms when min hash bits of all participating sets do not + * match. 
+ */ + ALLOW_FOLD = 8 + } + + export class HLLOperation extends operations.Operation { + public withPolicy(policy: policy.HLLPolicy): HLLOperation; + } + + /** + * Creates a new HLL or re-initializes an existing HLL. Re-initialization + * clears existing contents. + * + * The init operation supports the following {@link hll.writeFlags | HLL Policy write flags}: + * * CREATE_ONLY + * * UPDATE_ONLY + * * NO_FAIL + * + * @param bin - The name of the bin. The bin must contain an HLL value. + * @param indexBits - Number of index bits. Must be between 4 and 16 inclusive. + * @param minhashBits - Number of minhash bits. If specified, must + * be between 4 and 51 inclusive. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function init(bin: string, indexBits: number, minhashBits?: number): HLLOperation; + /** + * Adds elements to the HLL set. If the bin does not exist, create the HLL with + * the indexBits and minhashBits parameters. + * + * Returns an integer indicating number of entries that caused HLL to update a + * register. + * + * The add operation supports the following {@link hll.writeFlags | HLL Policy write flags}: + * * CREATE_ONLY + * * NO_FAIL + * + * Not specifying the bit count, implies UPDATE_ONLY. + * + * @param bin - The name of the bin. The bin must contain an HLL value. + * @param list - Entries to be added to the HLL set. + * @param indexBits - Number of index bits. If specified, must + * be between 4 and 16 inclusive. + * @param minhashBits - Number of minhash bits. If specified, must + * be between 4 and 51 inclusive. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function add(bin: string, list: AerospikeBinValue[], indexBits?: number, minhashBits?: number): HLLOperation; + /** + * Sets a union of the specified HLLs with the HLL bin value (if it exists) + * back into the HLL bin. 
+ * + * The setUnion operation supports the following {@link hll.writeFlags | HLL Policy write flags}: + * * CREATE_ONLY + * * UPDATE_ONLY + * * ALLOW_FOLD + * * NO_FAIL + * + * If ALLOW_FOLD is not set, all provided HLLs and the target bin + * (if it exists) must have matching index bits and minhash bits. If + * ALLOW_FOLD is set, server will union down to the minimum index + * bits of all provided HLLs and the target bin (if it exists). Additionally, + * if minhash bits differs on any HLL, the resulting union will have 0 minhash + * bits. + * + * @param bin - The name of the bin. The bin must contain an HLL value. + * @param list - List of HLL objects (of type Buffer). + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function setUnion(bin: string, list: AerospikeBinValue[]): HLLOperation; + /** + * Updates the cached count (if stale), and returns the estimated number of + * elements in the HLL bin. + * + * @param bin - The name of the bin. The bin must contain an HLL value. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function refreshCount(bin: string): HLLOperation; + /** + * Folds the index bit count to the specified value. This can only be applied + * when the min hash count on the HLL bin is 0. + * + * @param bin - The name of the bin. The bin must contain an HLL value. + * @param indexBits - Number of index bits. Must be between 4 and 16 inclusive. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function fold(bin: string, indexBits: number): HLLOperation; + /** + * Returns the estimated number of elements in the HLL bin. + * + * @param bin - The name of the bin. The bin must contain an HLL value. + * @returns Operation that can be passed to the {@link Client#operate} command. 
+ */
+ export function getCount(bin: string): HLLOperation;
+ /**
+ * Returns an HLL object, which is the union of all specified HLL objects in
+ * the list with the HLL bin.
+ *
+ * @param bin - The name of the bin. The bin must contain an HLL value.
+ * @param list - List of HLL objects (of type Buffer).
+ * @returns Operation that can be passed to the {@link Client#operate} command.
+ */
+ export function getUnion(bin: string, list: AerospikeBinValue[]): HLLOperation;
+ /**
+ * Returns the estimated number of elements that would be contained by the
+ * union of these HLL objects.
+ *
+ * @param bin - The name of the bin. The bin must contain an HLL value.
+ * @param list - List of HLL objects (of type Buffer).
+ * @returns Operation that can be passed to the {@link Client#operate} command.
+ */
+ export function getUnionCount(bin: string, list: AerospikeBinValue[]): HLLOperation;
+ /**
+ * Returns the estimated number of elements that would be contained by the
+ * intersection of these HLL objects.
+ *
+ * @param bin - The name of the bin. The bin must contain an HLL value.
+ * @param list - List of HLL objects (of type Buffer).
+ * @returns Operation that can be passed to the {@link Client#operate} command.
+ */
+ export function getIntersectCount(bin: string, list: AerospikeBinValue[]): HLLOperation;
+ /**
+ * Returns the estimated similarity of these HLL objects. Return type is double.
+ *
+ * @param bin - The name of the bin. The bin must contain an HLL value.
+ * @param list - List of HLL objects (of type Buffer).
+ * @returns Operation that can be passed to the {@link Client#operate} command.
+ */
+ export function getSimilarity(bin: string, list: AerospikeBinValue[]): HLLOperation;
+ /**
+ * Returns the index and min hash bit counts used to create the HLL bin as a
+ * list of integers.
+ *
+ * @param bin - The name of the bin. The bin must contain an HLL value.
+ * @returns Operation that can be passed to the {@link Client#operate} command.
+ */ + export function describe(bin: string): HLLOperation; +} + +/** + * This module defines operations on the List data type. Create + * list operations used by the {@link Client#operate} command. + * + * For more information, please refer to the + * ⇑Lists + * and ⇑List Operations + * documentation in the Aerospike Feature Guide. + * + * #### List Index + * + * List operations support negative indexing. If the index is negative, the + * resolved index starts backwards from end of list. + * + * Index/Range examples: + * + * - Index 0: First item in list. + * - Index 4: Fifth item in list. + * - Index -1: Last item in list. + * - Index -3: Third to last item in list. + * - Index 1 Count 2: Second and third items in list. + * - Index -3 Count 3: Last three items in list. + * - Index -5 Count 4: Range between fifth to last item to second to last item inclusive. + * + * If an index is out of bounds, a parameter error will be returned. If a range + * is partially out of bounds, the valid part of the range will be returned. + * + * #### CDT Context - Operating on Nested Lists + * + * To operate on nested lists, use the {@link + * lists~ListOperation#withContext ListOperation#withContext} + * function to set the context for a list operation. + * + * @see {@link Client#operate} + */ +export namespace lists { + + /** + * List order. + * + * @remarks The order determines what kind of indices the Aerospike server + * maintains for the list. + * + */ + export enum order { + /** + * List is not ordered. This is the default. + */ + UNORDERED, + /** + * List is ordered. + */ + ORDERED + } + /** + * List return type. + * @remarks The return type determines what data of the selected items the + * get and remove operations return in the result of the {@link Client#operate} + * command. It is optional to specify the return type for remove operations; + * default is NONE. For get operations the return type parameter + * is required. 
+ */ + export enum returnType { + /** + * Do not return a result. + */ + NONE, + /** + * Return key index order. + */ + INDEX, + /** + * Return reverse key order. + */ + REVERSE_INDEX, + /** + * Return value order. + */ + RANK, + /** + * Return reverse value order. + */ + REVERSE_RANK, + /** + * Return count of items selected. + */ + COUNT, + /** + * Return value for single key read and value list for range read. + */ + VALUE = 7, + /** + * Return true if count > 0. + */ + EXISTS = 13, + /** + * Invert meaning of list command and return values. Let's take {@link removeByIndexRange} for example. + * + * + * With INVERTED enabled, the keys outside of the specified index range will be + * removed and returned. + */ + INVERTED = 0x10000, + } + /** + * List sort flags. + */ + export enum sortFlags { + /** + * Default. Preserve duplicate values when sorting list. + */ + DEFAULT, + /** + * Drop duplicate values when sorting list. + */ + DROP_DUPLICATES = 2 + } + + /** + * List write bit flags. + */ + export enum writeFlags { + /** + * Default. Allow duplicate values and insertions at any index. + */ + DEFAULT, + /** + * Only add unique values. + */ + ADD_UNIQUE, + /** + * Enforce list boundaries when inserting. Do not allow values to be inserted + * at index outside current list boundaries. + */ + INSERT_BOUNDED, + /** + * Do not raise error if a list item fails due to write flag constraints. + */ + NO_FAIL, + /** + * Allow other valid list items to be committed if a list item fails due to + * write flag constraints. + */ + PARTIAL + } + + /** + * Use the methods in the {@link lists} + * namespace to create list operations for use with the {@link Client#operate} + * command. + */ + export class ListOperation extends operations.Operation { + /** + * Set the return type for certain list operations. + * + * The return type only affects getBy\* and + * removeBy\* list operations. 
+ * + * @param {number} returnType - The {@link lists.returnType|return type} indicating what data of the + * selected items to return. + * + * @example Fetch the first three list elements and return the values + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listsTest') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async client => { + * await client.put(key, { list: [32, 5, 85, 16, 22] }) + * const ops = [ + * lists.getByValueRange('list', 10, 30) + * .andReturn(lists.returnType.VALUE) + * ] + * const result = await client.operate(key, ops) + * console.log('Result:', result.bins.list) // => Result: [ 16, 22 ] + * client.close() + * }) + */ + public andReturn(returnType: lists.returnType): ListOperation; + /** + * By setting the context, the list operation will be executed on a + * nested list, instead of the bin value itself. + * + * @since v3.12.0 + * + * @example Fetch the 1st element of the 2nd nested list + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listsTest') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async (client) => { + * await client.put(key, { list: [[32, 5, 85], [16, 22]] }) + * const ops = [ + * lists.get('list', 0) + * .withContext((ctx) => ctx.addListIndex(1)) + * ] + * const result = await client.operate(key, ops) + * console.log('Result:', result.bins.list) // => Result: 16 + * client.close() + * }) + * + * @example Fetch the last element of the nested list stored under the 'nested' map key + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const Context = Aerospike.cdt.Context + * const key = new Aerospike.Key('test', 'demo', 'listsTest') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async (client) => { + * await client.put(key, { map: { nested: [32, 5, 85, 16, 22] } }) + * const context = new Context().addMapKey('nested') + * const ops = [ + * lists.get('map', -1) + * .withContext(context) + * ] + * const result = await client.operate(key, ops) + * console.log('Result:', result.bins.map) // => Result: 22 + * client.close() + * }) + */ + public withContext(contextOrFunction: cdt.Context | Function): ListOperation; + /** + * Inverts the selection of items for certain list operations. + * + * For getBy\* and removeBy\* list + * operations, calling the invertSelect method on the + * ListOperation has the effect of inverting the selection of + * list elements that the operation affects. 
+ * + * @throws {AerospikeError} if the operation is not invertible. + * + * @example Remove all tags except for yellow from the record + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listsTest') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async client => { + * await client.put(key, { tags: ['blue', 'yellow', 'pink'] }) + * const ops = [ + * lists.removeByValue('tags', 'yellow') + * .invertSelection() + * ] + * await client.operate(key, ops) + * const record = await client.get(key) + * console.log('Result:', record.bins.tags) // => Result: [ 'yellow' ] + * client.close() + * }) + */ + public invertSelection(): void; + } + + /** + * List operation variant that can be inverted + */ + export class InvertibleListOp extends ListOperation { + /** + * Signifies if Operation will be inverted. + */ + public inverted: boolean; + /** + * Sets {@link InvertibleListOp.inverted} to `true`. + */ + public invertSelection(): InvertibleListOp; + } + + /** + * Creates list create operation. + * + * @param bin - bin name. + * @param order - list order. + * @param pad - If true, the context is allowed to be beyond list boundaries. In that case, nil + * list entries will be inserted to satisfy the context position. + * @param persistIndex - If true, persist list index. A list index improves lookup performance, + * but requires more storage. A list index can be created for a top-level ordered list only. Nested and + * unordered list indexes are not supported. + * @param ctx - optional path to nested list. 
If not defined, the top-level list is used. + * + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listKey') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * Aerospike.connect(config).then(async client => { + * let ops = [ + * lists.create('list', lists.order.ORDERED, false, true) + * ] + * let result = await client.operate(key, ops) + * console.log(result.bins) // => { list: null } + * let record = await client.get(key) + * console.log(record.bins) // => { list: [] } + * + * await client.remove(key) + * client.close() + * }) + */ + export function create(bin: string, order?: lists.order, pad?: boolean, persistIndex?: boolean, ctx?: cdt.Context): ListOperation; + /** + * Sets the list order to ORDERED or UNORDERED + * + * @remarks This operation does not return any result. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param order - The new {@link lists.order|list order}. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @since v3.4.0 + */ + export function setOrder(bin: string, order: lists.order): ListOperation; + /** + * Sort the list according to flags. + * @remarks This operation does not return any result. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param flags - The {@link lists.sortFlags|sort flags} to use. + * @returns Operation that can be passed to the {@link Client#operate} command. 
+ * + * @since v3.4.0 + */ + export function sort(bin: string, flags: lists.sortFlags): ListOperation; + /** + * Appends an element to the end of a list. + * + * @remarks This operation returns the element count of the list after the + * operation. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param value - The value to be appended. + * @param policy - Optional list policy. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.append('tags', 'orange'), + * op.read('tags') + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => [ 'blue', 'yellow', 'pink', 'orange' ] + * client.close() + * }) + * }) + * }) + */ + export function append(bin: string, value: AerospikeBinValue, policy?: policy.ListPolicy): ListOperation; + /** + * Appends a list of elements to the end of a list. + * @remarks This operation returns the element count of the list after the + * operation. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param list - Array of elements to be appended. + * @param policy - Optional list policy. 
+ * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.appendItems('tags', ['orange', 'green']), + * op.read('tags') + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => [ 'blue', 'yellow', 'pink', 'orange', 'green' ] + * client.close() + * }) + * }) + * }) + */ + export function appendItems(bin: string, list: AerospikeBinValue[], policy?: policy.ListPolicy): ListOperation; + /** + * Inserts an element at the specified index. + * @remarks This operation returns the element count of the list after the + * operation. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - List index at which the new element should be inserted. + * @param value - The value to be appended. + * @param policy - Optional list policy. + * @returns Operation that can be passed to the {@link Client#operate} command. 
+ * + * @example + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * var ops = [ + * lists.insert('tags', 2, 'orange'), + * op.read('tags') + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => [ 'blue', 'yellow', 'orange', 'pink' ] + * client.close() + * }) + * }) + * }) + */ + export function insert(bin: string, index: number, value: AerospikeBinValue, policy?: policy.ListPolicy): ListOperation; + /** + * Inserts a list of elements at the specified index. + * + * @remarks This operation returns the element count of the list after the + * operation. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - List index at which the new elements should be inserted. + * @param list - Array of elements to be inserted. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
+ * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.insertItems('tags', 2, ['orange', 'green']), + * op.read('tags') + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => [ 'blue', 'yellow', 'orange', 'green', 'pink' ] + * client.close() + * }) + * }) + * }) + */ + export function insertItems(bin: string, index: number, list: AerospikeBinValue[], policy?: policy.ListPolicy): ListOperation; + /** + * Removes and returns the list element at the specified index. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - List index of the element to be removed. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const op = Aerospike.operations + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.pop('tags', 1) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => yellow + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record.bins.tags) // => { [ 'blue', 'pink' ] } + * client.close() + * }) + * }) + * }) + * }) + */ + export function pop(bin: string, index: number): ListOperation; +/** + * Removes and returns the list elements in the specified range. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the first element in the range. + * @param count - Number of elements in the range; if not specified, the range extends to the end of the list. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.popRange('tags', 0, 2) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => [ 'blue', 'yellow' ] + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record.bins.tags) // => { [ 'pink' ] } + * client.close() + * }) + * }) + * }) + * }) + */ + export function popRange(bin: string, index: number, count?: number): ListOperation; +/** + * Removes the list element at the specified index. + * + * @remarks This operation returns the number of elements removed from the + * list. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the element to be removed + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.remove('tags', 1) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error) => { + * if (error) throw error + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record.bins.tags) // => { [ 'blue', 'pink' ] } + * client.close() + * }) + * }) + * }) + * }) + */ + export function remove(bin: string, index: number): ListOperation; +/** + * Removes the list elements in the specified range. + * + * @remarks This operation returns the number of elements removed from the + * list. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the first element in the range. + * @param count - Number of elements in the range; if not specified, the range extends to the end of the list. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.removeRange('tags', 0, 2) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error) => { + * if (error) throw error + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record.bins.tags) // => { [ 'pink' ] } + * client.close() + * }) + * }) + * }) + * }) + */ + export function removeRange(bin: string, index: number, count?: number): ListOperation; +/** + * Removes a single list element identified by its index from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Zero-based index of the item to remove. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + * + * @example Remove the 2nd item in the list and return its value + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listsTest') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
+ * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async client => { + * await client.put(key, { tags: ['blue', 'yellow', 'pink'] }) + * const ops = [ + * lists.removeByIndex('tags', 1) + * .andReturn(lists.returnType.VALUE) + * ] + * const result = await client.operate(key, ops) + * console.log('Result:', result.bins.tags) // => Result: yellow + * const record = await client.get(key) + * console.log('Record:', record.bins.tags) // => Record: [ 'blue', 'pink' ] + * client.close() + * }) + */ + export function removeByIndex(bin: string, index: number, returnType?: lists.returnType): ListOperation; + /** + * Removes the list elements identified by the index range from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the first element in the range. + * @param [count] - Number of elements in the range; if not specified, + * the range extends to the end of the list. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. 
+ * + * @since v3.4.0 + */ + export function removeByIndexRange(bin: string, index: number, count?: number, returnType?: lists.returnType): InvertibleListOp; + /** + * Removes one or more items identified by a single value from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param value - The list value to remove. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function removeByValue(bin: string, value: AerospikeBinValue, returnType?: lists.returnType): InvertibleListOp; + /** + * Removes one or more items identified by a list of values from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param values - An array of list values to remove. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. 
+ *
+ * @since v3.4.0
+ */
+ export function removeByValueList(bin: string, values: AerospikeBinValue[], returnType?: lists.returnType): InvertibleListOp;
+ /**
+ * Removes one or more items identified by a range of values from the list.
+ * @remarks This operation returns the data specified by returnType.
+ *
+ * @param bin - The name of the bin. The bin must contain a List value.
+ * @param begin - Start values in the range (inclusive). If set to
+ * null, the range includes all values less than the
+ * end value.
+ * @param end - End value in the range (exclusive). If set to
+ * null, the range includes all values greater than or equal to the
+ * begin value.
+ * @param returnType - The {@link lists.returnType|return type}
+ * indicating what data of the removed item(s) to return (if any).
+ * @returns {lists~ListOperation} List operation that can be
+ * used with the {@link Client#operate} command.
+ *
+ * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to
+ * invert the selection of items affected by this operation.
+ * @see Instead of passing returnType, you can also use
+ * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to
+ * select what data to return.
+ *
+ * @since v3.4.0
+ */
+ export function removeByValueRange(bin: string, begin: number | null, end: number | null, returnType?: lists.returnType): InvertibleListOp;
+ /**
+ * Removes list items nearest to value and greater, by relative rank.
+ *
+ * @remarks This operation returns the data specified by returnType.
+ * + * Examples for ordered list [0, 4, 5, 9, 11, 15]: + * + * * (value, rank, count) = [removed items] + * * (5, 0, 2) = [5, 9] + * * (5, 1, 1) = [9] + * * (5, -1, 2) = [4, 5] + * * (3, 0, 1) = [4] + * * (3, 3, 7) = [11, 15] + * * (3, -3, 2) = [] + * + * Without count: + * + * * (value, rank) = [removed items] + * * (5, 0) = [5, 9, 11, 15] + * * (5, 1) = [9, 11, 15] + * * (5, -1) = [4, 5, 9, 11, 15] + * * (3, 0) = [4, 5, 9, 11, 15] + * * (3, 3) = [11, 15] + * * (3, -3) = [0, 4, 5, 9, 11, 15] + * + * Requires Aerospike Server v4.3.0 or later. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param value - Find list items nearest to this value and greater. + * @param rank - Rank of the items to be removed relative to the given value. + * @param count - Number of items to remove. If undefined, the range + * includes all items nearest to value and greater, until the end. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.5.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listKey') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async client => { + * await client.put(key, { list: [0, 4, 5, 9, 11, 15] }) + * let result = await client.operate(key, [ + * lists.removeByValueRelRankRange('list', 3, 3) + * .andReturn(lists.returnType.VALUE)]) + * console.log(result.bins.list) // => [ 11, 15 ] + * let record = await client.get(key) + * console.log(record.bins.list) // => [ 0, 4, 5, 9 ] + * client.close() + * }) + */ + export function removeByValueRelRankRange(bin: string, value: number, rank: number, count?: number, returnType?: lists.returnType): InvertibleListOp; + /** + * Removes a single item identified by its rank value from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param rank - Rank of the item to remove. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function removeByRank(bin: string, rank: number, returnType?: lists.returnType): ListOperation; + /** + * Removes one or more items in the specified rank range from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. 
The bin must contain a List value. + * @param rank - Starting rank. + * @param count - Number of items to remove; if undefined, the range + * includes all items starting from rank. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function removeByRankRange(bin: string, rank: number, count?: number, returnType?: lists.returnType): InvertibleListOp; + /** + * Removes all the elements from the list. + * @remarks This operation returns no result. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ *   policies: {
+ *     read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}),
+ *     write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}),
+ *     operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0})
+ *   }
+ * }
+ * var ops = [
+ *   lists.clear('tags')
+ * ]
+ *
+ * Aerospike.client(config).connect((error, client) => {
+ *   if (error) throw error
+ *   client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => {
+ *     if (error) throw error
+ *     client.operate(key, ops, (error) => {
+ *       if (error) throw error
+ *       client.get(key, (error, record) => {
+ *         if (error) throw error
+ *         console.log(record.bins.tags) // => { [ ] }
+ *         client.close()
+ *       })
+ *     })
+ *   })
+ * })
+ */
+ export function clear(bin: string): ListOperation;
+ /**
+ * Sets the list element at the specified index to a new value.
+ * @remarks This operation returns no result.
+ *
+ * @param bin - The name of the bin. The bin must contain a List value.
+ * @param index - Index of the element to be replaced.
+ * @param value - The new value assigned to the list element.
+ * @param policy - Optional list policy.
+ * @return Operation that can be passed to the {@link Client#operate} command.
+ *
+ * @example
+ *
+ * const Aerospike = require('aerospike')
+ * const lists = Aerospike.lists
+ * const key = new Aerospike.Key('test', 'demo', 'mykey1')
+ * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE!
+ * var config = {
+ *   hosts: '192.168.33.10:3000',
+ *   // Timeouts disabled, latency dependent on server location. Configure as needed.
+ *   policies: {
+ *     read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout: 0}),
+ *     write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout: 0}),
+ *     operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout: 0})
+ *   }
+ * }
+ * var ops = [
+ *   lists.set('tags', 1, 'green')
+ * ]
+ *
+ * Aerospike.client(config).connect((error, client) => {
+ *   if (error) throw error
+ *   client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => {
+ *     if (error) throw error
+ *     client.operate(key, ops, (error) => {
+ *       if (error) throw error
+ *       client.get(key, (error, record) => {
+ *         if (error) throw error
+ *         console.log(record.bins) // => { tags: [ 'blue', 'green', 'pink' ] }
+ *         client.close()
+ *       })
+ *     })
+ *   })
+ * })
+ */
+ export function set(bin: string, index: number, value: AerospikeBinValue, policy?: policy.ListPolicy): ListOperation;
+ /**
+ * Removes all list elements that are not within the specified range.
+ * @remarks This operation returns the number of list elements removed.
+ *
+ * @param bin - The name of the bin. The bin must contain a List value.
+ * @param index - Index of the first element in the range.
+ * @param count - Number of elements in the range.
+ * @returns Operation that can be passed to the {@link Client#operate} command.
+ *
+ * @example
+ *
+ * const Aerospike = require('aerospike')
+ * const lists = Aerospike.lists
+ * const key = new Aerospike.Key('test', 'demo', 'mykey1')
+ * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE!
+ * var config = {
+ *   hosts: '192.168.33.10:3000',
+ *   // Timeouts disabled, latency dependent on server location. Configure as needed.
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.trim('tags', 1, 1) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error) => { + * if (error) throw error + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record.bins.tags) // => { ['yellow'] } + * client.close() + * }) + * }) + * }) + * }) + */ + export function trim(bin: string, index: number, count: number): ListOperation; + /** + * Returns the list element at the specified index. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the element to be returned. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.get('tags', 0) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins) // => { tags: 'blue' } + * client.close() + * + * }) + * }) + * }) + */ + export function get(bin: string, index: number): ListOperation; + /** + * Returns the list element in the specified range. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the first element in the range. + * @param count - Number of elements in the range; if not specified, the range extends to the end of the list. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.getRange('tags', 1) + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => { [ 'yellow', 'pink' ] } + * client.close() + * + * }) + * }) + * }) + */ + export function getRange(bin: string, index: number, count?: number): ListOperation; + /** + * Retrieves a single list element from the list using a specified index. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Zero-based index of the item to retrieve. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + * + * @example Retrieve the 2nd item in the list and return its value + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listsTest') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async client => { + * await client.put(key, { tags: ['blue', 'yellow', 'pink'] }) + * const ops = [ + * lists.getByIndex('tags', 1) + * .andReturn(lists.returnType.VALUE) + * ] + * const result = await client.operate(key, ops) + * console.log('Result:', result.bins.tags) // => Result: yellow + * client.close() + * }) + */ + export function getByIndex(bin: string, index: number, returnType?: lists.returnType): ListOperation; + /** + * Retrieves the list elements identified by the index range from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param index - Index of the first element in the range. + * @param count - Number of elements in the range; if not specified, + * the range extends to the end of the list. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function getByIndexRange(bin: string, index: number, count?: number, returnType?: lists.returnType): InvertibleListOp; + /** + * Retrieves one or more items identified by a single value from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. 
+ * @param value - The list value to retrieve. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function getByValue(bin: string, value: AerospikeBinValue, returnType?: lists.returnType): InvertibleListOp; + /** + * Retrieves one or more items identified by a list of values from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param values - An array of list values to retrieve. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function getByValueList(bin: string, values: AerospikeBinValue[], returnType?: lists.returnType): InvertibleListOp; + /** + * Retrieves one or more items identified by a range of values from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. 
+ * @param begin - Start values in the range (inclusive). If set to + * null, the range includes all values less than the + * end value. + * @param end - End value in the range (exclusive). If set to + * null, the range includes all values greater than or equal to the + * begin value. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function getByValueRange(bin: string, begin: number | null, end: number | null, returnType?: lists.returnType): InvertibleListOp; + /** + * Retrieves list items nearest to value and greater, by relative rank. + * @remarks This operation returns the data specified by returnType. + * + * Examples for ordered list [0, 4, 5, 9, 11, 15]: + * + * * (value, rank, count) = [selected items] + * * (5, 0, 2) = [5, 9] + * * (5, 1, 1) = [9] + * * (5, -1, 2) = [4, 5] + * * (3, 0, 1) = [4] + * * (3, 3, 7) = [11, 15] + * * (3, -3, 2) = [] + * + * Without count: + * + * * (value, rank) = [selected items] + * * (5, 0) = [5, 9, 11, 15] + * * (5, 1) = [9, 11, 15] + * * (5, -1) = [4, 5, 9, 11, 15] + * * (3, 0) = [4, 5, 9, 11, 15] + * * (3, 3) = [11, 15] + * * (3, -3) = [0, 4, 5, 9, 11, 15] + * + * Requires Aerospike Server v4.3.0 or later. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param value - Find list items nearest to this value and greater. + * @param rank - Rank of the items to be retrieved relative to the given value. + * @param count - Number of items to retrieve. 
If undefined, the + * range includes all items nearest to value and greater, until the end. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the selected item(s) to return. + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.5.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'listKey') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config).then(async client => { + * await client.put(key, { list: [0, 4, 5, 9, 11, 15] }) + * await client.operate(key, [ lists.setOrder('list', lists.order.ORDERED) ]) + * let result = await client.operate(key, [ + * lists.getByValueRelRankRange('list', 5, -1, 2) + * .andReturn(lists.returnType.VALUE)]) + * console.log(result.bins.list) // => [ 4, 5 ] + * client.close() + * }) + */ + export function getByValueRelRankRange(bin: string, value: number, rank: number, count?: number, returnType?: lists.returnType): InvertibleListOp; + /** + * Retrieves a single item identified by its rank value from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. 
+ * @param rank - Rank of the item to retrieve. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function getByRank(bin: string, rank: number, returnType?: lists.returnType): ListOperation; + /** + * Retrieves one or more items in the specified rank range from the list. + * @remarks This operation returns the data specified by returnType. + * + * @param bin - The name of the bin. The bin must contain a List value. + * @param rank - Starting rank. + * @param count - Number of items to retrieve. If undefined, the + * range includes all items starting from rank. + * @param returnType - The {@link lists.returnType|return type} + * indicating what data of the removed item(s) to return (if any). + * @returns {lists~ListOperation} List operation that can be + * used with the {@link Client#operate} command. + * + * @see Use {@link lists~ListOperation#invertSelection|ListOperation#invertSelection} to + * invert the selection of items affected by this operation. + * @see Instead of passing returnType, you can also use + * {@link lists~ListOperation#andReturn|ListOperation#andReturn} to + * select what data to return. + * + * @since v3.4.0 + */ + export function getByRankRange(bin: string, rank: number, count?: number, returnType?: lists.returnType): InvertibleListOp; + /** + * Increments the value at the given list index and returns the new value after + * increment. + * + * @param bin - The name of the bin. 
The bin must contain a List value. + * @param index - Index of the list element to increment. + * @param value - Value to increment the element by. Default is 1. + * @param policy - Optional list policy. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @since v2.4 + * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * + * var ops = [ + * lists.increment('counters', 1, 3) + * ] + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { counters: [1, 2, 3] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result['bins']['counters']) // => 5 + * client.get(key, (error, record) => { + * if (error) throw error + * console.log(record['bins']['counters']) // => { [1, 5, 3] } + * client.close() + * }) + * }) + * }) + * }) + */ + export function increment(bin: string, index: number, value?: number, policy?: policy.ListPolicy): ListOperation; + /** + * Returns the element count of the list + * + * @param bin - The name of the bin. The bin must contain a List value. + * @returns Operation that can be passed to the {@link Client#operate} command. 
+ * + * @example + * + * const Aerospike = require('aerospike') + * const lists = Aerospike.lists + * const key = new Aerospike.Key('test', 'demo', 'mykey1') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * var ops = [ + * lists.size('tags') + * ] + * + * Aerospike.client(config).connect((error, client) => { + * if (error) throw error + * client.put(key, { tags: ['blue', 'yellow', 'pink'] }, (error) => { + * if (error) throw error + * client.operate(key, ops, (error, result) => { + * if (error) throw error + * console.log(result.bins.tags) // => { 3 } + * client.close() + * }) + * }) + * }) + */ + export function size(bin: string): ListOperation; +} + +export namespace maps { + + + /** + * Map storage order. + */ + export enum order { + /** + * Map is not ordered. This is the default. + */ + UNORDERED, + /** + * Order map by key. + */ + KEY_ORDERED, + /** + * Order map by key, then value. + */ + KEY_VALUE_ORDERED = 3 + } + /** + * Map write mode. + * + * @remarks Write mode is used to determine the criteria for a successful operation. + * + * Map write mode should only be used for server versions prior to v4.3. For + * server versions v4.3 or later, the use of {@link maps.writeFlags|writeFlags} is recommended. + * + * + * @deprecated since v3.5.0 + */ + export enum writeMode { + /** + * If the key already exists, the item will be + * overwritten. If the key does not exist, a new item will be created. This is + * the default write mode. + */ + UPDATE, + /** + * If the key already exists, the item will be + * overwritten. If the key does not exist, the write will fail. 
+ */ + UPDATE_ONLY, + /** + * If the key already exists, the write will + * fail. If the key does not exist, a new item will be created. + */ + CREATE_ONLY + } + + /** + * Map write flags. + * + * @remarks Write flags are used to determine the criteria for a successful operation. + * + * Map write flags require server version v4.3 or later. For earier server + * versions, set the {@link maps.writeMode|writeMode} instead. + * + * + * @since v3.5.0 + */ + export enum writeFlags { + /** + * Allow create or update. Default. + */ + DEFAULT, + /** + * If the key already exists, the item will be + * denied. If the key does not exist, a new item will be created. + */ + CREATE_ONLY, + /** + * If the key already exists, the item will be + * overwritten. If the key does not exist, the item will be denied. + */ + UPDATE_ONLY, + /** + * Do not raise error, if map item is denied due + * to write flag constraints. + */ + NO_FAIL, + /** + * Allow other valid map items to be committed, if + * a map item is denied due to write flag constraints. + */ + PARTIAL + } + + export enum returnType { + /** + * Do not return a result; this is the default. + */ + NONE, + /** + * Return key index order. (0 = first key, 1 = + * second key, ...) + */ + INDEX, + /** + * Return reverse key order. (0 = last key, + * -1 = second last key, ...) + */ + REVERSE_INDEX, + /** + * Return value order. (0 = smallest value, 1 = + * second smallest value, ...) + */ + RANK, + /** + * Return reverse value order. (0 = largest + * value, -1 = second largest value, ...) + */ + REVERSE_RANK, + /** + * Return count of items selected. + */ + COUNT, + /** + * Return key for single key read and key list for + * range read. + */ + KEY, + /** + * Return value for single key read and value list + * for range read. + */ + VALUE, + /** + * Return map items keys and values as an Array. + * i.e. [key1, value1, key2, value2, ...]. + */ + KEY_VALUE, + /** + * Return true if count > 0. 
+ */ + EXISTS = 13, + /** + * Return an unordered map. + */ + UNORDERED_MAP = 16, + /** + * Return an ordered map. + */ + ORDERED_MAP, + /** + * Invert meaning of map command and return values. Let's take {@link removeByKeyRange} for example. + * + * + * With INVERTED enabled, the keys outside of the specified key range will be + * removed and returned. + */ + INVERTED = 0x10000, + } + + export class MapOperation extends operations.Operation { + andReturn(returnType: maps.returnType): MapOperation; + public withContext(contextOrFunction: cdt.Context | Function): MapOperation; + } + /** + * Sets map policy attributes. + * + * This operation does not return any result. + * + * @param bin - The name of the bin. The bin must contain a Map value. + * @param policy - The map policy. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function setPolicy(bin: string, policy: policy.MapPolicy): MapOperation; + /** + * Creates map create operation. + * + * @param bin - bin name. + * @param order - map order. + * @param persistIndex - if true, persist map index. A map index improves lookup performance, but requires more storage. + * A map index can be created for a top-level ordered map only. Nested and unordered map indexes are not supported. + * @param ctx - optional path to nested map. If not defined, the top-level map is used. + * + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const maps = Aerospike.maps + * const key = new Aerospike.Key('test', 'demo', 'mapKey') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+   * policies: {
+   *   operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0})
+   * }
+   * }
+   *
+   * Aerospike.connect(config).then(async client => {
+   *   let ops = [
+   *     maps.create('map', maps.order.KEY_ORDERED, true)
+   *   ]
+   *   let result = await client.operate(key, ops)
+   *   console.log(result.bins) // => { map: null }
+   *   let record = await client.get(key)
+   *   console.log(record.bins) // => { map: {} }
+   *
+   *   await client.remove(key)
+   *   client.close()
+   * })
+   */
+  export function create(bin: string, order?: maps.order, persistIndex?: boolean, ctx?: cdt.Context): MapOperation;
+  /**
+   * Writes a key/value item to the map.
+   *
+   * @remarks Depending on the map policy and whether an entry with the same
+   * key already exists in the map, a new key will be added to the map or the
+   * existing entry with the same key will be updated. If the bin does not yet
+   * contain a map value, a new map may be created.
+   *
+   * This operation returns the new size of the map.
+   *
+   * @param bin - The name of the bin. If the bin exists, it must
+   * contain a Map value; if it does not yet exist, a new Map may be created
+   * depending on the map policy's write mode.
+   * @param key - Map key to write.
+   * @param value - Map value to write.
+   * @param policy - The map policy.
+   *
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   */
+  export function put(bin: string, key: string, value: AerospikeBinValue, policy?: policy.MapPolicy): MapOperation;
+  /**
+   * Writes each entry of the given map to the map bin on the server.
+   *
+   * @remarks For each item, depending on the map policy and whether an entry with the same
+   * key already exists in the map, a new entry will be added to the map or the
+   * existing entry with the same key will be updated. If the bin does not yet
+   * contain a map value, a new map may be created.
+   *
+   * This operation returns the new size of the map.
+   *
+   * @param bin - The name of the bin.
If the bin exists, it must
+   * contain a Map value; if it does not yet exist, a new Map may be created
+   * depending on the map policy's write mode.
+   * @param items - One or more key value pairs to write to the map.
+   * @param policy - The map policy.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   */
+  export function putItems(bin: string, items: AerospikeBins | Map<string, AerospikeBinValue>, policy?: policy.MapPolicy): MapOperation;
+  /**
+   * Increments the map entry identified by the given key by the value
+   * incr. Valid only for numeric values.
+   *
+   * @remarks If a map entry with the given key does not exist, the map
+   * policy's write mode determines whether a new entry will be created same as
+   * for the {@link maps.put|put} command. This operation may
+   * create a new map if the map bin is currently empty.
+   *
+   * This operation returns the new value of the map entry.
+   *
+   * @param bin - The name of the bin. If the bin exists, it must
+   * contain a Map value; if it does not yet exist, a new Map may be created
+   * depending on the map policy's write mode.
+   * @param key - The map key.
+   * @param incr - The value to increment the map entry by. Use negative
+   * value to decrement map entry.
+   * @param policy - The map policy.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   */
+  export function increment(bin: string, key: string, incr?: number, policy?: policy.MapPolicy): MapOperation;
+  /**
+   * Decrements the map entry identified by the given key by the value
+   * decr. Valid only for numeric values.
+   *
+   * @remarks If a map entry with the given key does not exist, the map
+   * policy's write mode determines whether a new entry will be created same as
+   * for the {@link maps.put|put} command. This operation may
+   * create a new map if the map bin is currently empty.
+   *
+   * This operation returns the new value of the map entry.
+   *
+   * @param bin - The name of the bin.
If the bin exists, it must + * contain a Map value; if it does not yet exist, a new Map may be created + * depending on the map policy's write mode. + * @param key - The map key. + * @param decr - The value to decrement the map entry by. + * @param policy - The map policy. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @deprecated since v4.0.0 - use increment function with negative value instead. + */ + export function decrement(bin: string, key: string, decr: number, policy?: policy.MapPolicy): MapOperation; + /** + * Removes all items in the map. + * + * @remarks This operation does not return any result. + * + * @param bin - The name of the bin. If the bin exists, it must + * contain a Map value; if it does not yet exist, a new Map may be created + * depending on the map policy's write mode. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function clear(bin: string): MapOperation; + /** + * Removes a single item identified by key from the map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param key - The map key. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByKey(bin: string, key: string, returnType?: maps.returnType): MapOperation; + /** + * Removes one or more items identified by key from the map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param keys - An array of map keys. 
+ * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByKeyList(bin: string, keys: string[], returnType?: maps.returnType): MapOperation; + /** + * Removes one or more items identified by a range of keys from the + * map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param {string} bin - The name of the bin, which must contain a Map value. + * @param {?any} begin - Start key in the range (inclusive). If set to + * null, the range includes all keys less than the + * end key. + * @param {?any} end - End key in the range (exclusive). If set to + * null, the range includes all keys greater than or equal to the + * begin key. + * @param {number} [returnType] - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns {Object} Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByKeyRange(bin: string, begin: string | null, end: string | null, returnType?: maps.returnType): MapOperation; + /** + * Removes map items nearest to key and greater, by index, from the + * map. + * + * @remarks This operation returns the removed data specified by + * returnType. 
+ * + * Examples for map { a: 17, e: 2, f: 15, j: 10 }: + * + * * (value, index, count) = [removed items] + * * ('f', 0, 1) = { f: 15 } + * * ('f', 1, 2) = { j: 10 } + * * ('f', -1, 1) = { e: 2 } + * * ('b', 2, 1) = { j: 10 } + * * ('b', -2, 2) = { a: 17 } + * + * Without count: + * + * * (value, index) = [removed items] + * * ('f', 0) = { f: 15, j: 10 } + * * ('f', 1) = { j: 10 } + * * ('f', -1) = { e: 2, f: 15, j: 10 } + * * ('b', 2) = { j: 10 } + * * ('b', -2) = { a: 17, e: 2, f: 15, j: 10 } + * + * Requires Aerospike Server v4.3.0 or later. + * + * @param {string} bin - The name of the bin, which must contain a Map value. + * @param {any} key - Find map items nearest to this key and greater. + * @param {number} index - Index of items to be removed relative to the given key. + * @param {number} [count] - Number of items to remove. If undefined, the range + * includes all items nearest to key and greater, until the end. + * @param {number} [returnType] - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns {Object} Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + * + * @since v3.5.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const maps = Aerospike.maps + * const key = new Aerospike.Key('test', 'demo', 'mapKey') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+   * policies: {
+   *   read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0})
+   * }
+   * }
+   * Aerospike.connect(config)
+   *   .then(async client => {
+   *     await client.put(key, { map: { a: 17, e: 2, f: 15, j: 10 } })
+   *     let result = await client.operate(key, [
+   *       maps.removeByKeyRelIndexRange('map', 'f', -1, 1)
+   *         .andReturn(maps.returnType.KEY_VALUE)])
+   *     console.info(result.bins.map) // => [ 'e', 2 ]
+   *     let record = await client.get(key)
+   *     console.info(record.bins.map) // => { a: 17, f: 15, j: 10 }
+   *     client.close()
+   *   })
+   */
+  export function removeByKeyRelIndexRange(bin: string, key: string, index: number, count?: number, returnType?: maps.returnType): MapOperation;
+  /**
+   * Removes one or more items identified by a single value from the
+   * map.
+   *
+   * @remarks This operation returns the removed data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param value - The map value.
+   * @param returnType - The {@link maps.returnType|return type}
+   * indicating what data of the affected item(s) to return (if any).
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function removeByValue(bin: string, value: AerospikeBinValue, returnType?: maps.returnType): MapOperation;
+  /**
+   * Removes one or more items identified by a list of values from the
+   * map.
+   *
+   * @remarks This operation returns the removed data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param values - An array of map values.
+ * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByValueList(bin: string, values: AerospikeBinValue[], returnType?: maps.returnType): MapOperation; + /** + * Removes one or more items identified by a range of values from the + * map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param begin - Start values in the range (inclusive). If set to + * null, the range includes all values less than the + * end value. + * @param end - End value in the range (exclusive). If set to + * null, the range includes all values greater than or equal to the + * begin value. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByValueRange(bin: string, begin: number | null, end: number | null, returnType?: maps.returnType): MapOperation; + /** + * Removes map items nearest to value and greater, by relative rank. + * + * This operation returns the removed data specified by + * returnType. 
+ * + * Examples for map { e: 2, j: 10, f: 15, a: 17 }: + * + * * (value, rank, count) = [removed items] + * * (11, 1, 1) = { a: 17 } + * * (11, -1, 1) = { j: 10 } + * + * Without count: + * + * * (value, rank) = [removed items] + * * (11, 1) = { a: 17 } + * * (11, -1) = { j: 10, f: 15, a: 17 } + * + * @param bin - The name of the bin, which must contain a Map value. + * @param value - Find map items nearest to this value and greater. + * @param rank - Rank of items to be removed relative to the given value. + * @param count - Number of items to remove. If undefined, the range + * includes all items nearest to value and greater, until the end. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + * + * @since v3.5.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const maps = Aerospike.maps + * const key = new Aerospike.Key('test', 'demo', 'mapKey') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+ * policies: { + * read : new Aerospike.ReadPolicy({socketTimeout : 0, totalTimeout : 0}), + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config) + * .then(async client => { + * await client.put(key, { map: { e: 2, j: 10, f: 15, a: 17 } }) + * let result = await client.operate(key, [ + * maps.removeByValueRelRankRange('map', 11, -1) + * .andReturn(maps.returnType.KEY_VALUE)]) + * console.info(result.bins.map) // => [ 'j', 10, 'f', 15, 'a', 17 ] + * let record = await client.get(key) + * console.info(record.bins.map) // => { e: 2 } + * client.close() + * }) + */ + export function removeByValueRelRankRange(bin: string, value: number, rank: number, count?: number, returnType?: maps.returnType): MapOperation; + /** + * Removes a single item identified by its index value from the map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param {string} bin - The name of the bin, which must contain a Map value. + * @param {number} index - Index of the entry to remove. + * @param {number} [returnType] - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns {Object} Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByIndex(bin: string, index: number, returnType?: maps.returnType): MapOperation; + /** + * Removes one or more items in the specified index range from the + * map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param index - Starting index. + * @param count - Number of items to delete. 
If undefined, the range + * includes all items starting from index. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByIndexRange(bin: string, index: number, count?: number | null, returnType?: maps.returnType): MapOperation; + /** + * Removes a single item identified by its rank value from the map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param rank - Rank of the item to remove. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByRank(bin: string, rank: number, returnType?: maps.returnType): MapOperation; + /** + * Removes one or more items in the specified rank range from the map. + * + * @remarks This operation returns the removed data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param rank - Starting rank. + * @param count - Number of items to delete. If undefined, the range + * includes all items starting from rank. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the affected item(s) to return (if any). + * @returns Operation that can be passed to the {@link Client#operate} command. 
+ * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function removeByRankRange(bin: string, rank: number, count?: number | null, returnType?: maps.returnType): MapOperation; + /** + * Returns the size of the map. + * + * @param bin - The name of the bin, which must contain a Map value. + * @returns Operation that can be passed to the {@link Client#operate} command. + */ + export function size(bin: string): MapOperation; + /** + * Retrieves a single item identified by key from the map. + * + * @remarks This operation returns the data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param key - The map key. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the selected item(s) to return. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function getByKey(bin: string, key: string, returnType?: maps.returnType): MapOperation; + /** + * Retrieves map items identified by keys list. + * + * @remarks This operation returns the data specified by + * returnType. + * + * @param bin - The name of the bin, which must contain a Map value. + * @param keys - The map keys. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the selected item(s) to return. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. 
+   */
+  export function getByKeyList(bin: string, keys: string[], returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves one or more items identified by a range of keys from the
+   * map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param begin - Start key in the range (inclusive). If set to
+   * null, the range includes all keys less than the
+   * end key.
+   * @param end - End key in the range (exclusive). If set to
+   * null, the range includes all keys greater than or equal to the
+   * begin key.
+   * @param returnType - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function getByKeyRange(bin: string, begin: string | null, end: string | null, returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves map items nearest to key and greater, by index, from the
+   * map.
+   *
+   * @remarks This operation returns the selected data specified by
+   * returnType.
+   *
+   * Examples for map { a: 17, e: 2, f: 15, j: 10 }:
+   *
+   * * (value, index, count) = [selected items]
+   * * ('f', 0, 1) = { f: 15 }
+   * * ('f', 1, 2) = { j: 10 }
+   * * ('f', -1, 1) = { e: 2 }
+   * * ('b', 2, 1) = { j: 10 }
+   * * ('b', -2, 2) = { a: 17 }
+   *
+   * Without count:
+   *
+   * * (value, index) = [selected items]
+   * * ('f', 0) = { f: 15, j: 10 }
+   * * ('f', 1) = { j: 10 }
+   * * ('f', -1) = { e: 2, f: 15, j: 10 }
+   * * ('b', 2) = { j: 10 }
+   * * ('b', -2) = { a: 17, e: 2, f: 15, j: 10 }
+   *
+   * Requires Aerospike Server v4.3.0 or later.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param key - Find map items nearest to this key and greater.
+ * @param index - Index of items to be retrieved relative to the given key. + * @param count - Number of items to retrieve. If undefined, the + * range includes all items nearest to key and greater, until the end. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the selected item(s) to return. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + * + * @since v3.5.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const maps = Aerospike.maps + * const key = new Aerospike.Key('test', 'demo', 'mapKey') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}), + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * Aerospike.connect(config) + * .then(async client => { + * await client.put(key, { map: { a: 17, e: 2, f: 15, j: 10 } }) + * let result = await client.operate(key, [ + * maps.getByKeyRelIndexRange('map', 'b', 2, 1) + * .andReturn(maps.returnType.KEY_VALUE)]) + * console.info(result.bins.map) // => [ 'j', 10 ] + * client.close() + * }) + */ + export function getByKeyRelIndexRange(bin: string, key: string, index: number, count?: number, returnType?: maps.returnType): MapOperation; + /** + * Retrieves one or more items identified by a single value from the + * map. + * + * @remarks This operation returns the data specified by + * returnType. + * + * @param {string} bin - The name of the bin, which must contain a Map value. + * @param {any} value - The map value. 
+   * @param {number} [returnType] - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns {Object} Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function getByValue(bin: string, value: AerospikeBinValue, returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves map items identified by values from the
+   * map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param values - The map values.
+   * @param returnType - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function getByValueList(bin: string, values: AerospikeBinValue[], returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves one or more items identified by a range of values from
+   * the map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param begin - Start values in the range (inclusive). If set to
+   * null, the range includes all values less than the
+   * end value.
+   * @param end - End value in the range (exclusive). If set to
+   * null, the range includes all values greater than or equal to the
+   * begin value.
+   * @param returnType - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+ * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + */ + export function getByValueRange(bin: string, begin: number | null, end: number | null, returnType?: maps.returnType): MapOperation; + /** + * Retrieves map items nearest to value and greater, by relative rank. + * + * @remarks This operation returns the selected data specified by + * returnType. + * + * Examples for map { e: 2, j: 10, f: 15, a: 17 }: + * + * * (value, rank, count) = [selected items] + * * (11, 1, 1) = { a: 17 } + * * (11, -1, 1) = { j: 10 } + * + * Without count: + * + * * (value, rank) = [selected items] + * * (11, 1) = { a: 17 } + * * (11, -1) = { j: 10, f: 15, a: 17 } + * + * @param bin - The name of the bin, which must contain a Map value. + * @param value - Find map items nearest to this value and greater. + * @param rank - Rank of items to be retrieved relative to the given value. + * @param count - Number of items to retrieve. If undefined, the + * range includes all items nearest to value and greater, until the end. + * @param returnType - The {@link maps.returnType|return type} + * indicating what data of the selected item(s) to return. + * @returns Operation that can be passed to the {@link Client#operate} command. + * + * @see Instead of passing returnType, you can also use + * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to + * select what data to return. + * + * @since v3.5.0 + * + * @example + * + * const Aerospike = require('aerospike') + * const maps = Aerospike.maps + * const key = new Aerospike.Key('test', 'demo', 'mapKey') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. 
+   * policies: {
+   *   write : new Aerospike.WritePolicy({socketTimeout : 0, totalTimeout : 0}),
+   *   operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0})
+   * }
+   * }
+   * Aerospike.connect(config)
+   *   .then(async client => {
+   *     await client.put(key, { map: { e: 2, j: 10, f: 15, a: 17 } })
+   *     let result = await client.operate(key, [
+   *       maps.getByValueRelRankRange('map', 11, 1, 1)
+   *         .andReturn(maps.returnType.KEY_VALUE)])
+   *     console.info(result.bins.map) // => [ 'a', 17 ]
+   *     client.close()
+   *   })
+   */
+  export function getByValueRelRankRange(bin: string, value: number, rank: number, count?: number, returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves a single item identified by its index value from the
+   * map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param index - Index of the entry to retrieve.
+   * @param returnType - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function getByIndex(bin: string, index: number, returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves one or more items in the specified index range from the
+   * map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param {string} bin - The name of the bin, which must contain a Map value.
+   * @param {number} index - Starting index.
+   * @param {number} [count] - Number of items to retrieve. If undefined, the range
+   * includes all items starting from index.
+   * @param {number} [returnType] - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns {Object} Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function getByIndexRange(bin: string, index: number, count?: number | null, returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves a single item identified by its rank value from the map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param {string} bin - The name of the bin, which must contain a Map value.
+   * @param {number} rank - Rank of the entry to retrieve.
+   * @param {number} [returnType] - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns {Object} Operation that can be passed to the {@link Client#operate} command.
+   *
+   * @see Instead of passing returnType, you can also use
+   * {@link maps~MapOperation#andReturn|MapOperation#andReturn} to
+   * select what data to return.
+   */
+  export function getByRank(bin: string, rank: number, returnType?: maps.returnType): MapOperation;
+  /**
+   * Retrieves one or more items in the specified rank range from the
+   * map.
+   *
+   * @remarks This operation returns the data specified by
+   * returnType.
+   *
+   * @param bin - The name of the bin, which must contain a Map value.
+   * @param rank - Starting rank.
+   * @param count - Number of items to retrieve; if not specified, the
+   * range includes all items starting from rank.
+   * @param returnType - The {@link maps.returnType|return type}
+   * indicating what data of the selected item(s) to return.
+   * @returns Operation that can be passed to the {@link Client#operate} command.
+ */ + export function getByRankRange(bin: string, rank: number, count?: number | null, returnType?: maps.returnType): MapOperation; + +} + +export namespace exp { + export namespace bit { + export const reSize: (bin: AerospikeExp, flags: bitwise.resizeFlags, byteSize: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const insert: (bin: AerospikeExp, value: AerospikeExp, byteOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const remove: (bin: AerospikeExp, byteSize: AerospikeExp, byteOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const set: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const or: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const xor: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const and: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const not: (bin: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const lShift: (bin: AerospikeExp, shift: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const rShift: (bin: AerospikeExp, shift: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const add: (bin: AerospikeExp, action: bitwise.overflow, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const subtract: (bin: AerospikeExp, action: bitwise.overflow, value: AerospikeExp, bitSize: AerospikeExp, 
bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const setInt: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp, policy?: policy.BitwisePolicy) => AerospikeExp; + export const get: (bin: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp) => AerospikeExp; + export const count: (bin: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp) => AerospikeExp; + export const lScan: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp) => AerospikeExp; + export const rScan: (bin: AerospikeExp, value: AerospikeExp, bitSize: AerospikeExp, bitOffset: AerospikeExp) => AerospikeExp; + export const getInt: (bin: AerospikeExp, sign: boolean, bitSize: AerospikeExp, bitOffset: AerospikeExp) => AerospikeExp; + } + + export namespace hll { + export const initMH: (bin: AerospikeExp, mhBitCount: number, indexBitCount: number, policy?: policy.HLLPolicy) => AerospikeExp; + export const init: (bin: AerospikeExp, indexBitCount: number, policy?: policy.HLLPolicy) => AerospikeExp; + export const addMH: (bin: AerospikeExp, mhBitCount: number, indexBitCount: number, list: AerospikeExp, policy?: policy.HLLPolicy) => AerospikeExp; + export const add: (bin: AerospikeExp, indexBitCount: number, list: AerospikeExp, policy?: policy.HLLPolicy) => AerospikeExp; + export const update: (bin: AerospikeExp, list: AerospikeExp, policy?: policy.HLLPolicy) => AerospikeExp; + export const getCount: (bin: AerospikeExp) => AerospikeExp; + export const getUnion: (bin: AerospikeExp, list: AerospikeExp) => AerospikeExp; + export const getUnionCount: (bin: AerospikeExp, list: AerospikeExp) => AerospikeExp; + export const getIntersectCount: (bin: AerospikeExp, list: AerospikeExp) => AerospikeExp; + export const getSimilarity: (bin: AerospikeExp, list: AerospikeExp) => AerospikeExp; + export const describe: (bin: AerospikeExp) => AerospikeExp; + export const mayContain: (bin: AerospikeExp, list: 
AerospikeExp) => AerospikeExp; + } + + namespace listsExp { + export const size: (bin: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValue: (bin: AerospikeExp, value: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValueRange: (bin: AerospikeExp, begin: AerospikeExp, end: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValueList: (bin: AerospikeExp, value: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRelRankRangeToEnd: (bin: AerospikeExp, value: AerospikeExp, rank: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRelRankRange: (bin: AerospikeExp, value: AerospikeExp, rank: AerospikeExp, count: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByIndex: (bin: AerospikeExp, index: AerospikeExp, valueType: type, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByIndexRangeToEnd: (bin: AerospikeExp, index: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByIndexRange: (bin: AerospikeExp, index: AerospikeExp, count: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRank: (bin: AerospikeExp, rank: AerospikeExp, valueType: type, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRankRange: (bin: AerospikeExp, rank: AerospikeExp, count: AerospikeExp, returnType: lists.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const append: (bin: AerospikeExp, value: AerospikeExp, policy?: policy.ListPolicy | 
null, ctx?: cdt.Context | null) => AerospikeExp; + export const appendItems: (bin: AerospikeExp, value: AerospikeExp, policy?: policy.ListPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const insert: (bin: AerospikeExp, value: AerospikeExp, idx: AerospikeExp, policy?: policy.ListPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const insertItems: (bin: AerospikeExp, value: AerospikeExp, idx: AerospikeExp, policy?: policy.ListPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const increment: (bin: AerospikeExp, value: AerospikeExp, idx: AerospikeExp, policy?: policy.ListPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const set: (bin: AerospikeExp, value: AerospikeExp, idx: AerospikeExp, policy?: policy.ListPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const clear: (bin: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const sort: (bin: AerospikeExp, order: lists.sortFlags, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByValue: (bin: AerospikeExp, value: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByValueList: (bin: AerospikeExp, values: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByValueRange: (bin: AerospikeExp, end: AerospikeExp, begin: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByRelRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, value: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByRelRankRange: (bin: AerospikeExp, count: AerospikeExp, rank: AerospikeExp, value: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByIndex: (bin: AerospikeExp, idx: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByIndexRangeToEnd: (bin: AerospikeExp, idx: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByIndexRange: (bin: 
AerospikeExp, count: AerospikeExp, idx: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByRank: (bin: AerospikeExp, rank: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByRankRange: (bin: AerospikeExp, count: AerospikeExp, rank: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + } + + namespace mapsExp { + export const put: (bin: AerospikeExp, value: AerospikeExp, key: AerospikeExp, policy?: policy.MapPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const putItems: (bin: AerospikeExp, map: AerospikeExp, policy?: policy.MapPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const increment: (bin: AerospikeExp, value: AerospikeExp, key: AerospikeExp, policy?: policy.MapPolicy | null, ctx?: cdt.Context | null) => AerospikeExp; + export const clear: (bin: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const removeByKey: (bin: AerospikeExp, key: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByKeyList: (bin: AerospikeExp, keys: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByKeyRange: (bin: AerospikeExp, end: AerospikeExp, begin: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByKeyRelIndexRangeToEnd: (bin: AerospikeExp, idx: AerospikeExp, key: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByKeyRelIndexRange: (bin: AerospikeExp, count: AerospikeExp, idx: AerospikeExp, key: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByValue: (bin: AerospikeExp, value: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => 
AerospikeExp; + export const removeByValueList: (bin: AerospikeExp, values: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByValueRange: (bin: AerospikeExp, end: AerospikeExp, begin: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByValueRelRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, value: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByValueRelRankRange: (bin: AerospikeExp, count: AerospikeExp, rank: AerospikeExp, value: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByIndex: (bin: AerospikeExp, idx: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByIndexRangeToEnd: (bin: AerospikeExp, idx: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByIndexRange: (bin: AerospikeExp, count: AerospikeExp, idx: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByRank: (bin: AerospikeExp, rank: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const removeByRankRange: (bin: AerospikeExp, count: AerospikeExp, rank: AerospikeExp, ctx?: cdt.Context | null, returnType?: maps.returnType) => AerospikeExp; + export const size: (bin: AerospikeExp, ctx?: cdt.Context | null) => AerospikeExp; + export const getByKey: (bin: AerospikeExp, key: AerospikeExp, valueType: type, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByKeyRange: (bin: AerospikeExp, end: AerospikeExp, begin: AerospikeExp, returnType: maps.returnType, ctx?: 
cdt.Context | null) => AerospikeExp; + export const getByKeyList: (bin: AerospikeExp, keys: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByKeyRelIndexRangeToEnd: (bin: AerospikeExp, idx: AerospikeExp, key: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByKeyRelIndexRange: (bin: AerospikeExp, count: AerospikeExp, idx: AerospikeExp, key: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValue: (bin: AerospikeExp, value: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValueRange: (bin: AerospikeExp, end: AerospikeExp, begin: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValueList: (bin: AerospikeExp, values: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValueRelRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, value: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByValueRelRankRange: (bin: AerospikeExp, count: AerospikeExp, rank: AerospikeExp, value: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByIndex: (bin: AerospikeExp, idx: AerospikeExp, valueType: type, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByIndexRangeToEnd: (bin: AerospikeExp, idx: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByIndexRange: (bin: AerospikeExp, count: AerospikeExp, idx: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRank: (bin: AerospikeExp, rank: AerospikeExp, valueType: type, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const 
getByRankRangeToEnd: (bin: AerospikeExp, rank: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + export const getByRankRange: (bin: AerospikeExp, count: AerospikeExp, rank: AerospikeExp, returnType: maps.returnType, ctx?: cdt.Context | null) => AerospikeExp; + } + + namespace operationsExp { + // exp_operations.js + enum ExpOperations { + WRITE = 1280, + READ + } + export class ExpOperation extends operations.Operation { + public exp: AerospikeExp; + public flags: number; + constructor(op: ExpOperations, bin: string, exp: AerospikeExp, flags: number, props?: Record); + } + export const read: (bin: string, exp: AerospikeExp, flags?: number) => ExpOperation; + export const write: (bin: string, value: AerospikeExp, flags?: number) => ExpOperation; + } + export {mapsExp as maps, listsExp as lists, operationsExp as operations} + + export enum expReadFlags { + DEFAULT, + EVAL_NO_FAIL = 16 + } + + export enum expWriteFlags { + DEFAULT, + CREATE_ONLY, + UPDATE_ONLY, + ALLOW_DELETE = 4, + POLICY_NO_FAIL = 8, + EVAL_NO_FAIL = 16 + } + + export enum type { + NIL, + // BOOL - no boolean type in src/main/enums/exp_enum.cc#L127 + INT = 2, + STR, + LIST, + MAP, + BLOB, + FLOAT, + GEOJSON, + HLL, + + AUTO, + ERROR + } + + // Types for expresssions + type _valueExp = (value: T) => AerospikeExp; + type _keyTypeExp = () => AerospikeExp; + type _binTypeExp = (binName: string) => AerospikeExp; + type _metaExp = () => AerospikeExp; + type _nilExp = () => AerospikeExp; + type _infExp = () => AerospikeExp; + type _wildcardExp = () => AerospikeExp + type _cmpExp = (left: AerospikeExp, right: AerospikeExp) => AerospikeExp; + type _VAExp = (...expr: AerospikeExp[]) => AerospikeExp; + + // Scalar expressions + /** + * Create boolean value. + * + * @function + * @param {boolean} value boolean value. + */ + export const bool: _valueExp; + /** + * Create 64 bit signed integer value. + * + * @function + * @param {number} number value integer value. 
+ * @return {AerospikeExp} + */ + export const int: _valueExp; + /** + * Create 64 bit unsigned integer value. + * + * @function + * @param {number} number value unsigned integer value. + * @return {AerospikeExp} + */ + export const uint: _valueExp; + /** + * Create 64 bit floating point value. + * + * @function + * @param {number} value floating point value. + * @return {AerospikeExp} + */ + export const float: _valueExp; + /** + * Create string value. + * + * @function + * @param {string} value string value. + * @return {AerospikeExp} + */ + export const str: _valueExp; + /** + * Create byte array value. + * * + * @function + * @param {string[]} value byte array value. + * @param {number} size number of bytes. + * @return {AerospikeExp} + */ + export const bytes: (value: string[] | Buffer, size?: number) => AerospikeExp; + /** + * Create geojson value. + * + * @function + * @param {Object} value geojson value. + * @return {AerospikeExp} + */ + export const geo: _valueExp; + /** + * Create list value. + * + * @function + * @param {array} value list value + * @return {AerospikeExp} + */ + export const list: _valueExp; + /** + * Create map value. + * + * @function + * @param {array} value map value + * @return {AerospikeExp} + */ + export const map: _valueExp>; + /** + * Create 'nil' value. + * + * @function + * @return {AerospikeExp} + */ + export const nil: _nilExp; + /** + * Create 'inf' value. + * + * @function + */ + export const inf: _infExp; + /** + * Create 'wildcard' value. + * + * @function + */ + export const wildcard: _wildcardExp; + /** + * Create expression that returns the key as an integer. Returns 'unknown' if + * the key is not an integer. + * + * @function + * @param integer value Integer value of the key if the key is an integer. + */ + export const keyInt: _keyTypeExp; + /** + * Create expression that returns the key as an string. Returns 'unknown' if + * the key is not a string. 
+ * + * @function + * @param {string} string value String value of the key if the key is a string. + * @return + */ + export const keyStr: _keyTypeExp; + /** + * Create expression that returns the key as an blob. Returns 'unknown' if + * the key is not an blob. + * + * @function + * @param {Object} blob Blob value of the key if the key is a blob. + * @return {AerospikeExp} + */ + export const keyBlob: _keyTypeExp; + /** + * Create expression that returns if the primary key is stored in the record meta + * data as a boolean expression. This would occur when "policy write key" is + * SEND on record write. + * + * @function + * @param {boolean} - value True if the record has a stored key, false otherwise. + * @return {AerospikeExp} + */ + export const keyExist: _keyTypeExp; + /** + * Create expression that returns a bin as a boolean value. Returns 'unknown' + * if the bin is not a boolean. + * + * @function + * @param {string }binName Bin name. + * @return {AerospikeExp} boolean bin + */ + export const binBool: _binTypeExp; + /** + * Create expression that returns a bin as a signed integer. Returns 'unknown' + * if the bin is not an integer. + * + * @function + * @param {string} binName Bin name. + * @return {AerospikeExp} integer bin + */ + export const binInt: _binTypeExp; + /** + * Create expression that returns a bin as a float. Returns 'unknown' if the bin + * is not an float. + * + * @function + * @param {string} binName Bin name. + * @return {AerospikeExp} float bin + */ + export const binFloat: _binTypeExp; + /** + * Create expression that returns a bin as a string. Returns 'unknown' if the + * bin is not an string. + * + * @function + * @param {string} binName Bin name. + * @return {AerospikeExp} string bin + */ + export const binStr: _binTypeExp; + /** + * Create expression that returns a bin as a blob. Returns 'unknown' if the bin + * is not an blob. + * + * @function + * @param {string} binName Bin name. 
+ * @return {AerospikeExp} blob bin + */ + export const binBlob: _binTypeExp; + /** + * Create expression that returns a bin as a geojson. Returns 'unknown' if the + * bin is not geojson. + * + * @function + * @param {string} binName Bin name. + * @return {AerospikeExp} geojson bin + */ + export const binGeo: _binTypeExp; + /** + * Create expression that returns a bin as a list. Returns 'unknown' if the bin + * is not an list. + * + * @function + * @param {string} binName Bin name. + * @return {AerospikeExp} list bin + */ + export const binList: _binTypeExp; + /** + * Create expression that returns a bin as a map. Returns 'unknown' if the bin + * is not an map. + * + * @function + * @param binName Bin name. + * @return {AerospikeExp} map bin + */ + export const binMap: _binTypeExp; + /** + * Create expression that returns a bin as a HyperLogLog (hll). Returns + * 'unknown' if the bin is not a HyperLogLog (hll). + * + * @function + * @param {string} binName Bin name. + * @return {AerospikeExp} hll bin + */ + export const binHll: _binTypeExp; + /** + * Create expression that returns the type of a bin as a integer. + * @param __bin_name Bin name. + * @returns returns the bin_type as an as_bytes_type. + + */ + export const binType: _binTypeExp; + /** + * Create expression that returns if bin of specified name exists. + * + * @param binName Bin name. + * @returns `True` if the bin exists, false otherwise. + */ + export const binExists: _binTypeExp; + /** + * Create expression that returns record set name string. This expression usually + * evaluates quickly because record meta data is cached in memory. + * + * @function + * @returns Name of the set this record belongs to. + */ + export const setName: _metaExp; + /** + * Create expression that returns record size on disk. If server storage-engine is + * memory, then zero is returned. This expression usually evaluates quickly + * because record meta data is cached in memory. 
+ * Requires server version between 5.3.0 inclusive and 7.0 exclusive. + * Use {@link #recordSize} for server version 7.0+. + * + * @function + * @return {AerospikeExp} integer value Uncompressed storage size of the record. + */ + export const deviceSize: _metaExp; + /** + * Create expression that returns record last update time expressed as 64 bit + * integer nanoseconds since 1970-01-01 epoch. + * + * @function + * @return {AerospikeExp} integer value When the record was last updated. + */ + export const lastUpdate: _metaExp; + /** + * Create expression that returns milliseconds since the record was last updated. + * This expression usually evaluates quickly because record meta data is cached + * in memory. + * + * @function + * @return {AerospikeExp} integer value Number of milliseconds since last updated. + */ + export const sinceUpdate: _metaExp; + /** + * Create expression that returns record expiration time expressed as 64 bit + * integer nanoseconds since 1970-01-01 epoch. + * + * @function + * @return integer value Expiration time in nanoseconds since 1970-01-01. + */ + export const voidTime: _metaExp; + /** + * Create expression that returns record expiration time (time to live) in integer + * seconds. + * + * @function + * @return {AerospikeExp} integer value Number of seconds till the record will expire, + * returns -1 if the record never expires. + */ + export const ttl: _metaExp; + /** + * Create expression that returns if record has been deleted and is still in + * tombstone state. This expression usually evaluates quickly because record + * meta data is cached in memory. + * + * @function + * @return {AerospikeExp} - value True if the record is a tombstone, false otherwise. + */ + export const isTombstone: _metaExp; + /** + * Create expression that returns record size in memory when either the + * storage-engine is memory or data-in-memory is true, otherwise returns 0. 
 + * This expression usually evaluates quickly because record meta data is cached + * in memory. + * Requires server version between 5.3.0 inclusive and 7.0 exclusive. + * Use {@link #recordSize} for server version 7.0+. + * + * @function + * @return {AerospikeExp} integer value memory size of the record. + */ + export const memorySize: _metaExp; + /** + * Create expression that returns the record size. This expression usually evaluates + * quickly because record meta data is cached in memory. + * Requires server version 7.0+. This expression replaces {@link #deviceSize} and + * {@link #memorySize} since those older expressions are equivalent on server version 7.0+. + * + * @function + * @return {AerospikeExp} integer value size of the record in Megabytes. + */ + export const recordSize: _metaExp; + /** + * Create expression that returns record digest modulo as integer. + * + * @function + * @param {number} mod Divisor used to divide the digest to get a remainder. + * @return {AerospikeExp} integer value Value in range 0 and mod (exclusive). + */ + export const digestModulo: _metaExp; + + /** + * Create equals (==) expression. + * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + export const eq: _cmpExp; + /** + * Create not equal (!=) expression. + * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + export const ne: _cmpExp; +/** + * Create a greater than (>) expression. + * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + export const gt: _cmpExp; +/** + * Create a greater than or equals (>=) expression. + * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + export const ge: _cmpExp; +/** + * Create a less than (<) expression.
+ * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + export const lt: _cmpExp; +/** + * Create a less than or equals (<=) expression. + * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + + export const le: _cmpExp; +/** + * Create expression that performs a regex match on a string bin or value + * expression. + * + * @function + * @param {number} options POSIX regex flags defined in regex.h. + * @param {string} regex POSIX regex string. + * @param {AerospikeExp} cmpStr String expression to compare against. + * @return {AerospikeExp} - boolean value + */ + export const cmpRegex: (options: regex, regex: string, cmpStr: AerospikeExp) => AerospikeExp; +/** + * Create a point within region or region contains point expression. + * + * @function + * @param {number} left left expression in comparison. + * @param {number} right right expression in comparison. + * @return {AerospikeExp} - boolean value + */ + export const cmpGeo: _cmpExp; +/** + * Create "not" (!) operator expression. + * + * @function + * @param {AerospikeExp} expr Boolean expression to negate. + * @return {AerospikeExp} - boolean value + */ + export const not: (expr: AerospikeExp) => AerospikeExp; + +/** + * Create "and" (&&) operator that applies to a variable number of expressions. + * + * @function + * @param {AerospikeExp} ... Variable number of boolean expressions. + * @return {AerospikeExp} - boolean value + */ + export const and: _VAExp; +/** + * Create "or" (||) operator that applies to a variable number of expressions. + * + * @function + * @param {AerospikeExp} ... Variable number of boolean expressions. 
+ * @return {AerospikeExp} - boolean value + */ + export const or: _VAExp; +/** + * Create expression that returns true if only one of the expressions are true. + * Requires server version 5.6.0+. + * + * @function + * @param {AerospikeExp} ... Variable number of boolean expressions. + * @return {AerospikeExp} - boolean value + */ + export const exclusive: _VAExp; +/** + * Create "add" (+) operator that applies to a variable number of expressions. + * Return the sum of all arguments. + * All arguments must be the same type (integer or float). + * Requires server version 5.6.0+. + * + * @function + * @param {number[]} ... Variable number of integer or float expressions. + * @return {AerospikeExp} integer or float value + */ + export const add: _VAExp; +/** + * Create "subtract" (-) operator that applies to a variable number of expressions. + * If only one argument is provided, return the negation of that argument. + * Otherwise, return the sum of the 2nd to Nth argument subtracted from the 1st + * argument. All arguments must resolve to the same type (integer or float). + * Requires server version 5.6.0+. + * + * @function + * @param {number[]} ... Variable number of integer or float expressions. + * @return {AerospikeExp} integer or float value + */ + export const sub: _VAExp; +/** + * Create "multiply" (*) operator that applies to a variable number of expressions. + * Return the product of all arguments. If only one argument is supplied, return + * that argument. All arguments must resolve to the same type (integer or float). + * Requires server version 5.6.0+. + * + * @function + * @param {number[]} ... Variable number of integer or float expressions. + * @return {AerospikeExp} integer or float value + */ + export const mul: _VAExp; +/** + * Create "divide" (/) operator that applies to a variable number of expressions. + * If there is only one argument, returns the reciprocal for that argument. 
+ * Otherwise, return the first argument divided by the product of the rest. + * All arguments must resolve to the same type (integer or float). + * Requires server version 5.6.0+. + * + * @function + * @param {number[]} ... Variable number of integer or float expressions. + * @return {AerospikeExp} integer or float value + */ + export const div: _VAExp; +/** + * Create "pow" operator that raises a "base" to the "exponent" power. + * All arguments must resolve to floats. + * Requires server version 5.6.0+. + * + * @function + * @param {number} base Base value. + * @param {number} exponent Exponent value. + * @return {AerospikeExp} float value + */ + export const pow: _VAExp; +/** + * Create "log" operator for logarithm of "num" with base "base". + * All arguments must resolve to floats. + * Requires server version 5.6.0+. + * + * @function + * @param {number} num Number. + * @param {number}base Base value. + * @return {AerospikeExp} float value + */ + export const log: _VAExp; +/** + * Create "modulo" (%) operator that determines the remainder of "numerator" + * divided by "denominator". All arguments must resolve to integers. + * Requires server version 5.6.0+. + * + * @function + * @return {AerospikeExp} integer value + */ + export const mod: _VAExp; +/** + * Create operator that returns absolute value of a number. + * All arguments must resolve to integer or float. + * Requires server version 5.6.0+. 
 + * + * @function + * @return {AerospikeExp} number value + */ + export const abs: _VAExp; + export const floor: _VAExp; + export const ceil: _VAExp; + export const toInt: _VAExp; + export const toFloat: _VAExp; + export const intAnd: _VAExp; + export const intOr: _VAExp; + export const intXor: _VAExp; + export const intNot: _VAExp; + export const intLshift: _VAExp; + export const intRshift: _VAExp; + export const intArshift: _VAExp; + export const intCount: _VAExp; + export const intLscan: _VAExp; + export const intRscan: _VAExp; + export const min: _VAExp; + export const max: _VAExp; + export const cond: _VAExp; + const letValue: _VAExp; // declared under an alias because `let` is a reserved word + export { letValue as let }; // re-exported under its public name `let` + + export const def: (varName: string, expr: AerospikeExp) => AerospikeExp; + export const _var: (varName: string) => AerospikeExp; +} + +export namespace operations { + + export class Operation { + public op: ScalarOperations; + public bin: string; + public value?: AerospikeBinValue; + public ttl?: number; + } + + export function read(bin: string): Operation; + export function write(bin: string, value: AerospikeBinValue): Operation; + export function add(bin: string, value: number | Double): Operation; + export function incr(bin: string, value: number | Double): Operation; + export function append(bin: string, value: string | Buffer): Operation; + export function prepend(bin: string, value: string | Buffer): Operation; + export function touch(ttl: number): Operation; + function deleteOp(): Operation; + export {deleteOp as delete} + +} + + +/** + * This namespace provides functions to create secondary index (SI) filter + * predicates for use in query operations via the {@link Client#query} command.
+ * + * @see {@link Query} + * + * @example + * + * const Aerospike = require('aerospike') + * const Context = Aerospike.cdt.Context + * + * Aerospike.connect().then(async (client) => { + * // find any records that have a recent location within 1000m radius of the specified coordinates + * let geoFilter = Aerospike.filter.geoWithinRadius('recent', 103.8, 1.305, 1000, Aerospike.indexType.LIST, new Context().addListIndex(0)) + * let query = client.query('test', 'demo') + * query.where(geoFilter) + * + * let results = await query.results() + * for (let record in results) { + * console.log(record.bins.recent) + * } + * client.close() + * }) + */ +export namespace filter { + + enum Predicates { + EQUAL, + RANGE + } + + /** + * Secondary Index filter predicate to limit the scope of a {@link Query}. + * + * Filter predicates must be instantiated using the methods in the {@link filter} namespace. + */ + export class SindexFilterPredicate { + protected constructor ( + predicate: Predicates, + bin: string, + dataType: indexDataType, + indexType: indexType, + props?: Record + ); + public predicate: Predicates; + public bin: string; + public datatype: indexDataType; + public type: indexType; + } + + /** + * Filter predicated returned by {@link contains} and {@link equal} for use in Secondary Index queries. + */ + class EqualPredicate extends SindexFilterPredicate { + constructor(bin: string, value: string | number, dataType: indexDataType, indexType: indexType); + public val: string | number; + } + + /** + * Filter predicate returned by {@link geoWithinGeoJSONRegion}, {@link geoContainsGeoJSONPoint}, {@link geoWithinRadius}, and {@link geoContainsPoint} for use in Secondary Index queries. 
+   */
+  class RangePredicate extends SindexFilterPredicate {
+    constructor(bin: string, min: number, max: number, dataType: indexDataType, indexType: indexType);
+    public min: number;
+    public max: number;
+  }
+  /**
+   * Filter predicate returned by {@link geoWithinGeoJSONRegion}, {@link geoContainsGeoJSONPoint}, {@link geoWithinRadius}, and {@link geoContainsPoint} for use in Secondary Index queries.
+   */
+  class GeoPredicate extends SindexFilterPredicate {
+    constructor (bin: string, value: GeoJSON, indexType: indexType);
+    public val: GeoJSON;
+  }
+
+  /**
+   * Filter for list/map membership.
+   *
+   * The filter matches records with a bin that has a list or map
+   * value that contain the given string or integer.
+   *
+   * @param bin - The name of the bin.
+   * @param value - The value that should be a member of the
+   * list or map in the bin.
+   * @param indexType - One of {@link indexType},
+   * i.e. LIST, MAPVALUES or MAPKEYS.
+   * @param ctx - The {@link cdt.Context} of the index.
+   *
+   * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}.
+   *
+   * @since v2.0
+   */
+  export function contains(bin: string, value: AerospikeBinValue, indexType?: indexType, ctx?: cdt.Context): filter.EqualPredicate;
+  /**
+   * String/integer equality filter.
+   *
+   * The filter matches records with a bin that matches a specified
+   * string or integer value.
+   *
+   * @param bin - The name of the bin.
+   * @param value - The filter value.
+   * @param ctx - The {@link cdt.Context} of the index.
+   * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}.
+   */
+  export function equal(bin: string, value: AerospikeBinValue, ctx?: cdt.Context): filter.EqualPredicate;
+  /**
+   * Geospatial filter that matches points within a given GeoJSON
+   * region.
+   *
+   * Depending on the index type, the filter will match GeoJSON
+   * values contained in list or map values as well (requires Aerospike server
+   * version >= 3.8).
+   *
+   * @param bin - The name of the bin.
+   * @param value - GeoJSON region value.
+ * @param indexType - One of {@link indexType}, i.e. LIST or MAPVALUES. + * @param ctx - The {@link cdt.Context} of the index. + * + * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}. + * + * @since v2.0 + */ + export function geoWithinGeoJSONRegion(bin: string, value: GeoJSON | GeoJSONType, indexType?: indexType, ctx?: cdt.Context): filter.GeoPredicate; + /** + * Geospatial filter that matches regions that contain a given GeoJSON point. + * + * Depending on the index type, the filter will match GeoJSON + * regions within list or map values as well (requires server + * >= 3.8). + * + * @param bin - The name of the bin. + * @param value - GeoJSON point value. + * @param indexType - One of {@link indexType}, i.e. LIST or MAPVALUES. + * @param {Object} ctx - The {@link cdt.Context} of the index. + * + * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}. + * + * @since v2.0 + */ + export function geoContainsGeoJSONPoint(bin: string, value: GeoJSON | GeoJSONType, indexType?: indexType, ctx?: cdt.Context): filter.GeoPredicate; + /** + * Geospatial filter that matches points within a radius from a given point. + * + * Depending on the index type, the filter will match GeoJSON + * values contained in list or map values as well (requires Aerospike server + * version >= 3.8). + * + * @param bin - The name of the bin. + * @param lng - Longitude of the center point. + * @param lat - Latitude of the center point. + * @param radius - Radius in meters. + * @param indexType - One of {@link indexType}, i.e. LIST or MAPVALUES. + * @param ctx - The {@link cdt.Context} of the index. + * + * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}. 
+ * + * @since v2.0 + */ + export function geoWithinRadius(bin: string, lng: number, lat: number, radius: number, indexType?: indexType, ctx?: cdt.Context): filter.GeoPredicate; + /** + * Geospatial filter that matches regions that contain a given lng/lat coordinate. + * + * Depending on the index type, the filter will match GeoJSON + * regions within list or map values as well (requires server + * >= 3.8). + * + * @param bin - The name of the bin. + * @param lng - Longitude of the point. + * @param lat - Latitude of the point. + * @param indexType - One of {@link indexType}, i.e. LIST or MAPVALUES. + * @param ctx - The {@link cdt.Context} of the index. + * + * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}. + * + * @since v2.0 + */ + export function geoContainsPoint(bin: string, lng: number, lat: number, indexType?: indexType, ctx?: cdt.Context): filter.GeoPredicate; + /** + * Integer range filter. + * + * The filter matches records with a bin value in the given + * integer range. The filter can also be used to match for integer values + * within the given range that are contained with a list or map by specifying + * the appropriate index type. + * + * @param bin - The name of the bin. + * @param min - Lower end of the range (inclusive). + * @param max - Upper end of the range (inclusive). + * @param indexType - One of {@link indexType}, i.e. LIST or MAPVALUES. + * @param ctx - The {@link cdt.Context} of the index. + * + * @returns Secondary Index filter predicate, that can be applied to queries using {@link Query#where}. + */ + export function range(bin: string, min: number, max: number, indexType?: indexType, ctx?: cdt.Context): filter.RangePredicate; +} + + +declare namespace statusNamespace { + /** + * One or more keys failed in a batch. + */ + export const AEROSPIKE_BATCH_FAILED = -16; + /** + * One or more keys failed in a batch. 
+ */ + export const BATCH_FAILED = -16; + /** + * No response received from server. + */ + export const AEROSPIKE_NO_RESPONSE = -15; + /** + * No response received from server. + */ + export const NO_RESPONSE = -15; + /** + * Max errors limit reached. + */ + export const AEROSPIKE_MAX_ERROR_RATE = -14; + /** + * Max errors limit reached. + */ + export const MAX_ERROR_RATE = -14; + /** + * Abort split batch retry and use normal node retry instead. + * Used internally and should not be returned to user. + */ + export const AEROSPIKE_USE_NORMAL_RETRY = -13; + /** + * Abort split batch retry and use normal node retry instead. + * Used internally and should not be returned to user. + */ + export const USE_NORMAL_RETRY = -13; + /** + * Max retries limit reached. + */ + export const AEROSPIKE_ERR_MAX_RETRIES_EXCEEDED = -12; + /** + * Max retries limit reached. + */ + export const ERR_MAX_RETRIES_EXCEEDED = -12; + /** + * Async command delay queue is full. + */ + export const AEROSPIKE_ERR_ASYNC_QUEUE_FULL = -11; + /** + * Async command delay queue is full. + */ + export const ERR_ASYNC_QUEUE_FULL = -11; + /** + * Synchronous connection error. + */ + export const AEROSPIKE_ERR_CONNECTION = -10; + /** + * Synchronous connection error. + */ + export const ERR_CONNECTION = -10; + /** + * TLS related error + */ + export const AEROSPIKE_ERR_TLS_ERROR = -9; + /** + * TLS related error + */ + export const ERR_TLS_ERROR = -9; + /** + * Node invalid or could not be found. + */ + export const AEROSPIKE_ERR_INVALID_NODE = -8; + /** + * Node invalid or could not be found. + */ + export const ERR_INVALID_NODE = -8; + /** + * Asynchronous connection error. + */ + export const AEROSPIKE_ERR_NO_MORE_CONNECTIONS = -7; + /** + * Asynchronous connection error. + */ + export const ERR_NO_MORE_CONNECTIONS = -7; + /** + * Asynchronous connection error. + */ + export const AEROSPIKE_ERR_ASYNC_CONNECTION = -6; + /** + * Asynchronous connection error. 
+ */ + export const ERR_ASYNC_CONNECTION = -6; + /** + * Query or scan was aborted in user's callback. + */ + export const AEROSPIKE_ERR_CLIENT_ABORT = -5; + /** + * Query or scan was aborted in user's callback. + */ + export const ERR_CLIENT_ABORT = -5; + /** + * Host name could not be found in DNS lookup. + */ + export const AEROSPIKE_ERR_INVALID_HOST = -4; + /** + * Host name could not be found in DNS lookup. + */ + export const ERR_INVALID_HOST = -4; + /** + * No more records available when parsing batch, scan or query records. + */ + export const AEROSPIKE_NO_MORE_RECORDS = -3; + /** + * No more records available when parsing batch, scan or query records. + */ + export const NO_MORE_RECORDS = -3; + /** + * Invalid client API parameter. + */ + export const AEROSPIKE_ERR_PARAM = -2; + /** + * Invalid client API parameter. + */ + export const ERR_PARAM = -2; + /** + * Generic client API usage error. + */ + export const AEROSPIKE_ERR_CLIENT = -1; + /** + * Generic client API usage error. + */ + export const ERR_CLIENT = -1; + /** + * Generic success. + */ + export const AEROSPIKE_OK = 0; + /** + * Generic success. + */ + export const OK = 0; + /** + * Generic error returned by server. + */ + export const AEROSPIKE_ERR_SERVER = 1; + /** + * Generic error returned by server. + */ + export const ERR_SERVER = 1; + /** + * Record does not exist in database. May be returned by read, or write + * with policy AS_POLICY_EXISTS_UPDATE. + */ + export const AEROSPIKE_ERR_RECORD_NOT_FOUND = 2; + /** + * Record does not exist in database. May be returned by read, or write + * with policy AS_POLICY_EXISTS_UPDATE. + */ + export const ERR_RECORD_NOT_FOUND = 2; + /** + * Generation of record in database does not satisfy write policy. + */ + export const AEROSPIKE_ERR_RECORD_GENERATION = 3; + /** + * Generation of record in database does not satisfy write policy. + */ + export const ERR_RECORD_GENERATION = 3; + /** + * Request protocol invalid, or invalid protocol field. 
+ */ + export const AEROSPIKE_ERR_REQUEST_INVALID = 4; + /** + * Request protocol invalid, or invalid protocol field. + */ + export const ERR_REQUEST_INVALID = 4; + /** + * Record already exists. May be returned by write with policy + * AS_POLICY_EXISTS_CREATE. + */ + export const AEROSPIKE_ERR_RECORD_EXISTS = 5; + /** + * Record already exists. May be returned by write with policy + * AS_POLICY_EXISTS_CREATE. + */ + export const ERR_RECORD_EXISTS = 5; + /** + * Bin already exists on a create-only operation. + */ + export const AEROSPIKE_ERR_BIN_EXISTS = 6; + /** + * Bin already exists on a create-only operation. + */ + export const ERR_BIN_EXISTS = 6; + /** + * A cluster state change occurred during the request. This may also be + * returned by scan operations with the fail_on_cluster_change flag set. + */ + export const AEROSPIKE_ERR_CLUSTER_CHANGE = 7; + /** + * A cluster state change occurred during the request. This may also be + * returned by scan operations with the fail_on_cluster_change flag set. + */ + export const ERR_CLUSTER_CHANGE = 7; + /** + * The server node is running out of memory and/or storage device space + * reserved for the specified namespace. + */ + export const AEROSPIKE_ERR_SERVER_FULL = 8; + /** + * The server node is running out of memory and/or storage device space + * reserved for the specified namespace. + */ + export const ERR_SERVER_FULL = 8; + /** + * Request timed out. Can be triggered by client or server. + */ + export const AEROSPIKE_ERR_TIMEOUT = 9; + /** + * Request timed out. Can be triggered by client or server. + */ + export const ERR_TIMEOUT = 9; + /** + * Operation not allowed in current configuration. + */ + export const AEROSPIKE_ERR_ALWAYS_FORBIDDEN = 10; + /** + * Operation not allowed in current configuration. + */ + export const ERR_ALWAYS_FORBIDDEN = 10; + /** + * Partition is unavailable. + */ + export const AEROSPIKE_ERR_CLUSTER = 11; + /** + * Partition is unavailable. 
+ */ + export const ERR_CLUSTER = 11; + /** + * Bin modification operation can't be done on an existing bin due to its + * value type. + */ + export const AEROSPIKE_ERR_BIN_INCOMPATIBLE_TYPE = 12; + /** + * Bin modification operation can't be done on an existing bin due to its + * value type. + */ + export const ERR_BIN_INCOMPATIBLE_TYPE = 12; + /** + * Record being (re-)written can't fit in a storage write block. + */ + export const AEROSPIKE_ERR_RECORD_TOO_BIG = 13; + /** + * Record being (re-)written can't fit in a storage write block. + */ + export const ERR_RECORD_TOO_BIG = 13; + /** + * Too many concurrent requests for one record - a "hot-key" situation. + */ + export const AEROSPIKE_ERR_RECORD_BUSY = 14; + /** + * Too many concurrent requests for one record - a "hot-key" situation. + */ + export const ERR_RECORD_BUSY = 14; + /** + * Scan aborted by user. + */ + export const AEROSPIKE_ERR_SCAN_ABORTED = 15; + /** + * Scan aborted by user. + */ + export const ERR_SCAN_ABORTED = 15; + /** + * Sometimes our doc, or our customers' wishes, get ahead of us. We may have + * processed something that the server is not ready for (unsupported feature). + */ + export const AEROSPIKE_ERR_UNSUPPORTED_FEATURE = 16; + /** + * Sometimes our doc, or our customers' wishes, get ahead of us. We may have + * processed something that the server is not ready for (unsupported feature). + */ + export const ERR_UNSUPPORTED_FEATURE = 16; + /** + * Bin not found on update-only operation. + */ + export const AEROSPIKE_ERR_BIN_NOT_FOUND = 17; + /** + * Bin not found on update-only operation. + */ + export const ERR_BIN_NOT_FOUND = 17; + /** + * The server node's storage device(s) can't keep up with the write load. + */ + export const AEROSPIKE_ERR_DEVICE_OVERLOAD = 18; + /** + * The server node's storage device(s) can't keep up with the write load. + */ + export const ERR_DEVICE_OVERLOAD = 18; + /** + * Record key sent with transaction did not match key stored on server. 
+ */ + export const AEROSPIKE_ERR_RECORD_KEY_MISMATCH = 19; + /** + * Record key sent with transaction did not match key stored on server. + */ + export const ERR_RECORD_KEY_MISMATCH = 19; + /** + * Namespace in request not found on server. + */ + export const AEROSPIKE_ERR_NAMESPACE_NOT_FOUND = 20; + /** + * Namespace in request not found on server. + */ + export const ERR_NAMESPACE_NOT_FOUND = 20; + /** + * Sent too-long bin name (should be impossible in this client) or exceeded + * namespace's bin name quota. + */ + export const AEROSPIKE_ERR_BIN_NAME = 21; + /** + * Sent too-long bin name (should be impossible in this client) or exceeded + * namespace's bin name quota. + */ + export const ERR_BIN_NAME = 21; + /** + * Operation not allowed at this time. + */ + export const AEROSPIKE_ERR_FAIL_FORBIDDEN = 22; + /** + * Operation not allowed at this time. + */ + export const ERR_FAIL_FORBIDDEN = 22; + /** + * Map element not found in UPDATE_ONLY write mode. + */ + export const AEROSPIKE_ERR_FAIL_ELEMENT_NOT_FOUND = 23; + /** + * Map element not found in UPDATE_ONLY write mode. + */ + export const ERR_FAIL_ELEMENT_NOT_FOUND = 23; + /** + * Map element exists in CREATE_ONLY write mode. + */ + export const AEROSPIKE_ERR_FAIL_ELEMENT_EXISTS = 24; + /** + * Map element exists in CREATE_ONLY write mode. + */ + export const ERR_FAIL_ELEMENT_EXISTS = 24; + /** + * Attempt to use an Enterprise feature on a Community server or a server + * without the applicable feature key. + */ + export const AEROSPIKE_ERR_ENTERPRISE_ONLY = 25; + /** + * Attempt to use an Enterprise feature on a Community server or a server + * without the applicable feature key. + */ + export const ERR_ENTERPRISE_ONLY = 25; + /** + * The operation cannot be applied to the current bin value on the server. + */ + export const AEROSPIKE_ERR_OP_NOT_APPLICABLE = 26; + /** + * The operation cannot be applied to the current bin value on the server. 
+ */ + export const ERR_OP_NOT_APPLICABLE = 26; + /** + * The transaction was not performed because the filter expression was + * false. + */ + export const AEROSPIKE_FILTERED_OUT = 27; + /** + * The transaction was not performed because the filter expression was + * false. + */ + export const FILTERED_OUT = 27; + /** + * Write command loses conflict to XDR. + */ + export const AEROSPIKE_LOST_CONFLICT = 28; + /** + * Write command loses conflict to XDR. + */ + export const LOST_CONFLICT = 28; + /** + * There are no more records left for query. + */ + export const AEROSPIKE_QUERY_END = 50; + /** + * There are no more records left for query. + */ + export const QUERY_END = 50; + /** + * Security functionality not supported by connected server. + */ + export const AEROSPIKE_SECURITY_NOT_SUPPORTED = 51; + /** + * Security functionality not supported by connected server. + */ + export const SECURITY_NOT_SUPPORTED = 51; + /** + * Security functionality not enabled by connected server. + */ + export const AEROSPIKE_SECURITY_NOT_ENABLED = 52; + /** + * Security functionality not enabled by connected server. + */ + export const SECURITY_NOT_ENABLED = 52; + /** + * Security type not supported by connected server. + */ + export const AEROSPIKE_SECURITY_SCHEME_NOT_SUPPORTED = 53; + /** + * Security type not supported by connected server. + */ + export const SECURITY_SCHEME_NOT_SUPPORTED = 53; + /** + * Administration command is invalid. + */ + export const AEROSPIKE_INVALID_COMMAND = 54; + /** + * Administration command is invalid. + */ + export const INVALID_COMMAND = 54; + /** + * Administration field is invalid. + */ + export const AEROSPIKE_INVALID_FIELD = 55; + /** + * Administration field is invalid. + */ + export const INVALID_FIELD = 55; + /** + * Security protocol not followed. + */ + export const AEROSPIKE_ILLEGAL_STATE = 56; + /** + * Security protocol not followed. + */ + export const ILLEGAL_STATE = 56; + /** + * User name is invalid. 
+   */
+  export const AEROSPIKE_INVALID_USER = 60;
+  /**
+   * User name is invalid.
+   */
+  export const INVALID_USER = 60;
+  /**
+   * User was previously created.
+   */
+  export const AEROSPIKE_USER_ALREADY_EXISTS = 61;
+  /**
+   * User was previously created.
+   */
+  export const USER_ALREADY_EXISTS = 61;
+  /**
+   * Password is invalid.
+   */
+  export const AEROSPIKE_INVALID_PASSWORD = 62;
+  /**
+   * Password is invalid.
+   */
+  export const INVALID_PASSWORD = 62;
+  /**
+   * Password has expired.
+   */
+  export const AEROSPIKE_EXPIRED_PASSWORD = 63;
+  /**
+   * Password has expired.
+   */
+  export const EXPIRED_PASSWORD = 63;
+  /**
+   * Forbidden password (e.g. recently used)
+   */
+  export const AEROSPIKE_FORBIDDEN_PASSWORD = 64;
+  /**
+   * Forbidden password (e.g. recently used)
+   */
+  export const FORBIDDEN_PASSWORD = 64;
+  /**
+   * Security credential is invalid.
+   */
+  export const AEROSPIKE_INVALID_CREDENTIAL = 65;
+  /**
+   * Security credential is invalid.
+   */
+  export const INVALID_CREDENTIAL = 65;
+  /**
+   * Login session expired.
+   */
+  export const AEROSPIKE_EXPIRED_SESSION = 66;
+  /**
+   * Login session expired.
+   */
+  export const EXPIRED_SESSION = 66;
+  /**
+   * Role name is invalid.
+   */
+  export const AEROSPIKE_INVALID_ROLE = 70;
+  /**
+   * Role name is invalid.
+   */
+  export const INVALID_ROLE = 70;
+  /**
+   * Role already exists.
+   */
+  export const AEROSPIKE_ROLE_ALREADY_EXISTS = 71;
+  /**
+   * Role already exists.
+   */
+  export const ROLE_ALREADY_EXISTS = 71;
+  /**
+   * Privilege is invalid.
+   */
+  export const AEROSPIKE_INVALID_PRIVILEGE = 72;
+  /**
+   * Privilege is invalid.
+   */
+  export const INVALID_PRIVILEGE = 72;
+  /**
+   * Invalid IP whitelist.
+   */
+  export const AEROSPIKE_INVALID_WHITELIST = 73;
+  /**
+   * Invalid IP whitelist.
+   */
+  export const INVALID_WHITELIST = 73;
+  /**
+   * Quotas not enabled on server.
+   */
+  export const AEROSPIKE_QUOTAS_NOT_ENABLED = 74;
+  /**
+   * Quotas not enabled on server.
+   */
+  export const QUOTAS_NOT_ENABLED = 74;
+  /**
+   * Invalid quota.
+   */
+  export const AEROSPIKE_INVALID_QUOTA = 75;
+  /**
+   * Invalid quota.
+   */
+  export const INVALID_QUOTA = 75;
+  /**
+   * User must be authenticated before performing database operations.
+   */
+  export const AEROSPIKE_NOT_AUTHENTICATED = 80;
+  /**
+   * User must be authenticated before performing database operations.
+   */
+  export const NOT_AUTHENTICATED = 80;
+  /**
+   * User does not possess the required role to perform the database operation.
+   */
+  export const AEROSPIKE_ROLE_VIOLATION = 81;
+  /**
+   * User does not possess the required role to perform the database operation.
+   */
+  export const ROLE_VIOLATION = 81;
+  /**
+   * Command not allowed because sender IP not whitelisted.
+   */
+  export const AEROSPIKE_NOT_WHITELISTED = 82;
+  /**
+   * Command not allowed because sender IP not whitelisted.
+   */
+  export const NOT_WHITELISTED = 82;
+  /**
+   * Quota exceeded.
+   */
+  export const AEROSPIKE_QUOTA_EXCEEDED = 83;
+  /**
+   * Quota exceeded.
+   */
+  export const QUOTA_EXCEEDED = 83;
+  /**
+   * Generic UDF error.
+   */
+  export const AEROSPIKE_ERR_UDF = 100;
+  /**
+   * Generic UDF error.
+   */
+  export const ERR_UDF = 100;
+  /**
+   * Batch functionality has been disabled.
+   */
+  export const AEROSPIKE_ERR_BATCH_DISABLED = 150;
+  /**
+   * Batch functionality has been disabled.
+   */
+  export const ERR_BATCH_DISABLED = 150;
+  /**
+   * Batch max requests have been exceeded.
+   */
+  export const AEROSPIKE_ERR_BATCH_MAX_REQUESTS_EXCEEDED = 151;
+  /**
+   * Batch max requests have been exceeded.
+   */
+  export const ERR_BATCH_MAX_REQUESTS_EXCEEDED = 151;
+  /**
+   * All batch queues are full.
+   */
+  export const AEROSPIKE_ERR_BATCH_QUEUES_FULL = 152;
+  /**
+   * All batch queues are full.
+ */ + export const ERR_BATCH_QUEUES_FULL = 152; + /** + * Invalid/Unsupported GeoJSON + */ + export const AEROSPIKE_ERR_GEO_INVALID_GEOJSON = 160; + /** + * Invalid/Unsupported GeoJSON + */ + export const ERR_GEO_INVALID_GEOJSON = 160; + /** + * Index found. + */ + export const AEROSPIKE_ERR_INDEX_FOUND = 200; + /** + * Index found. + */ + export const ERR_INDEX_FOUND = 200; + /** + * Index not found + */ + export const AEROSPIKE_ERR_INDEX_NOT_FOUND = 201; + /** + * Index not found + */ + export const ERR_INDEX_NOT_FOUND = 201; + /** + * Index is out of memory + */ + export const AEROSPIKE_ERR_INDEX_OOM = 202; + /** + * Index is out of memory + */ + export const ERR_INDEX_OOM = 202; + /** + * Unable to read the index. + */ + export const AEROSPIKE_ERR_INDEX_NOT_READABLE = 203; + /** + * Unable to read the index. + */ + export const ERR_INDEX_NOT_READABLE = 203; + /** + * Generic secondary index error. + */ + export const AEROSPIKE_ERR_INDEX = 204; + /** + * Generic secondary index error. + */ + export const ERR_INDEX = 204; + /** + * Index name is too long. + */ + export const AEROSPIKE_ERR_INDEX_NAME_MAXLEN = 205; + /** + * Index name is too long. + */ + export const ERR_INDEX_NAME_MAXLEN = 205; + /** + * System already has maximum allowed indices. + */ + export const AEROSPIKE_ERR_INDEX_MAXCOUNT = 206; + /** + * System already has maximum allowed indices. + */ + export const ERR_INDEX_MAXCOUNT = 206; + /** + * Query was aborted. + */ + export const AEROSPIKE_ERR_QUERY_ABORTED = 210; + /** + * Query was aborted. + */ + export const ERR_QUERY_ABORTED = 210; + /** + * Query processing queue is full. + */ + export const AEROSPIKE_ERR_QUERY_QUEUE_FULL = 211; + /** + * Query processing queue is full. + */ + export const ERR_QUERY_QUEUE_FULL = 211; + /** + * Secondary index query timed out on server. + */ + export const AEROSPIKE_ERR_QUERY_TIMEOUT = 212; + /** + * Secondary index query timed out on server. 
+ */ + export const ERR_QUERY_TIMEOUT = 212; + /** + * Generic query error. + */ + export const AEROSPIKE_ERR_QUERY = 213; + /** + * Generic query error. + */ + export const ERR_QUERY = 213; + /** + * UDF does not exist. + */ + export const AEROSPIKE_ERR_UDF_NOT_FOUND = 1301; + /** + * UDF does not exist. + */ + export const ERR_UDF_NOT_FOUND = 1301; + /** + * LUA file does not exist. + */ + export const AEROSPIKE_ERR_LUA_FILE_NOT_FOUND = 1302; + /** + * LUA file does not exist. + */ + export const ERR_LUA_FILE_NOT_FOUND = 1302; + /** + * Produces a human-readable error message for the given status code. + */ + export function getMessage(code: typeof statusNamespace[keyof typeof statusNamespace]): string; +} + +export {statusNamespace as status}