From 0194817cc24af36ad268fb057911765df674fead Mon Sep 17 00:00:00 2001 From: Stephen Cresswell <229672+cressie176@users.noreply.github.com> Date: Sun, 7 Jan 2024 16:49:56 +0000 Subject: [PATCH] Rename rdf -> filby --- .github/workflows/node-js-ci.yml | 12 ++-- .github/workflows/node-js-publish.yml | 6 +- README.md | 42 +++++------ examples/javascript/config.json | 8 +-- examples/javascript/docker-compose.yaml | 8 +-- examples/javascript/index.js | 24 +++---- examples/javascript/package-lock.json | 4 +- examples/javascript/package.json | 4 +- examples/javascript/routes/changelog-v1.js | 6 +- examples/javascript/routes/park-v1.js | 8 +-- examples/typescript/config.json | 8 +-- examples/typescript/docker-compose.yaml | 8 +-- examples/typescript/index.ts | 24 +++---- examples/typescript/package-lock.json | 6 +- examples/typescript/package.json | 4 +- examples/typescript/routes/changelog-v1.ts | 10 +-- examples/typescript/routes/park-v1.ts | 10 +-- index.d.ts | 2 +- index.js | 26 +++---- ...arv-rdf-driver.js => marv-filby-driver.js} | 0 lib/schema.json | 2 +- lib/template.hbs | 38 +++++----- migrations/.marvrc | 2 +- .../001.create-rdf-entity-relations.sql | 4 +- .../002.create-rdf-projection-relations.sql | 4 +- ...create-rdf-projection-entity-relations.sql | 6 +- migrations/004.create-rdf-hook-relations.sql | 6 +- .../005.create-rdf-notification-mechanism.sql | 52 +++++++------- .../006.create-rdf-change-set-relations.sql | 12 ++-- .../007.create-rdf-data-frame-relations.sql | 10 +-- ...08.create-rdf-projection-change-log-vw.sql | 12 ++-- package-lock.json | 6 +- package.json | 15 ++-- test/TestFilby.js | 35 +++++++++ test/TestReferenceDataFramework.js | 35 --------- test/api.test.js | 72 +++++++++---------- test/docker-compose.yaml | 8 +-- test/dsl.test.js | 44 ++++++------ test/notifications.test.js | 72 +++++++++---------- test/schema.test.js | 64 ++++++++--------- 40 files changed, 361 insertions(+), 358 deletions(-) rename lib/{marv-rdf-driver.js => 
marv-filby-driver.js} (100%) create mode 100644 test/TestFilby.js delete mode 100644 test/TestReferenceDataFramework.js diff --git a/.github/workflows/node-js-ci.yml b/.github/workflows/node-js-ci.yml index 7665425..1ca7150 100644 --- a/.github/workflows/node-js-ci.yml +++ b/.github/workflows/node-js-ci.yml @@ -12,9 +12,9 @@ jobs: - 5432:5432 env: POSTGRES_HOST_AUTH_METHOD: trust - POSTGRES_DB: rdf_test - POSTGRES_USER: rdf_test - POSTGRES_PASSWORD: rdf_test + POSTGRES_DB: fby_test + POSTGRES_USER: fby_test + POSTGRES_PASSWORD: fby_test strategy: matrix: node-version: [18.x, 20.x] @@ -38,9 +38,9 @@ jobs: - 5432:5432 env: POSTGRES_HOST_AUTH_METHOD: trust - POSTGRES_DB: rdf_test - POSTGRES_USER: rdf_test - POSTGRES_PASSWORD: rdf_test + POSTGRES_DB: fby_test + POSTGRES_USER: fby_test + POSTGRES_PASSWORD: fby_test steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 diff --git a/.github/workflows/node-js-publish.yml b/.github/workflows/node-js-publish.yml index b369742..1fea514 100644 --- a/.github/workflows/node-js-publish.yml +++ b/.github/workflows/node-js-publish.yml @@ -14,9 +14,9 @@ jobs: - 5432:5432 env: POSTGRES_HOST_AUTH_METHOD: trust - POSTGRES_DB: rdf_test - POSTGRES_USER: rdf_test - POSTGRES_PASSWORD: rdf_test + POSTGRES_DB: fby_test + POSTGRES_USER: fby_test + POSTGRES_PASSWORD: fby_test steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 diff --git a/README.md b/README.md index 07eef36..dcb78b7 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # Filby - A framework for managing temporal reference data -[![Node.js CI](https://github.com/acuminous/reference-data-framework/workflows/Node.js%20CI/badge.svg)](https://github.com/acuminous/reference-data-framework/actions?query=workflow%3A%22Node.js+CI%22) -[![Code Climate](https://codeclimate.com/github/acuminous/reference-data-framework/badges/gpa.svg)](https://codeclimate.com/github/acuminous/reference-data-framework) -[![Test 
Coverage](https://codeclimate.com/github/acuminous/reference-data-framework/badges/coverage.svg)](https://codeclimate.com/github/acuminous/reference-data-framework/coverage) +[![Node.js CI](https://github.com/acuminous/filby/workflows/Node.js%20CI/badge.svg)](https://github.com/acuminous/filby/actions?query=workflow%3A%22Node.js+CI%22) +[![Code Climate](https://codeclimate.com/github/acuminous/filby/badges/gpa.svg)](https://codeclimate.com/github/acuminous/filby) +[![Test Coverage](https://codeclimate.com/github/acuminous/filby/badges/coverage.svg)](https://codeclimate.com/github/acuminous/filby/coverage) [![Discover zUnit](https://img.shields.io/badge/Discover-zUnit-brightgreen)](https://www.npmjs.com/package/zunit) *There is no difference between Time and any of the three dimensions of Space except that our consciousness moves along it.* @@ -28,7 +28,7 @@ Most applications require slow moving reference data, which presents the followi | Evolution | Both reference data, and our understanding of the application domain evolves over time. We will at some point need to make backwards incompatible changes to our reference data, and will need to do so without breaking client applications. This suggests a versioning and validation mechanism. The issue of temporality compounds the challenge of evolution, since we may need to retrospecively add data to historic records. In some cases this data will not be known. | | Local Testing | Applications may be tested locally, and therefore any solution sould work well on a development laptop. | -Solving such a complex problem becomes simpler when broken down. This project provides a server side framework for managing slow moving, time dependent reference data. In the following diagram, the mechanism for defining, loading, accessing and receiving notifications about reference data are provided by this framework. The RESTful API and Webhook must be manually created by the application developer. 
An [example application](#example-application) is provided to demonstrate how. +Solving such a complex problem becomes simpler when broken down. This project provides a server side framework for managing temporal reference data. In the following diagram, the mechanism for defining, loading, accessing and receiving notifications about reference data are provided by this framework. The RESTful API and Webhook must be manually created by the application developer. An [example application](#example-application) is provided to demonstrate how.
                              Change
@@ -121,7 +121,7 @@ Refering back to the previous list of challenges:
 - **Local Testing** is possible through HTTP mocking libraries.
 
 ## How it works
-RDF has the following important concepts
+Filby has the following important concepts
 
 ┌─────────────────┐
 │                 │
@@ -173,39 +173,39 @@ A change set groups a set of data frames (potentially for different entities) in
 ### Notifications
 Notifications are published whenever a new data frame is created. By subscribing to the notifications that are emitted per projection when the backing data changes, downstream systems can maintain copies of the data, with reduced risk of it becoming stale. For example, the client in the above diagram could be another backend system, caching proxy, a web application, a websocket application, a CI / CD pipeline responsible for building a client side data module, or an ETL process for exporting the reference data to the company data lake.
 
-Notifications are retried a configurable number of times using an exponential backoff algorithm. It is save for multiple instances of the framework to poll for notifications concurrently.
+Notifications are retried a configurable number of times using an exponential backoff algorithm. It is safe for multiple instances of the framework to poll for notifications concurrently.
 
 ### Hook
 A hook is an event the framework will emit to whenenver a data frame used to build a projection is added. Your application can handle these events how it chooses, e.g. by making an HTTP request, or publishing a message to an SNS topic. Unlike node events, the handlers can be (and should be) asynchronous. It is advised not to share hooks between handlers since if one handler fails but another succeeds the built in retry mechanism will re-notify both handlers.
 
 ## API
-RDF provides a set of lifecycle methods and an API for retrieving change sets and projections, and for executing database queries (although you are free to use your preferred PostgreSQL client too).
+Filby provides a set of lifecycle methods and an API for retrieving change sets and projections, and for executing database queries (although you are free to use your preferred PostgreSQL client too).
 
-#### rdf.init(config: RdfConfig): Promise<void>
+#### filby.init(config: RdfConfig): Promise<void>
 Connects to the database and runs migrations
 
-#### rdf.startNotifications(): Promise<void>
+#### filby.startNotifications(): Promise<void>
 Starts polling the database for notifications
 
-#### rdf.stopNotifications(): Promise<void>
+#### filby.stopNotifications(): Promise<void>
 Stops polling the database for notifications, and waits for any inflight notifications to complete.
 
-#### rdf.stop(): Promise<void>
+#### filby.stop(): Promise<void>
 Stops polling for notifications then disconnects from the database
 
-#### rdf.getProjections(): Promise<RdfProjection>[]
+#### filby.getProjections(): Promise<RdfProjection[]>
 Returns the list of projections.
 
-#### rdf.getProjection(name: string, version: number): Promise<RdfProjection>
+#### filby.getProjection(name: string, version: number): Promise<RdfProjection>
 Returns the specified projection.
 
-#### rdf.getChangeLog(projection): Promise<RdfChangeSet[]>
+#### filby.getChangeLog(projection): Promise<RdfChangeSet[]>
 Returns the change log (an ordered list of change sets) for the given projection.
 
-#### rdf.getChangeSet(changeSetId): Promise<RdfChangeSet>
+#### filby.getChangeSet(changeSetId): Promise<RdfChangeSet>
 Returns the specified change set
 
-#### rdf.withTransaction(callback: (client: PoolClient) => Promise<T>): Promise<T>
+#### filby.withTransaction(callback: (client: PoolClient) => Promise<T>): Promise<T>

-  return rdf.withTransaction(async (client) => {
+  return filby.withTransaction(async (client) => {
     const { rows } = await client.query(query, [changeSetId]);
     return rows.map(toPark);
   });
@@ -259,7 +259,7 @@ define enums:
 
 # Defining entities performs the following:
 #
-# 1. Inserts a row into the 'rdf_entity' table,
+# 1. Inserts a row into the 'fby_entity' table,
 # 2. Creates a table 'park_v1' for holding reference data
 # 3. Creates an aggregate function 'park_v1_aggregate' to be used by projections
 #
@@ -278,7 +278,7 @@ define entities:
       park_code_len: LENGTH(code) >= 2 # Creates PostgreSQL check constraints
 
 # Defining projections and their dependent entities
-# RDF uses the dependencies to work out what projections are affected by reference data updates
+# Filby uses the dependencies to work out what projections are affected by reference data updates
 add projections:
   - name: park
     version: 1
@@ -333,8 +333,8 @@ This project includes proof of concept applications based on a Caravan Park busi
 
 ### Installation
 ```bash
-git clone git@github.com:acuminous/reference-data-framework.git
-cd reference-data-framework
+git clone git@github.com:acuminous/filby.git
+cd filby
 npm i
 ```
 
diff --git a/examples/javascript/config.json b/examples/javascript/config.json
index b4ed506..64ed519 100644
--- a/examples/javascript/config.json
+++ b/examples/javascript/config.json
@@ -6,11 +6,11 @@
     "port": 3000
   },
   "database": {
-    "user": "rdf_example",
-    "database": "rdf_example",
-    "password": "rdf_example"
+    "user": "fby_example",
+    "database": "fby_example",
+    "password": "fby_example"
   },
-  "rdf": {
+  "filby": {
     "migrations": "../migrations",
     "notifications": {
       "interval": "5s",
diff --git a/examples/javascript/docker-compose.yaml b/examples/javascript/docker-compose.yaml
index 10159ae..8c35a9d 100644
--- a/examples/javascript/docker-compose.yaml
+++ b/examples/javascript/docker-compose.yaml
@@ -3,10 +3,10 @@ version: '3.8'
 services:
   postgres:
     image: postgres:16-alpine
-    container_name: rdf_example
+    container_name: fby_example
     environment:
-      POSTGRES_DB: rdf_example
-      POSTGRES_USER: rdf_example
-      POSTGRES_PASSWORD: rdf_example
+      POSTGRES_DB: fby_example
+      POSTGRES_USER: fby_example
+      POSTGRES_PASSWORD: fby_example
     ports:
       - "5432:5432"
diff --git a/examples/javascript/index.js b/examples/javascript/index.js
index df38fbc..767112d 100644
--- a/examples/javascript/index.js
+++ b/examples/javascript/index.js
@@ -8,11 +8,11 @@ const swaggerUI = require('@fastify/swagger-ui');
 
 const config = require('./config.json');
 const changeLogRoute = require('./routes/changelog-v1');
-const ReferenceDataFramework = require('../..');
+const Filby = require('../..');
 
 const fastify = Fastify(config.fastify);
 
-const rdf = new ReferenceDataFramework({ ...config.rdf, ...{ database: config.database } });
+const filby = new Filby({ ...config.filby, ...{ database: config.database } });
 
 (async () => {
 
@@ -20,7 +20,7 @@ const rdf = new ReferenceDataFramework({ ...config.rdf, ...{ database: config.da
     swagger: {
       info: {
         title: 'Holiday Park Data Service',
-        description: 'A proof of concept reference data application',
+        description: 'A proof of concept Filby application',
         version: '1.0.0',
       },
       schemes: ['http'],
@@ -46,20 +46,20 @@ const rdf = new ReferenceDataFramework({ ...config.rdf, ...{ database: config.da
   });
 
   try {
-    await rdf.init();
+    await filby.init();
 
     await registerChangelog();
     await registerProjections();
 
     await fastify.listen(config.server);
 
-    rdf.on('park_v1_change', (event) => {
+    filby.on('park_v1_change', (event) => {
       console.log({ event });
     });
-    rdf.on('change', (event) => {
+    filby.on('change', (event) => {
       console.log({ event });
     });
-    await rdf.startNotifications();
+    await filby.startNotifications();
 
     registerShutdownHooks();
     console.log(`Server is listening on port ${config.server?.port}`);
@@ -72,16 +72,16 @@ const rdf = new ReferenceDataFramework({ ...config.rdf, ...{ database: config.da
 })();
 
 async function registerChangelog() {
-  fastify.register(changeLogRoute, { prefix: '/api/changelog', rdf });
+  fastify.register(changeLogRoute, { prefix: '/api/changelog', filby });
 }
 
 async function registerProjections() {
-  const projections = await rdf.getProjections();
+  const projections = await filby.getProjections();
   projections.forEach((projection) => {
     // eslint-disable-next-line global-require
     const route = require(path.resolve(`routes/${projection.name}-v${projection.version}`));
     const prefix = `/api/projection/v${projection.version}/${projection.name}`;
-    fastify.register(route, { prefix, rdf });
+    fastify.register(route, { prefix, filby });
   });
 }
 
@@ -90,9 +90,9 @@ function registerShutdownHooks() {
   process.once('SIGTERM', () => process.emit('app_stop'));
   process.once('app_stop', async () => {
     process.removeAllListeners('app_stop');
-    await rdf.stopNotifications();
+    await filby.stopNotifications();
     await fastify.close();
-    await rdf.stop();
+    await filby.stop();
     console.log('Server has stopped');
   });
 }
diff --git a/examples/javascript/package-lock.json b/examples/javascript/package-lock.json
index 21a9c65..828ee84 100644
--- a/examples/javascript/package-lock.json
+++ b/examples/javascript/package-lock.json
@@ -1,11 +1,11 @@
 {
-  "name": "rdf-example",
+  "name": "filby-example",
   "version": "1.0.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
-      "name": "rdf-example",
+      "name": "filby-example",
       "version": "1.0.0",
       "license": "ISC",
       "dependencies": {
diff --git a/examples/javascript/package.json b/examples/javascript/package.json
index 3503c95..3c3d048 100644
--- a/examples/javascript/package.json
+++ b/examples/javascript/package.json
@@ -1,7 +1,7 @@
 {
-  "name": "rdf-example",
+  "name": "filby-example",
   "version": "1.0.0",
-  "description": "An example project using the reference data framework",
+  "description": "An example project using Filby",
   "scripts": {
     "start": "node index.js",
     "docker": "docker-compose up --detach",
diff --git a/examples/javascript/routes/changelog-v1.js b/examples/javascript/routes/changelog-v1.js
index 907af2f..63f1b0d 100644
--- a/examples/javascript/routes/changelog-v1.js
+++ b/examples/javascript/routes/changelog-v1.js
@@ -15,13 +15,13 @@ const getChangelogSchema = {
   },
 };
 
-module.exports = (fastify, { rdf }, done) => {
+module.exports = (fastify, { filby }, done) => {
 
   fastify.get('/', { schema: getChangelogSchema }, async (request, reply) => {
 
     const projection = await getProjection(request);
 
-    const changeLog = await rdf.getChangeLog(projection);
+    const changeLog = await filby.getChangeLog(projection);
     if (changeLog.length === 0) throw createError(404, `Projection ${projection.name}-v${projection.version} has no change sets`);
 
     const changeSet = changeLog[changeLog.length - 1];
@@ -38,7 +38,7 @@ module.exports = (fastify, { rdf }, done) => {
   async function getProjection(request) {
     const name = String(request.query.projection);
     const version = Number(request.query.version);
-    const projection = await rdf.getProjection(name, version);
+    const projection = await filby.getProjection(name, version);
     if (!projection) throw createError(404, `Projection not found: ${name}-v${version}`);
     return projection;
   }
diff --git a/examples/javascript/routes/park-v1.js b/examples/javascript/routes/park-v1.js
index 575e211..780ab53 100644
--- a/examples/javascript/routes/park-v1.js
+++ b/examples/javascript/routes/park-v1.js
@@ -1,6 +1,6 @@
 const createError = require('http-errors');
 
-module.exports = (fastify, { rdf }, done) => {
+module.exports = (fastify, { filby }, done) => {
 
   const getParksSchema = {
     querystring: {
@@ -65,13 +65,13 @@ module.exports = (fastify, { rdf }, done) => {
 
   async function getChangeSet(request) {
     const changeSetId = Number(request.query.changeSetId);
-    const changeSet = await rdf.getChangeSet(changeSetId);
+    const changeSet = await filby.getChangeSet(changeSetId);
     if (!changeSet) throw createError(400, 'Invalid changeSetId');
     return changeSet;
   }
 
   async function getParks(changeSet) {
-    return rdf.withTransaction(async (tx) => {
+    return filby.withTransaction(async (tx) => {
       const { rows } = await tx.query('SELECT code, name, calendar_event, calendar_occurs FROM get_park_v1($1)', [changeSet.id]);
       const parkDictionary = rows.reduce(toParkDictionary, new Map());
       return Array.from(parkDictionary.values());
@@ -79,7 +79,7 @@ module.exports = (fastify, { rdf }, done) => {
   }
 
   async function getPark(changeSet, code) {
-    return rdf.withTransaction(async (tx) => {
+    return filby.withTransaction(async (tx) => {
       const { rows } = await tx.query('SELECT code, name, calendar_event, calendar_occurs FROM get_park_v1($1) WHERE code = upper($2)', [changeSet.id, code]);
       const parkDictionary = rows.reduce(toParkDictionary, new Map());
       return parkDictionary.get(code);
diff --git a/examples/typescript/config.json b/examples/typescript/config.json
index 49a792c..f5f14e0 100644
--- a/examples/typescript/config.json
+++ b/examples/typescript/config.json
@@ -6,11 +6,11 @@
     "port": 3000
   },
   "database": {
-    "user": "rdf_example",
-    "database": "rdf_example",
-    "password": "rdf_example"
+    "user": "fby_example",
+    "database": "fby_example",
+    "password": "fby_example"
   },
-  "rdf": {
+  "filby": {
     "migrations": "../migrations",
     "notifications": {
       "interval": "5s",
diff --git a/examples/typescript/docker-compose.yaml b/examples/typescript/docker-compose.yaml
index 10159ae..8c35a9d 100644
--- a/examples/typescript/docker-compose.yaml
+++ b/examples/typescript/docker-compose.yaml
@@ -3,10 +3,10 @@ version: '3.8'
 services:
   postgres:
     image: postgres:16-alpine
-    container_name: rdf_example
+    container_name: fby_example
     environment:
-      POSTGRES_DB: rdf_example
-      POSTGRES_USER: rdf_example
-      POSTGRES_PASSWORD: rdf_example
+      POSTGRES_DB: fby_example
+      POSTGRES_USER: fby_example
+      POSTGRES_PASSWORD: fby_example
     ports:
       - "5432:5432"
diff --git a/examples/typescript/index.ts b/examples/typescript/index.ts
index 0cd6e6f..b59bb91 100644
--- a/examples/typescript/index.ts
+++ b/examples/typescript/index.ts
@@ -6,11 +6,11 @@ import swagger from '@fastify/swagger';
 import swaggerUI from '@fastify/swagger-ui';
 
 import changeLogRoute from './routes/changelog-v1';
-import ReferenceDataFramework, { RdfProjection, RdfEvent } from '../..';
+import Filby, { RdfProjection, RdfEvent } from '../..';
 
 const fastify = Fastify(config.fastify);
 
-const rdf = new ReferenceDataFramework({ ...config.rdf, ...{ database: config.database } });
+const filby = new Filby({ ...config.filby, ...{ database: config.database } });
 
 type AppProcess = NodeJS.Process & {
   emit(event: string): boolean;
@@ -24,7 +24,7 @@ const app: AppProcess = process;
     swagger: {
       info: {
         title: 'Holiday Park Data Service',
-        description: 'A proof of concept reference data application',
+        description: 'A proof of concept Filby application',
         version: '1.0.0'
       },
       schemes: ['http'],
@@ -50,20 +50,20 @@ const app: AppProcess = process;
   });
 
   try {
-    await rdf.init();
+    await filby.init();
 
     await registerChangelog();
     await registerProjections();
 
     await fastify.listen(config.server);
 
-    rdf.on('park_v1_change', (event: RdfEvent) => {
+    filby.on('park_v1_change', (event: RdfEvent) => {
       console.log({ event })
     });
-    rdf.on('change', (event: RdfEvent) => {
+    filby.on('change', (event: RdfEvent) => {
       console.log({ event })
     });
-    await rdf.startNotifications();
+    await filby.startNotifications();
 
     registerShutdownHooks();
     console.log(`Server is listening on port ${config.server?.port}`);
@@ -76,15 +76,15 @@ const app: AppProcess = process;
 })();
 
 async function registerChangelog() {
-  fastify.register(changeLogRoute, { prefix: '/api/changelog', rdf });
+  fastify.register(changeLogRoute, { prefix: '/api/changelog', filby });
 }
 
 async function registerProjections() {
-  const projections = await rdf.getProjections();
+  const projections = await filby.getProjections();
   projections.forEach((projection: RdfProjection) => {
     const route = require(path.resolve(`routes/${projection.name}-v${projection.version}`));
     const prefix = `/api/projection/v${projection.version}/${projection.name}`;
-    fastify.register(route, { prefix, rdf });
+    fastify.register(route, { prefix, filby });
   })
 }
 
@@ -93,9 +93,9 @@ function registerShutdownHooks() {
   app.once('SIGTERM', () => app.emit('app_stop'));
   app.once('app_stop', async () => {
     app.removeAllListeners('app_stop');
-    await rdf.stopNotifications();
+    await filby.stopNotifications();
     await fastify.close();
-    await rdf.stop();
+    await filby.stop();
     console.log('Server has stopped');
   })
 }
diff --git a/examples/typescript/package-lock.json b/examples/typescript/package-lock.json
index 326b935..de4272d 100644
--- a/examples/typescript/package-lock.json
+++ b/examples/typescript/package-lock.json
@@ -1,11 +1,11 @@
 {
-  "name": "rdf-example",
+  "name": "filby-example",
   "version": "1.0.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
-      "name": "rdf-example",
+      "name": "filby-example",
       "version": "1.0.0",
       "license": "ISC",
       "dependencies": {
@@ -1908,4 +1908,4 @@
       "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="
     }
   }
-}
+}
\ No newline at end of file
diff --git a/examples/typescript/package.json b/examples/typescript/package.json
index fb447c5..2c9cc87 100644
--- a/examples/typescript/package.json
+++ b/examples/typescript/package.json
@@ -1,7 +1,7 @@
 {
-  "name": "rdf-example",
+  "name": "filby-example",
   "version": "1.0.0",
-  "description": "An example project using the reference data framework",
+  "description": "An example project using Filby",
   "scripts": {
     "start": "npx ts-node index.ts",
     "docker": "docker-compose up --detach",
diff --git a/examples/typescript/routes/changelog-v1.ts b/examples/typescript/routes/changelog-v1.ts
index 94738d1..b0f3bb3 100644
--- a/examples/typescript/routes/changelog-v1.ts
+++ b/examples/typescript/routes/changelog-v1.ts
@@ -1,11 +1,11 @@
 import { FastifyInstance, FastifyRequest } from 'fastify';
 import createError from 'http-errors';
-import ReferenceDataFramework from '../../..';
+import Filby from '../../..';
 
 const getChangelogSchema = {
 	querystring: {
 		type: "object",
-	  required: ["projection", "version"],
+		required: ["projection", "version"],
 		properties: {
 			projection: {
 				type: "string"
@@ -17,7 +17,7 @@ const getChangelogSchema = {
 	}
 } as const;
 
-export default (fastify: FastifyInstance, { rdf } : { rdf: ReferenceDataFramework }, done: (err?: Error) => void) => {
+export default (fastify: FastifyInstance, { filby }: { filby: Filby }, done: (err?: Error) => void) => {
 
 	fastify.get<{
 		Querystring: typeof getChangelogSchema.querystring.properties
@@ -25,7 +25,7 @@ export default (fastify: FastifyInstance, { rdf } : { rdf: ReferenceDataFramewor
 
 		const projection = await getProjection(request)
 
-		const changeLog = await rdf.getChangeLog(projection);
+		const changeLog = await filby.getChangeLog(projection);
 		if (changeLog.length === 0) throw createError(404, `Projection ${projection.name}-v${projection.version} has no change sets`);
 
 		const changeSet = changeLog[changeLog.length - 1];
@@ -42,7 +42,7 @@ export default (fastify: FastifyInstance, { rdf } : { rdf: ReferenceDataFramewor
 	async function getProjection(request: FastifyRequest<{ Querystring: typeof getChangelogSchema.querystring.properties }>) {
 		const name = String(request.query.projection);
 		const version = Number(request.query.version);
-		const projection = await rdf.getProjection(name, version);
+		const projection = await filby.getProjection(name, version);
 		if (!projection) throw createError(404, `Projection not found: ${name}-v${version}`);
 		return projection;
 	}
diff --git a/examples/typescript/routes/park-v1.ts b/examples/typescript/routes/park-v1.ts
index a740027..c7f92b6 100644
--- a/examples/typescript/routes/park-v1.ts
+++ b/examples/typescript/routes/park-v1.ts
@@ -1,8 +1,8 @@
 import { FastifyInstance, FastifyRequest } from 'fastify';
 import createError from 'http-errors';
-import ReferenceDataFramework, { RdfChangeSet } from '../../..';
+import Filby, { RdfChangeSet } from '../../..';
 
-export default (fastify: FastifyInstance, { rdf }: { rdf: ReferenceDataFramework }, done: (err?: Error) => void) => {
+export default (fastify: FastifyInstance, { filby }: { filby: Filby }, done: (err?: Error) => void) => {
 
   const getParksSchema = {
     querystring: {
@@ -72,13 +72,13 @@ export default (fastify: FastifyInstance, { rdf }: { rdf: ReferenceDataFramework
 
   async function getChangeSet(request: FastifyRequest<{ Querystring: typeof getParksSchema.querystring.properties }> | FastifyRequest<{ Querystring: typeof getParkSchema.querystring.properties }>) {
     const changeSetId = Number(request.query.changeSetId);
-    const changeSet = await rdf.getChangeSet(changeSetId);
+    const changeSet = await filby.getChangeSet(changeSetId);
     if (!changeSet) throw createError(400, `Invalid changeSetId`)
     return changeSet;
   }
 
   async function getParks(changeSet: RdfChangeSet) {
-    return rdf.withTransaction(async (tx) => {
+    return filby.withTransaction(async (tx) => {
       const { rows } = await tx.query('SELECT code, name, calendar_event, calendar_occurs FROM get_park_v1($1)', [changeSet.id]);
       const parkDictionary = rows.reduce(toParkDictionary, new Map());
       return Array.from(parkDictionary.values());
@@ -86,7 +86,7 @@ export default (fastify: FastifyInstance, { rdf }: { rdf: ReferenceDataFramework
   }
 
   async function getPark(changeSet: RdfChangeSet, code: string) {
-    return rdf.withTransaction(async (tx) => {
+    return filby.withTransaction(async (tx) => {
       const { rows } = await tx.query('SELECT code, name, calendar_event, calendar_occurs FROM get_park_v1($1) WHERE code = upper($2)', [changeSet.id, code]);
       const parkDictionary = rows.reduce(toParkDictionary, new Map());
       return parkDictionary.get(code);
diff --git a/index.d.ts b/index.d.ts
index 419332e..24e0eec 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -1,7 +1,7 @@
 import { EventEmitter2 as EventEmitter } from 'eventemitter2';
 import { PoolClient, PoolConfig } from 'pg';
 
-export default class ReferenceDataFramework extends EventEmitter {
+export default class Filby extends EventEmitter {
   constructor(config: RdfConfig);
   init(): Promise;
   startNotifications(): Promise;
diff --git a/index.js b/index.js
index 892ad7c..ae80727 100644
--- a/index.js
+++ b/index.js
@@ -6,9 +6,9 @@ const marv = require('marv/api/promise');
 const { Pool } = require('pg');
 const parseDuration = require('parse-duration');
 
-const driver = require('./lib/marv-rdf-driver');
+const driver = require('./lib/marv-filby-driver');
 
-module.exports = class ReferenceDataFramework extends EventEmitter {
+module.exports = class Filby extends EventEmitter {
 
   #config;
   #maxRescheduleDelay;
@@ -61,28 +61,28 @@ module.exports = class ReferenceDataFramework extends EventEmitter {
 
   async getProjections() {
     return this.withTransaction(async (tx) => {
-      const { rows } = await tx.query('SELECT id, name, version FROM rdf_projection');
+      const { rows } = await tx.query('SELECT id, name, version FROM fby_projection');
       return rows;
     });
   }
 
   async getProjection(name, version) {
     return this.withTransaction(async (tx) => {
-      const { rows } = await tx.query('SELECT id, name, version FROM rdf_projection WHERE name = $1 AND version = $2', [name, version]);
+      const { rows } = await tx.query('SELECT id, name, version FROM fby_projection WHERE name = $1 AND version = $2', [name, version]);
       return rows[0];
     });
   }
 
   async getChangeLog(projection) {
     return this.withTransaction(async (tx) => {
-      const { rows } = await tx.query('SELECT DISTINCT ON (change_set_id) change_set_id, effective, notes, last_modified, entity_tag FROM rdf_projection_change_log_vw WHERE projection_id = $1', [projection.id]);
+      const { rows } = await tx.query('SELECT DISTINCT ON (change_set_id) change_set_id, effective, notes, last_modified, entity_tag FROM fby_projection_change_log_vw WHERE projection_id = $1', [projection.id]);
       return rows.map(toChangeSet);
     });
   }
 
   async getChangeSet(changeSetId) {
     return this.withTransaction(async (tx) => {
-      const { rows } = await tx.query('SELECT id AS change_set_id, effective, notes, last_modified, entity_tag FROM rdf_change_set WHERE id = $1', [changeSetId]);
+      const { rows } = await tx.query('SELECT id AS change_set_id, effective, notes, last_modified, entity_tag FROM fby_change_set WHERE id = $1', [changeSetId]);
       return rows.map(toChangeSet)[0];
     });
   }
@@ -113,21 +113,21 @@ module.exports = class ReferenceDataFramework extends EventEmitter {
     };
 
     return pipsqueak({
-      name: 'rdf-notifications', factory, interval, delay: initialDelay,
+      name: 'filby-notifications', factory, interval, delay: initialDelay,
     });
   }
 
   async #getNextNotification(tx, maxAttempts) {
-    const { rows } = await tx.query('SELECT id, hook_id, attempts FROM rdf_get_next_notification($1)', [maxAttempts]);
+    const { rows } = await tx.query('SELECT id, hook_id, attempts FROM fby_get_next_notification($1)', [maxAttempts]);
     const notifications = rows.map((row) => ({ id: row.id, hookId: row.hook_id, attempts: row.attempts }));
     return notifications[0];
   }
 
   async #getHook(tx, notification) {
     const { rows } = await tx.query(
-      `SELECT h.event, p.id, p.name, p.version FROM rdf_hook h
-INNER JOIN rdf_notification n ON n.hook_id = h.id
-INNER JOIN rdf_projection p ON p.id = n.projection_id
+      `SELECT h.event, p.id, p.name, p.version FROM fby_hook h
+INNER JOIN fby_notification n ON n.hook_id = h.id
+INNER JOIN fby_projection p ON p.id = n.projection_id
 WHERE h.id = $1`,
       [notification.hookId],
     );
@@ -136,13 +136,13 @@ WHERE h.id = $1`,
   }
 
   async #passNotification(tx, notification) {
-    await tx.query('SELECT rdf_pass_notification($1)', [notification.id]);
+    await tx.query('SELECT fby_pass_notification($1)', [notification.id]);
   }
 
   async #failNotification(tx, notification, err) {
     const rescheduleDelay = Math.min(2 ** notification.attempts * 1000, this.#maxRescheduleDelay);
     const scheduledFor = new Date(Date.now() + rescheduleDelay);
-    await tx.query('SELECT rdf_fail_notification($1, $2, $3)', [notification.id, scheduledFor, err.stack]);
+    await tx.query('SELECT fby_fail_notification($1, $2, $3)', [notification.id, scheduledFor, err.stack]);
   }
 };
 
diff --git a/lib/marv-rdf-driver.js b/lib/marv-filby-driver.js
similarity index 100%
rename from lib/marv-rdf-driver.js
rename to lib/marv-filby-driver.js
diff --git a/lib/schema.json b/lib/schema.json
index 3151ab5..072d34c 100644
--- a/lib/schema.json
+++ b/lib/schema.json
@@ -1,6 +1,6 @@
 {
   "$schema": "http://json-schema.org/draft-07/schema#",
-  "title": "JSON Schema for RDF",
+  "title": "JSON Schema for filby",
   "type": "object",
   "properties": {
     "define enums": {
diff --git a/lib/template.hbs b/lib/template.hbs
index 1b21680..ee92d87 100644
--- a/lib/template.hbs
+++ b/lib/template.hbs
@@ -9,14 +9,14 @@ CREATE TYPE {{name}} AS ENUM (
 {{/define_enums}}
 
 {{#define_entities}}
-INSERT INTO rdf_entity (name, version) VALUES ('{{name}}', {{version}});
+INSERT INTO fby_entity (name, version) VALUES ('{{name}}', {{version}});
 
 CREATE TABLE {{tableName name version}} (
-  rdf_frame_id INTEGER PRIMARY KEY REFERENCES rdf_data_frame (id),
+  fby_frame_id INTEGER PRIMARY KEY REFERENCES fby_data_frame (id),
   {{#fields}}
   {{name}} {{type}},
   {{/fields}}
-  CONSTRAINT {{tableName name version}}_frame_id_{{#identified_by}}{{tableName name version}}_{{#if @last}}uniq{{/if}}{{/identified_by}} UNIQUE (rdf_frame_id, {{#identified_by}}{{name}}{{/identified_by}}){{#if checks}},{{/if}}
+  CONSTRAINT {{tableName name version}}_frame_id_{{#identified_by}}{{tableName name version}}_{{#if @last}}uniq{{/if}}{{/identified_by}} UNIQUE (fby_frame_id, {{#identified_by}}{{name}}{{/identified_by}}){{#if checks}},{{/if}}
   {{#each checks}}
   CONSTRAINT {{@key}}_chk CHECK ({{{this}}}){{#unless @last}},{{/unless}}
   {{/each}}
@@ -35,26 +35,26 @@ BEGIN
   WITH aggregated AS (
     SELECT
       DISTINCT ON ({{#identified_by}}{{name}}{{#unless @last}},{{/unless}}{{/identified_by}})
-      f.action AS rdf_action,
+      f.action AS fby_action,
       {{#fields}}
       x.{{name}}{{#unless @last}},{{/unless}}
       {{/fields}}
     FROM
-      rdf_data_frame f
-    INNER JOIN rdf_entity e ON e.id = f.entity_id
-    INNER JOIN {{tableName name version}} x ON x.rdf_frame_id = f.id
+      fby_data_frame f
+    INNER JOIN fby_entity e ON e.id = f.entity_id
+    INNER JOIN {{tableName name version}} x ON x.fby_frame_id = f.id
     WHERE e.name = '{{name}}' AND e.version = {{version}}
       AND f.change_set_id <= p_change_set_id
     ORDER BY
       {{#identified_by}}x.{{name}}{{/identified_by}} ASC,
-      x.rdf_frame_id DESC
+      x.fby_frame_id DESC
   )
   SELECT
     {{#fields}}
       a.{{name}}{{#unless @last}},{{/unless}}
     {{/fields}}
   FROM aggregated a
-  WHERE a.rdf_action <> 'DELETE';
+  WHERE a.fby_action <> 'DELETE';
 END;
 $$ LANGUAGE plpgsql;
 
@@ -69,14 +69,14 @@ DECLARE
   v_entity_id INTEGER;
 
 BEGIN
-  INSERT INTO rdf_projection (name, version) VALUES
+  INSERT INTO fby_projection (name, version) VALUES
     ('{{name}}', {{version}})
   RETURNING id INTO v_projection_id;
 
   {{#dependencies}}
-  SELECT id INTO v_entity_id FROM rdf_entity WHERE name = '{{entity}}' AND version = {{version}};
+  SELECT id INTO v_entity_id FROM fby_entity WHERE name = '{{entity}}' AND version = {{version}};
 
-  INSERT INTO rdf_projection_entity (projection_id, entity_id) VALUES
+  INSERT INTO fby_projection_entity (projection_id, entity_id) VALUES
     (v_projection_id, v_entity_id);
 
   {{/dependencies}}
@@ -93,9 +93,9 @@ DECLARE
 
 BEGIN
   {{#if (and projection version)}}
-  SELECT id INTO v_projection_id FROM rdf_projection WHERE name = '{{projection}}' AND version = {{version}};
+  SELECT id INTO v_projection_id FROM fby_projection WHERE name = '{{projection}}' AND version = {{version}};
   {{/if}}
-  INSERT INTO rdf_hook (projection_id, event) VALUES (v_projection_id, '{{event}}');
+  INSERT INTO fby_hook (projection_id, event) VALUES (v_projection_id, '{{event}}');
 END;
 $$ LANGUAGE plpgsql;
 
@@ -108,23 +108,23 @@ DO $$
     v_entity_id INTEGER;
     v_frame_id INTEGER;
   BEGIN
-    INSERT INTO rdf_change_set (effective, notes) VALUES
+    INSERT INTO fby_change_set (effective, notes) VALUES
       ('{{effective}}', '{{notes}}')
     RETURNING id INTO v_change_set_id;
 
     {{#frames}}
     {{#data}}
-    SELECT id INTO v_entity_id FROM rdf_entity WHERE name = '{{../entity}}' AND version = {{../version}};
+    SELECT id INTO v_entity_id FROM fby_entity WHERE name = '{{../entity}}' AND version = {{../version}};
 
-    INSERT INTO rdf_data_frame (change_set_id, entity_id, action) VALUES
+    INSERT INTO fby_data_frame (change_set_id, entity_id, action) VALUES
       (v_change_set_id, v_entity_id, '{{../action}}')
     RETURNING id INTO v_frame_id;
 
-    INSERT INTO {{tableName ../entity ../version}} (rdf_frame_id, {{#xkeys .}}{{item}}{{#unless isLast}}, {{/unless}}{{/xkeys}}) VALUES
+    INSERT INTO {{tableName ../entity ../version}} (fby_frame_id, {{#xkeys .}}{{item}}{{#unless isLast}}, {{/unless}}{{/xkeys}}) VALUES
       (v_frame_id, {{#xvalues .}}'{{item}}'{{#unless isLast}}, {{/unless}}{{/xvalues}});
     {{/data}}
 
-    PERFORM rdf_notify('{{entity}}', {{version}});
+    PERFORM fby_notify('{{entity}}', {{version}});
 
     {{/frames}}
 
diff --git a/migrations/.marvrc b/migrations/.marvrc
index 3452f97..8af25f6 100644
--- a/migrations/.marvrc
+++ b/migrations/.marvrc
@@ -1,3 +1,3 @@
 {
-  "namespace": "rdf"
+  "namespace": "filby"
 }
diff --git a/migrations/001.create-rdf-entity-relations.sql b/migrations/001.create-rdf-entity-relations.sql
index 9f03a42..2b6ed44 100644
--- a/migrations/001.create-rdf-entity-relations.sql
+++ b/migrations/001.create-rdf-entity-relations.sql
@@ -1,10 +1,10 @@
 START TRANSACTION;
 
-CREATE TABLE rdf_entity (
+CREATE TABLE fby_entity (
   id SERIAL PRIMARY KEY,
   name TEXT NOT NULL,
   version INTEGER NOT NULL,
-  CONSTRAINT rdf_entity_name_version_uniq UNIQUE (name, version)
+  CONSTRAINT fby_entity_name_version_uniq UNIQUE (name, version)
 );
 
 END TRANSACTION;
\ No newline at end of file
diff --git a/migrations/002.create-rdf-projection-relations.sql b/migrations/002.create-rdf-projection-relations.sql
index bddae4a..98f3217 100644
--- a/migrations/002.create-rdf-projection-relations.sql
+++ b/migrations/002.create-rdf-projection-relations.sql
@@ -1,10 +1,10 @@
 START TRANSACTION;
 
-CREATE TABLE rdf_projection (
+CREATE TABLE fby_projection (
   id SERIAL PRIMARY KEY,
   name TEXT NOT NULL,
   version INTEGER NOT NULL,
-  CONSTRAINT rdf_projection_name_version_uniq UNIQUE (name, version)
+  CONSTRAINT fby_projection_name_version_uniq UNIQUE (name, version)
 );
 
 END TRANSACTION;
\ No newline at end of file
diff --git a/migrations/003.create-rdf-projection-entity-relations.sql b/migrations/003.create-rdf-projection-entity-relations.sql
index f439890..c059eaf 100644
--- a/migrations/003.create-rdf-projection-entity-relations.sql
+++ b/migrations/003.create-rdf-projection-entity-relations.sql
@@ -1,8 +1,8 @@
 START TRANSACTION;
 
-CREATE TABLE rdf_projection_entity (
-  projection_id INTEGER REFERENCES rdf_projection (id) NOT NULL,
-  entity_id INTEGER REFERENCES rdf_entity (id) NOT NULL,
+CREATE TABLE fby_projection_entity (
+  projection_id INTEGER REFERENCES fby_projection (id) NOT NULL,
+  entity_id INTEGER REFERENCES fby_entity (id) NOT NULL,
   PRIMARY KEY (projection_id, entity_id)
 );
 
diff --git a/migrations/004.create-rdf-hook-relations.sql b/migrations/004.create-rdf-hook-relations.sql
index 3dd2a11..7698a7a 100644
--- a/migrations/004.create-rdf-hook-relations.sql
+++ b/migrations/004.create-rdf-hook-relations.sql
@@ -1,10 +1,10 @@
 START TRANSACTION;
 
-CREATE TABLE rdf_hook (
+CREATE TABLE fby_hook (
   id SERIAL PRIMARY KEY,
-  projection_id INTEGER REFERENCES rdf_projection (id),
+  projection_id INTEGER REFERENCES fby_projection (id),
   event TEXT NOT NULL,
-  CONSTRAINT rdf_hook_projection_id_consumer_uniq UNIQUE NULLS NOT DISTINCT (projection_id, event)
+  CONSTRAINT fby_hook_projection_id_consumer_uniq UNIQUE NULLS NOT DISTINCT (projection_id, event)
 );
 
 END TRANSACTION;
\ No newline at end of file
diff --git a/migrations/005.create-rdf-notification-mechanism.sql b/migrations/005.create-rdf-notification-mechanism.sql
index 649c31f..b556087 100644
--- a/migrations/005.create-rdf-notification-mechanism.sql
+++ b/migrations/005.create-rdf-notification-mechanism.sql
@@ -1,23 +1,23 @@
 START TRANSACTION;
 
-CREATE TYPE rdf_notification_status AS ENUM ('PENDING', 'OK');
+CREATE TYPE fby_notification_status AS ENUM ('PENDING', 'OK');
 
-CREATE TABLE rdf_notification (
+CREATE TABLE fby_notification (
   id SERIAL PRIMARY KEY,
-  hook_id INTEGER REFERENCES rdf_hook (id) NOT NULL,
-  projection_id INTEGER REFERENCES rdf_projection (id) NOT NULL,
+  hook_id INTEGER REFERENCES fby_hook (id) NOT NULL,
+  projection_id INTEGER REFERENCES fby_projection (id) NOT NULL,
   scheduled_for TIMESTAMP WITH TIME ZONE NOT NULL,
   attempts INTEGER DEFAULT 0,
-  status rdf_notification_status NOT NULL DEFAULT 'PENDING',
+  status fby_notification_status NOT NULL DEFAULT 'PENDING',
   last_attempted TIMESTAMP WITH TIME ZONE,
   last_error TEXT,
-  CONSTRAINT rdf_notification_hook_id_projection_id_status_uniq UNIQUE (hook_id, projection_id, status)
+  CONSTRAINT fby_notification_hook_id_projection_id_status_uniq UNIQUE (hook_id, projection_id, status)
 );
 
-CREATE FUNCTION rdf_schedule_notification(p_hook_id INTEGER, p_projection_id INTEGER) RETURNS VOID
+CREATE FUNCTION fby_schedule_notification(p_hook_id INTEGER, p_projection_id INTEGER) RETURNS VOID
 AS $$
 BEGIN
-  INSERT INTO rdf_notification (hook_id, projection_id, scheduled_for) VALUES (p_hook_id, p_projection_id, now())
+  INSERT INTO fby_notification (hook_id, projection_id, scheduled_for) VALUES (p_hook_id, p_projection_id, now())
   ON CONFLICT (hook_id, projection_id, status) DO UPDATE SET
     id = EXCLUDED.id,
     scheduled_for = EXCLUDED.scheduled_for,
@@ -27,28 +27,28 @@ BEGIN
 END;
 $$ LANGUAGE plpgsql;
 
-CREATE FUNCTION rdf_notify(p_name TEXT, p_version INTEGER) RETURNS VOID
+CREATE FUNCTION fby_notify(p_name TEXT, p_version INTEGER) RETURNS VOID
 AS $$
 DECLARE
   projection RECORD;
 BEGIN
   FOR projection IN (
     SELECT DISTINCT p.id
-    FROM rdf_entity e
-    INNER JOIN rdf_projection_entity pe ON pe.entity_id = e.id
-    INNER JOIN rdf_projection p ON p.id = pe.projection_id
+    FROM fby_entity e
+    INNER JOIN fby_projection_entity pe ON pe.entity_id = e.id
+    INNER JOIN fby_projection p ON p.id = pe.projection_id
     WHERE e.name = p_name AND e.version = p_version
-  ) 
+  )
   LOOP
-    PERFORM rdf_schedule_notification(h.id, projection.id)
-    FROM rdf_hook h
+    PERFORM fby_schedule_notification(h.id, projection.id)
+    FROM fby_hook h
     WHERE h.projection_id = projection.id
        OR h.projection_id IS NULL;
   END LOOP;
 END;
 $$ LANGUAGE plpgsql;
 
-CREATE FUNCTION rdf_get_next_notification(p_max_attempts INTEGER)
+CREATE FUNCTION fby_get_next_notification(p_max_attempts INTEGER)
 RETURNS TABLE (
   id INTEGER,
   hook_id INTEGER,
@@ -62,24 +62,24 @@ BEGIN
     n.hook_id,
     n.attempts
   FROM
-    rdf_notification n
+    fby_notification n
   WHERE n.status = 'PENDING'
     AND n.scheduled_for <= now()
-    AND n.attempts < p_max_attempts  
+    AND n.attempts < p_max_attempts
   FOR UPDATE SKIP LOCKED
   LIMIT 1;
 END;
 $$ LANGUAGE plpgsql;
 
-CREATE FUNCTION rdf_pass_notification(p_id INTEGER) RETURNS VOID
+CREATE FUNCTION fby_pass_notification(p_id INTEGER) RETURNS VOID
 AS $$
 DECLARE
   v_hook_id INTEGER;
 BEGIN
-  SELECT hook_id FROM rdf_notification n WHERE n.id = p_id INTO v_hook_id;
-  DELETE FROM rdf_notification n WHERE n.hook_id = v_hook_id AND n.status = 'OK';
-  UPDATE rdf_notification n
-  SET 
+  SELECT hook_id FROM fby_notification n WHERE n.id = p_id INTO v_hook_id;
+  DELETE FROM fby_notification n WHERE n.hook_id = v_hook_id AND n.status = 'OK';
+  UPDATE fby_notification n
+  SET
     attempts = n.attempts + 1,
     status = 'OK',
     last_attempted = now(),
@@ -89,11 +89,11 @@ BEGIN
 END;
 $$ LANGUAGE plpgsql;
 
-CREATE FUNCTION rdf_fail_notification(p_id INTEGER, p_scheduled_for TIMESTAMP WITH TIME ZONE, p_error TEXT) RETURNS VOID
+CREATE FUNCTION fby_fail_notification(p_id INTEGER, p_scheduled_for TIMESTAMP WITH TIME ZONE, p_error TEXT) RETURNS VOID
 AS $$
 BEGIN
-  UPDATE rdf_notification n
-  SET 
+  UPDATE fby_notification n
+  SET
     attempts = n.attempts + 1,
     scheduled_for = p_scheduled_for,
     last_attempted = now(),
diff --git a/migrations/006.create-rdf-change-set-relations.sql b/migrations/006.create-rdf-change-set-relations.sql
index ee80a9e..f6c2721 100644
--- a/migrations/006.create-rdf-change-set-relations.sql
+++ b/migrations/006.create-rdf-change-set-relations.sql
@@ -2,7 +2,7 @@ START TRANSACTION;
 
 CREATE EXTENSION pgcrypto;
 
-CREATE TABLE rdf_change_set (
+CREATE TABLE fby_change_set (
   id SERIAL PRIMARY KEY,
   effective TIMESTAMP WITH TIME ZONE NOT NULL,
   notes TEXT,
@@ -10,9 +10,9 @@ CREATE TABLE rdf_change_set (
   entity_tag TEXT NOT NULL
 );
 
-CREATE INDEX rdf_change_set_effective_idx ON rdf_change_set (effective DESC);
+CREATE INDEX fby_change_set_effective_idx ON fby_change_set (effective DESC);
 
-CREATE FUNCTION rdf_on_new_change_set()
+CREATE FUNCTION fby_on_new_change_set()
 RETURNS TRIGGER AS $$
 BEGIN
   NEW.last_modified := now();
@@ -21,9 +21,9 @@ BEGIN
 END;
 $$ LANGUAGE plpgsql;
 
-CREATE TRIGGER rdf_change_set_insert_trigger
-BEFORE INSERT ON rdf_change_set
+CREATE TRIGGER fby_change_set_insert_trigger
+BEFORE INSERT ON fby_change_set
 FOR EACH ROW
-EXECUTE FUNCTION rdf_on_new_change_set();
+EXECUTE FUNCTION fby_on_new_change_set();
 
 END TRANSACTION;
\ No newline at end of file
diff --git a/migrations/007.create-rdf-data-frame-relations.sql b/migrations/007.create-rdf-data-frame-relations.sql
index db76003..0361c27 100644
--- a/migrations/007.create-rdf-data-frame-relations.sql
+++ b/migrations/007.create-rdf-data-frame-relations.sql
@@ -1,12 +1,12 @@
 START TRANSACTION;
 
-CREATE TYPE rdf_action_type AS ENUM ('POST', 'DELETE');
+CREATE TYPE fby_action_type AS ENUM ('POST', 'DELETE');
 
-CREATE TABLE rdf_data_frame (
+CREATE TABLE fby_data_frame (
   id SERIAL PRIMARY KEY,
-  change_set_id INTEGER REFERENCES rdf_change_set (id) NOT NULL,
-  entity_id INTEGER REFERENCES rdf_entity (id) NOT NULL,
-  action rdf_action_type NOT NULL
+  change_set_id INTEGER REFERENCES fby_change_set (id) NOT NULL,
+  entity_id INTEGER REFERENCES fby_entity (id) NOT NULL,
+  action fby_action_type NOT NULL
 );
 
 END TRANSACTION;
\ No newline at end of file
diff --git a/migrations/008.create-rdf-projection-change-log-vw.sql b/migrations/008.create-rdf-projection-change-log-vw.sql
index e2fd8b7..da00a1e 100644
--- a/migrations/008.create-rdf-projection-change-log-vw.sql
+++ b/migrations/008.create-rdf-projection-change-log-vw.sql
@@ -1,6 +1,6 @@
 START TRANSACTION;
 
-CREATE VIEW rdf_projection_change_log_vw AS (
+CREATE VIEW fby_projection_change_log_vw AS (
   SELECT
     p.id AS projection_id,
     p.name AS projection_name,
@@ -11,11 +11,11 @@ CREATE VIEW rdf_projection_change_log_vw AS (
     c.last_modified,
     c.entity_tag
   FROM
-    rdf_projection p
-  INNER JOIN rdf_projection_entity pe ON pe.projection_id = p.id
-  INNER JOIN rdf_entity e ON e.id = pe.entity_id
-  INNER JOIN rdf_data_frame f ON f.entity_id = pe.entity_id
-  INNER JOIN rdf_change_set c ON c.id = f.change_set_id
+    fby_projection p
+  INNER JOIN fby_projection_entity pe ON pe.projection_id = p.id
+  INNER JOIN fby_entity e ON e.id = pe.entity_id
+  INNER JOIN fby_data_frame f ON f.entity_id = pe.entity_id
+  INNER JOIN fby_change_set c ON c.id = f.change_set_id
 );
 
 END TRANSACTION;
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 3a41bd1..5fc2eec 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,11 +1,11 @@
 {
-  "name": "reference-data-framework",
+  "name": "filby",
   "version": "1.0.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
-      "name": "reference-data-framework",
+      "name": "filby",
       "version": "1.0.0",
       "license": "ISC",
       "dependencies": {
@@ -8868,4 +8868,4 @@
       "dev": true
     }
   }
-}
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 43b8e1f..619ac9f 100644
--- a/package.json
+++ b/package.json
@@ -1,10 +1,13 @@
 {
-  "name": "reference-data-framework",
+  "name": "filby",
   "version": "1.0.0",
-  "description": "A framework for managing slow moving, temporal reference data",
+  "description": "A framework for managing temporal reference data",
   "keywords": [
     "temporal",
-    "reference data",
+    "time",
+    "travelling",
+    "series",
+    "reference",
     "data",
     "database",
     "postgres",
@@ -55,10 +58,10 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/acuminous/reference-data-framework.git"
+    "url": "git+https://github.com/acuminous/filby.git"
   },
   "bugs": {
-    "url": "https://github.com/acuminous/reference-data-framework/issues"
+    "url": "https://github.com/acuminous/filby/issues"
   },
-  "homepage": "https://acuminous.github.io/reference-data-framework"
+  "homepage": "https://acuminous.github.io/filby"
 }
\ No newline at end of file
diff --git a/test/TestFilby.js b/test/TestFilby.js
new file mode 100644
index 0000000..f920365
--- /dev/null
+++ b/test/TestFilby.js
@@ -0,0 +1,35 @@
+const Filby = require('..');
+
+const noop = () => { };
+
+module.exports = class TestFilby extends Filby {
+
+  #nukeCustomObjects;
+
+  constructor(config) {
+    super(config);
+    this.#nukeCustomObjects = config.nukeCustomObjects || noop;
+  }
+
+  async reset() {
+    await this.init();
+    await this.wipe();
+  }
+
+  async wipe() {
+    await this.withTransaction(async (tx) => {
+      await this.#nukeCustomObjects(tx);
+      await this.#wipeFilbyData(tx);
+    });
+  }
+
+  async #wipeFilbyData(tx) {
+    await tx.query('DELETE FROM fby_notification');
+    await tx.query('DELETE FROM fby_hook');
+    await tx.query('DELETE FROM fby_data_frame');
+    await tx.query('DELETE FROM fby_projection_entity');
+    await tx.query('DELETE FROM fby_entity');
+    await tx.query('DELETE FROM fby_change_set');
+    await tx.query('DELETE FROM fby_projection');
+  }
+};
diff --git a/test/TestReferenceDataFramework.js b/test/TestReferenceDataFramework.js
deleted file mode 100644
index 77c0c9a..0000000
--- a/test/TestReferenceDataFramework.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const ReferenceDataFramework = require('..');
-
-const noop = () => { };
-
-module.exports = class TestReferenceDataFramework extends ReferenceDataFramework {
-
-  #nukeCustomObjects;
-
-  constructor(config) {
-    super(config);
-    this.#nukeCustomObjects = config.nukeCustomObjects || noop;
-  }
-
-  async reset() {
-    await this.init();
-    await this.wipe();
-  }
-
-  async wipe() {
-    await this.withTransaction(async (tx) => {
-      await this.#nukeCustomObjects(tx);
-      await this.#wipeRdfData(tx);
-    });
-  }
-
-  async #wipeRdfData(tx) {
-    await tx.query('DELETE FROM rdf_notification');
-    await tx.query('DELETE FROM rdf_hook');
-    await tx.query('DELETE FROM rdf_data_frame');
-    await tx.query('DELETE FROM rdf_projection_entity');
-    await tx.query('DELETE FROM rdf_entity');
-    await tx.query('DELETE FROM rdf_change_set');
-    await tx.query('DELETE FROM rdf_projection');
-  }
-};
diff --git a/test/api.test.js b/test/api.test.js
index a359f96..b17b386 100644
--- a/test/api.test.js
+++ b/test/api.test.js
@@ -5,13 +5,13 @@ const {
   describe, it, before, beforeEach, after, afterEach,
 } = require('zunit');
 
-const TestReferenceDataFramework = require('./TestReferenceDataFramework');
+const TestFilby = require('./TestFilby');
 
 const config = {
   migrations: 'test',
   database: {
-    user: 'rdf_test',
-    password: 'rdf_test',
+    user: 'fby_test',
+    password: 'fby_test',
   },
   notifications: {
     initialDelay: '0ms',
@@ -28,38 +28,38 @@ const config = {
 
 describe('API', () => {
 
-  let rdf;
+  let filby;
 
   before(async () => {
-    rdf = new TestReferenceDataFramework(config);
-    await rdf.reset();
+    filby = new TestFilby(config);
+    await filby.reset();
   });
 
   beforeEach(async () => {
-    rdf.removeAllListeners();
-    await rdf.wipe();
+    filby.removeAllListeners();
+    await filby.wipe();
   });
 
   afterEach(async () => {
-    await rdf.stopNotifications();
-    rdf.removeAllListeners();
+    await filby.stopNotifications();
+    filby.removeAllListeners();
   });
 
   after(async () => {
-    await rdf.stop();
+    await filby.stop();
   });
 
   describe('Projections', () => {
 
     it('should list projections', async () => {
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_projection VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_projection VALUES
           (1, 'VAT Rates', 1),
           (2, 'VAT Rates', 2),
           (3, 'CGT Rates', 1)`);
       });
 
-      const projections = await rdf.getProjections();
+      const projections = await filby.getProjections();
       eq(projections.length, 3);
       deq(projections[0], { id: 1, name: 'VAT Rates', version: 1 });
       deq(projections[1], { id: 2, name: 'VAT Rates', version: 2 });
@@ -67,14 +67,14 @@ describe('API', () => {
     });
 
     it('should get projection by name and version', async () => {
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_projection VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_projection VALUES
           (1, 'VAT Rates', 1),
           (2, 'VAT Rates', 2),
           (3, 'CGT Rates', 1)`);
       });
 
-      const projection = await rdf.getProjection('VAT Rates', 2);
+      const projection = await filby.getProjection('VAT Rates', 2);
       deq(projection, { id: 2, name: 'VAT Rates', version: 2 });
     });
   });
@@ -82,27 +82,27 @@ describe('API', () => {
   describe('Change Sets', () => {
 
     it('should list change sets for the given projection', async () => {
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_projection (id, name, version) VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_projection (id, name, version) VALUES
           (1, 'VAT Rates', 1),
           (2, 'CGT Rates', 1)`);
-        await tx.query(`INSERT INTO rdf_entity (id, name, version) VALUES
+        await tx.query(`INSERT INTO fby_entity (id, name, version) VALUES
           (1, 'Country', 1),
           (2, 'VAT Rate', 1),
           (3, 'CGT Rate', 1)
         `);
-        await tx.query(`INSERT INTO rdf_projection_entity (projection_id, entity_id) VALUES
+        await tx.query(`INSERT INTO fby_projection_entity (projection_id, entity_id) VALUES
           (1, 1),
           (1, 2),
           (2, 1),
           (2, 3)`);
-        await tx.query(`INSERT INTO rdf_change_set (id, effective, notes) VALUES
+        await tx.query(`INSERT INTO fby_change_set (id, effective, notes) VALUES
           (1, '2020-04-05T00:00:00.000Z', 'Countries'),
           (2, '2020-04-05T00:00:00.000Z', '2020 VAT Rates'),
           (3, '2020-04-05T00:00:00.000Z', '2020 CGT Rates'),
           (4, '2021-04-05T00:00:00.000Z', '2021 VAT Rates'),
           (5, '2021-04-05T00:00:00.000Z', '2021 CGT Rates')`);
-        await tx.query(`INSERT INTO rdf_data_frame (change_set_id, entity_id, action) VALUES
+        await tx.query(`INSERT INTO fby_data_frame (change_set_id, entity_id, action) VALUES
           (1, 1, 'POST'),
           (2, 2, 'POST'),
           (3, 3, 'POST'),
@@ -110,8 +110,8 @@ describe('API', () => {
           (5, 3, 'POST')`);
       });
 
-      const projection = await rdf.getProjection('VAT Rates', 1);
-      const changelog = (await rdf.getChangeLog(projection)).map(({ id, effective, notes }) => ({ id, effective: effective.toISOString(), notes }));
+      const projection = await filby.getProjection('VAT Rates', 1);
+      const changelog = (await filby.getChangeLog(projection)).map(({ id, effective, notes }) => ({ id, effective: effective.toISOString(), notes }));
 
       eq(changelog.length, 3);
       deq(changelog[0], { id: 1, effective: '2020-04-05T00:00:00.000Z', notes: 'Countries' });
@@ -120,39 +120,39 @@ describe('API', () => {
     });
 
     it('should dedupe change sets', async () => {
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_projection (id, name, version) VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_projection (id, name, version) VALUES
           (1, 'VAT Rates', 1)`);
-        await tx.query(`INSERT INTO rdf_entity (id, name, version) VALUES
+        await tx.query(`INSERT INTO fby_entity (id, name, version) VALUES
           (1, 'Country', 1),
           (2, 'VAT Rate', 1)
         `);
-        await tx.query(`INSERT INTO rdf_projection_entity (projection_id, entity_id) VALUES
+        await tx.query(`INSERT INTO fby_projection_entity (projection_id, entity_id) VALUES
           (1, 1),
           (1, 2)`);
-        await tx.query(`INSERT INTO rdf_change_set (id, effective, notes) VALUES
+        await tx.query(`INSERT INTO fby_change_set (id, effective, notes) VALUES
           (1, '2020-04-05T00:00:00.000Z', 'Everything')`);
-        await tx.query(`INSERT INTO rdf_data_frame (change_set_id, entity_id, action) VALUES
+        await tx.query(`INSERT INTO fby_data_frame (change_set_id, entity_id, action) VALUES
           (1, 1, 'POST'),
           (1, 2, 'POST'),
           (1, 2, 'POST')`);
       });
 
-      const projection = await rdf.getProjection('VAT Rates', 1);
-      const changelog = (await rdf.getChangeLog(projection)).map(({ id, effective, notes }) => ({ id, effective: effective.toISOString(), notes }));
+      const projection = await filby.getProjection('VAT Rates', 1);
+      const changelog = (await filby.getChangeLog(projection)).map(({ id, effective, notes }) => ({ id, effective: effective.toISOString(), notes }));
       eq(changelog.length, 1);
       deq(changelog[0], { id: 1, effective: '2020-04-05T00:00:00.000Z', notes: 'Everything' });
     });
 
     it('should get change set by id', async () => {
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_change_set (id, effective, notes) VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_change_set (id, effective, notes) VALUES
           (1, '2020-04-05T00:00:00.000Z', 'Countries'),
           (2, '2020-04-05T00:00:00.000Z', '2020 VAT Rates'),
           (3, '2020-04-05T00:00:00.000Z', '2020 CGT Rates')`);
       });
 
-      const changeSet = await rdf.getChangeSet(2);
+      const changeSet = await filby.getChangeSet(2);
       eq(changeSet.id, 2);
       eq(changeSet.effective.toISOString(), '2020-04-05T00:00:00.000Z');
       eq(changeSet.notes, '2020 VAT Rates');
diff --git a/test/docker-compose.yaml b/test/docker-compose.yaml
index 528e466..0970b6e 100644
--- a/test/docker-compose.yaml
+++ b/test/docker-compose.yaml
@@ -3,10 +3,10 @@ version: '3.8'
 services:
   postgres:
     image: postgres:16-alpine
-    container_name: rdf_test
+    container_name: fby_test
     environment:
-      POSTGRES_DB: rdf_test
-      POSTGRES_USER: rdf_test
-      POSTGRES_PASSWORD: rdf_test
+      POSTGRES_DB: fby_test
+      POSTGRES_USER: fby_test
+      POSTGRES_PASSWORD: fby_test
     ports:
       - "5432:5432"
diff --git a/test/dsl.test.js b/test/dsl.test.js
index 07d9741..c20ae4b 100644
--- a/test/dsl.test.js
+++ b/test/dsl.test.js
@@ -7,13 +7,13 @@ const {
   describe, it, before, beforeEach, after, afterEach,
 } = require('zunit');
 
-const TestReferenceDataFramework = require('./TestReferenceDataFramework');
+const TestFilby = require('./TestFilby');
 
 const config = {
   migrations: 'test/dsl',
   database: {
-    user: 'rdf_test',
-    password: 'rdf_test',
+    user: 'fby_test',
+    password: 'fby_test',
   },
   notifications: {
     initialDelay: '0ms',
@@ -33,21 +33,21 @@ const config = {
 
 describe('DSL', () => {
 
-  let rdf;
+  let filby;
 
   before(async () => {
     deleteMigrations();
-    rdf = new TestReferenceDataFramework(config);
-    await rdf.reset();
+    filby = new TestFilby(config);
+    await filby.reset();
   });
 
   beforeEach(async () => {
     deleteMigrations();
-    await rdf.wipe();
+    await filby.wipe();
   });
 
   after(async () => {
-    await rdf.stop();
+    await filby.stop();
   });
 
   describe('Projections', () => {
@@ -71,7 +71,7 @@ describe('DSL', () => {
           - entity: VAT Rate
             version: 1
         `);
-      const { rows: projections } = await rdf.withTransaction((tx) => tx.query('SELECT name, version FROM rdf_projection'));
+      const { rows: projections } = await filby.withTransaction((tx) => tx.query('SELECT name, version FROM fby_projection'));
 
       eq(projections.length, 1);
       deq(projections[0], { name: 'VAT Rates', version: 1 });
@@ -164,7 +164,7 @@ describe('DSL', () => {
           - type
       `);
 
-      const { rows: entities } = await rdf.withTransaction((tx) => tx.query('SELECT name, version FROM rdf_entity'));
+      const { rows: entities } = await filby.withTransaction((tx) => tx.query('SELECT name, version FROM fby_entity'));
 
       eq(entities.length, 1);
       deq(entities[0], { name: 'VAT Rate', version: 1 });
@@ -509,10 +509,10 @@ describe('DSL', () => {
               rate: 0
       `);
 
-      const projection = await rdf.getProjection('VAT Rates', 1);
-      const changeLog = await rdf.getChangeLog(projection);
+      const projection = await filby.getProjection('VAT Rates', 1);
+      const changeLog = await filby.getChangeLog(projection);
 
-      await rdf.withTransaction(async (tx) => {
+      await filby.withTransaction(async (tx) => {
         const { rows: aggregate1 } = await tx.query('SELECT * FROM get_vat_rate_v1_aggregate($1) ORDER BY rate DESC', [changeLog[0].id]);
         eq(aggregate1.length, 3);
         deq(aggregate1[0], { type: 'standard', rate: 0.10 });
@@ -614,10 +614,10 @@ describe('DSL', () => {
             - type: zero
       `);
 
-      const projection = await rdf.getProjection('VAT Rates', 1);
-      const changeLog = await rdf.getChangeLog(projection);
+      const projection = await filby.getProjection('VAT Rates', 1);
+      const changeLog = await filby.getChangeLog(projection);
 
-      await rdf.withTransaction(async (tx) => {
+      await filby.withTransaction(async (tx) => {
         const { rows: aggregate1 } = await tx.query('SELECT * FROM get_vat_rate_v1_aggregate($1) ORDER BY rate DESC', [changeLog[0].id]);
         eq(aggregate1.length, 3);
         deq(aggregate1[0], { type: 'standard', rate: 0.10 });
@@ -647,7 +647,7 @@ describe('DSL', () => {
           - type
       `);
 
-      const { rows: entities } = await rdf.withTransaction((tx) => tx.query('SELECT name, version FROM rdf_entity'));
+      const { rows: entities } = await filby.withTransaction((tx) => tx.query('SELECT name, version FROM fby_entity'));
 
       eq(entities.length, 1);
       deq(entities[0], { name: 'VAT Rate', version: 1 });
@@ -678,7 +678,7 @@ describe('DSL', () => {
         }
       `);
 
-      const { rows: entities } = await rdf.withTransaction((tx) => tx.query('SELECT name, version FROM rdf_entity'));
+      const { rows: entities } = await filby.withTransaction((tx) => tx.query('SELECT name, version FROM fby_entity'));
 
       eq(entities.length, 1);
       deq(entities[0], { name: 'VAT Rate', version: 1 });
@@ -686,11 +686,11 @@ describe('DSL', () => {
 
     it('supports SQL', async (t) => {
       await applySql(t.name, `
-        INSERT INTO rdf_entity (id, name, version) VALUES
+        INSERT INTO fby_entity (id, name, version) VALUES
         (1, 'VAT Rate', 1);
       `);
 
-      const { rows: entities } = await rdf.withTransaction((tx) => tx.query('SELECT name, version FROM rdf_entity'));
+      const { rows: entities } = await filby.withTransaction((tx) => tx.query('SELECT name, version FROM fby_entity'));
 
       eq(entities.length, 1);
       deq(entities[0], { name: 'VAT Rate', version: 1 });
@@ -732,7 +732,7 @@ describe('DSL', () => {
         - event: Any Change
       `);
 
-      const { rows: hooks } = await rdf.withTransaction((tx) => tx.query('SELECT name, version, event FROM rdf_hook h LEFT JOIN rdf_projection p ON h.projection_id = p.id'));
+      const { rows: hooks } = await filby.withTransaction((tx) => tx.query('SELECT name, version, event FROM fby_hook h LEFT JOIN fby_projection p ON h.projection_id = p.id'));
 
       eq(hooks.length, 2);
       deq(hooks[0], { name: 'VAT Rates', version: 1, event: 'VAT Rates Change' });
@@ -787,7 +787,7 @@ describe('DSL', () => {
 
   async function apply(name, script, extension) {
     fs.writeFileSync(path.join(__dirname, 'dsl', `001.${name.replace(/ /g, '-')}.${extension}`), script, { encoding: 'utf-8' });
-    return rdf.init();
+    return filby.init();
   }
 
   function deleteMigrations() {
diff --git a/test/notifications.test.js b/test/notifications.test.js
index ae39e2d..f27a3be 100644
--- a/test/notifications.test.js
+++ b/test/notifications.test.js
@@ -5,13 +5,13 @@ const {
   describe, it, before, beforeEach, after, afterEach,
 } = require('zunit');
 
-const TestReferenceDataFramework = require('./TestReferenceDataFramework');
+const TestFilby = require('./TestFilby');
 
 const config = {
   migrations: 'test',
   database: {
-    user: 'rdf_test',
-    password: 'rdf_test',
+    user: 'fby_test',
+    password: 'fby_test',
   },
   notifications: {
     initialDelay: '0ms',
@@ -26,83 +26,83 @@ const config = {
 
 describe('Notifications', () => {
 
-  let rdf;
+  let filby;
 
   before(async () => {
-    rdf = new TestReferenceDataFramework(config);
-    await rdf.reset();
+    filby = new TestFilby(config);
+    await filby.reset();
   });
 
   beforeEach(async () => {
-    rdf.removeAllListeners();
-    await rdf.wipe();
+    filby.removeAllListeners();
+    await filby.wipe();
   });
 
   afterEach(async () => {
-    await rdf.stopNotifications();
-    rdf.removeAllListeners();
+    await filby.stopNotifications();
+    filby.removeAllListeners();
   });
 
   after(async () => {
-    await rdf.stop();
+    await filby.stop();
   });
 
   it('should notify interested parties of projection changes', async (t, done) => {
-    await rdf.withTransaction(async (tx) => {
-      await tx.query(`INSERT INTO rdf_projection (id, name, version) VALUES
+    await filby.withTransaction(async (tx) => {
+      await tx.query(`INSERT INTO fby_projection (id, name, version) VALUES
           (1, 'VAT Rates', 1),
           (2, 'CGT Rates', 1)`);
-      await tx.query(`INSERT INTO rdf_hook (id, projection_id, event) VALUES
+      await tx.query(`INSERT INTO fby_hook (id, projection_id, event) VALUES
           (1, 1, 'VAT Rate Changed'),
           (2, 2, 'CGT Rate Changed')`);
-      await tx.query(`INSERT INTO rdf_notification (hook_id, projection_id, scheduled_for) VALUES
+      await tx.query(`INSERT INTO fby_notification (hook_id, projection_id, scheduled_for) VALUES
           (1, 1, now())`);
     });
 
-    rdf.once('VAT Rate Changed', ({ event, projection }) => {
+    filby.once('VAT Rate Changed', ({ event, projection }) => {
       eq(event, 'VAT Rate Changed');
       deq(projection, { name: 'VAT Rates', version: 1 });
       done();
     });
 
-    rdf.startNotifications();
+    filby.startNotifications();
   });
 
   it('should not redeliver successful notifications', async (t, done) => {
-    await rdf.withTransaction(async (tx) => {
-      await tx.query(`INSERT INTO rdf_projection (id, name, version) VALUES
+    await filby.withTransaction(async (tx) => {
+      await tx.query(`INSERT INTO fby_projection (id, name, version) VALUES
           (1, 'VAT Rates', 1),
           (2, 'CGT Rates', 1)`);
-      await tx.query(`INSERT INTO rdf_hook (id, projection_id, event) VALUES
+      await tx.query(`INSERT INTO fby_hook (id, projection_id, event) VALUES
           (1, 1, 'VAT Rate Changed'),
           (2, 2, 'CGT Rate Changed')`);
-      await tx.query(`INSERT INTO rdf_notification (hook_id, projection_id, scheduled_for) VALUES
+      await tx.query(`INSERT INTO fby_notification (hook_id, projection_id, scheduled_for) VALUES
           (1, 1, now())`);
     });
 
-    rdf.on('VAT Rate Changed', ({ event, projection }) => {
+    filby.on('VAT Rate Changed', ({ event, projection }) => {
       eq(event, 'VAT Rate Changed');
       deq(projection, { name: 'VAT Rates', version: 1 });
       setTimeout(done, 1000);
     });
 
-    rdf.startNotifications();
+    filby.startNotifications();
   });
 
   it('should redeliver unsuccessful notifications up to the maximum number of attempts', async (t, done) => {
-    await rdf.withTransaction(async (tx) => {
-      await tx.query(`INSERT INTO rdf_projection (id, name, version) VALUES
+    await filby.withTransaction(async (tx) => {
+      await tx.query(`INSERT INTO fby_projection (id, name, version) VALUES
           (1, 'VAT Rates', 1),
           (2, 'CGT Rates', 1)`);
-      await tx.query(`INSERT INTO rdf_hook (id, projection_id, event) VALUES
+      await tx.query(`INSERT INTO fby_hook (id, projection_id, event) VALUES
           (1, 1, 'VAT Rate Changed'),
           (2, 2, 'CGT Rate Changed')`);
-      await tx.query(`INSERT INTO rdf_notification (hook_id, projection_id, scheduled_for) VALUES
+      await tx.query(`INSERT INTO fby_notification (hook_id, projection_id, scheduled_for) VALUES
           (1, 1, now())`);
     });
 
     let attempt = 0;
-    rdf.on('VAT Rate Changed', async () => {
+    filby.on('VAT Rate Changed', async () => {
       attempt++;
       throw new Error('Oh Noes!');
     });
@@ -112,31 +112,31 @@ describe('Notifications', () => {
       done();
     }, 500);
 
-    rdf.startNotifications();
+    filby.startNotifications();
   });
 
   it('should capture the last delivery error', async (t, done) => {
     const checkpoint = new Date();
 
-    await rdf.withTransaction(async (tx) => {
-      await tx.query(`INSERT INTO rdf_projection (id, name, version) VALUES
+    await filby.withTransaction(async (tx) => {
+      await tx.query(`INSERT INTO fby_projection (id, name, version) VALUES
           (1, 'VAT Rates', 1),
           (2, 'CGT Rates', 1)`);
-      await tx.query(`INSERT INTO rdf_hook (id, projection_id, event) VALUES
+      await tx.query(`INSERT INTO fby_hook (id, projection_id, event) VALUES
           (1, 1, 'VAT Rate Changed'),
           (2, 2, 'CGT Rate Changed')`);
-      await tx.query(`INSERT INTO rdf_notification (hook_id, projection_id, scheduled_for) VALUES
+      await tx.query(`INSERT INTO fby_notification (hook_id, projection_id, scheduled_for) VALUES
           (1, 1, now())`);
     });
 
     let attempt = 0;
-    rdf.on('VAT Rate Changed', () => {
+    filby.on('VAT Rate Changed', () => {
       attempt++;
       throw new Error(`Oh Noes! ${attempt}`);
     });
 
     setTimeout(async () => {
-      const { rows: notifications } = await rdf.withTransaction(async (tx) => tx.query('SELECT * FROM rdf_notification'));
+      const { rows: notifications } = await filby.withTransaction(async (tx) => tx.query('SELECT * FROM fby_notification'));
 
       eq(notifications.length, 1);
       eq(notifications[0].status, 'PENDING');
@@ -145,6 +145,6 @@ describe('Notifications', () => {
       done();
     }, 500);
 
-    rdf.startNotifications();
+    filby.startNotifications();
   });
 });
diff --git a/test/schema.test.js b/test/schema.test.js
index 6c33962..f6b958b 100644
--- a/test/schema.test.js
+++ b/test/schema.test.js
@@ -5,13 +5,13 @@ const {
   describe, it, before, beforeEach, after, afterEach,
 } = require('zunit');
 
-const TestReferenceDataFramework = require('./TestReferenceDataFramework');
+const TestFilby = require('./TestFilby');
 
 const config = {
   migrations: 'test',
   database: {
-    user: 'rdf_test',
-    password: 'rdf_test',
+    user: 'fby_test',
+    password: 'fby_test',
   },
   notifications: {
     initialDelay: '0ms',
@@ -23,36 +23,36 @@ const config = {
 
 describe('Schema', () => {
 
-  let rdf;
+  let filby;
 
   before(async () => {
-    rdf = new TestReferenceDataFramework(config);
-    await rdf.reset();
+    filby = new TestFilby(config);
+    await filby.reset();
   });
 
   beforeEach(async () => {
-    await rdf.wipe();
+    await filby.wipe();
   });
 
   after(async () => {
-    await rdf.stop();
+    await filby.stop();
   });
 
   describe('Projections', () => {
     it('should prevent duplicate projections', async () => {
 
-      await rdf.withTransaction(async (tx) => {
-        await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('NOT DUPLICATE', 1)");
-        await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('NOT DUPLICATE', 2)");
+      await filby.withTransaction(async (tx) => {
+        await tx.query("INSERT INTO fby_projection (name, version) VALUES ('NOT DUPLICATE', 1)");
+        await tx.query("INSERT INTO fby_projection (name, version) VALUES ('NOT DUPLICATE', 2)");
 
-        await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('NOT DUPLICATE A', 1)");
-        await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('NOT DUPLICATE B', 1)");
+        await tx.query("INSERT INTO fby_projection (name, version) VALUES ('NOT DUPLICATE A', 1)");
+        await tx.query("INSERT INTO fby_projection (name, version) VALUES ('NOT DUPLICATE B', 1)");
       });
 
       await rejects(async () => {
-        await rdf.withTransaction(async (tx) => {
-          await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('DUPLICATE', 1)");
-          await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('DUPLICATE', 1)");
+        await filby.withTransaction(async (tx) => {
+          await tx.query("INSERT INTO fby_projection (name, version) VALUES ('DUPLICATE', 1)");
+          await tx.query("INSERT INTO fby_projection (name, version) VALUES ('DUPLICATE', 1)");
         });
       }, (err) => {
         eq(err.code, '23505');
@@ -62,8 +62,8 @@ describe('Schema', () => {
 
     it('should enforce projections are named', async () => {
       await rejects(async () => {
-        await rdf.withTransaction(async (tx) => {
-          await tx.query('INSERT INTO rdf_projection (name, version) VALUES (NULL, 1)');
+        await filby.withTransaction(async (tx) => {
+          await tx.query('INSERT INTO fby_projection (name, version) VALUES (NULL, 1)');
         });
       }, (err) => {
         eq(err.code, '23502');
@@ -73,8 +73,8 @@ describe('Schema', () => {
 
     it('should enforce projections are versioned', async () => {
       await rejects(async () => {
-        await rdf.withTransaction(async (tx) => {
-          await tx.query("INSERT INTO rdf_projection (name, version) VALUES ('OK', NULL)");
+        await filby.withTransaction(async (tx) => {
+          await tx.query("INSERT INTO fby_projection (name, version) VALUES ('OK', NULL)");
         });
       }, (err) => {
         eq(err.code, '23502');
@@ -86,16 +86,16 @@ describe('Schema', () => {
   describe('Change Sets', () => {
     it('should prevent duplicate change sets', async () => {
 
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_change_set (id, effective) VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_change_set (id, effective) VALUES
           (1, '2023-01-01T00:00:00.000Z'),
           (2, '2023-01-01T00:00:00.000Z')
         `);
       });
 
       await rejects(async () => {
-        await rdf.withTransaction(async (tx) => {
-          await tx.query(`INSERT INTO rdf_change_set (id, effective) VALUES
+        await filby.withTransaction(async (tx) => {
+          await tx.query(`INSERT INTO fby_change_set (id, effective) VALUES
             (3, '2023-01-01T00:00:00.000Z'),
             (3, '2023-01-01T00:00:00.000Z')`);
         });
@@ -107,8 +107,8 @@ describe('Schema', () => {
 
     it('should enforce change sets have effective dates', async () => {
       await rejects(async () => {
-        await rdf.withTransaction(async (tx) => {
-          await tx.query('INSERT INTO rdf_change_set (id, effective) VALUES (1, NULL)');
+        await filby.withTransaction(async (tx) => {
+          await tx.query('INSERT INTO fby_change_set (id, effective) VALUES (1, NULL)');
         });
       }, (err) => {
         eq(err.code, '23502');
@@ -119,22 +119,22 @@ describe('Schema', () => {
     it('should default last modified date to now', async () => {
       const checkpoint = new Date();
 
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_change_set (id, effective, notes) VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_change_set (id, effective, notes) VALUES
           (1, '2020-04-05T00:00:00.000Z', 'Countries')`);
       });
 
-      const changeSet = await rdf.getChangeSet(1);
+      const changeSet = await filby.getChangeSet(1);
       ok(changeSet.lastModified >= checkpoint);
     });
 
     it('should default entity tag to random hex', async () => {
-      await rdf.withTransaction(async (tx) => {
-        await tx.query(`INSERT INTO rdf_change_set (id, effective, notes) VALUES
+      await filby.withTransaction(async (tx) => {
+        await tx.query(`INSERT INTO fby_change_set (id, effective, notes) VALUES
           (1, '2020-04-05T00:00:00.000Z', 'Countries')`);
       });
 
-      const changeSet = await rdf.getChangeSet(1);
+      const changeSet = await filby.getChangeSet(1);
       match(changeSet.entityTag, /^[a-f|0-9]{20}$/);
     });
   });