diff --git a/.credo.exs b/.credo.exs
index 446acb5bc6..2dfa19776b 100644
--- a/.credo.exs
+++ b/.credo.exs
@@ -168,18 +168,17 @@
{Credo.Check.Warning.WrongTestFileExtension, []},
# Controversial but included
{Credo.Check.Consistency.MultiAliasImportRequireUse, []},
- {Credo.Check.Design.DuplicatedCode, []},
{Credo.Check.Readability.MultiAlias, []},
{Credo.Check.Readability.SeparateAliasRequire, []},
- {Credo.Check.Readability.StrictModuleLayout, []},
- # Checks scheduled for next check update (opt-in for now, will bump exit_status soon)
- {Credo.Check.Consistency.UnusedVariableNames, [exit_status: 0]}
+ {Credo.Check.Readability.StrictModuleLayout, []}
],
disabled: [
#
# Controversial and experimental checks (opt-in, just move the check to `:enabled`
# and be sure to use `mix credo --strict` to see low priority checks)
#
+ {Credo.Check.Consistency.UnusedVariableNames, [exit_status: 0]},
+ {Credo.Check.Design.DuplicatedCode, []},
{Credo.Check.Design.SkipTestWithoutComment, []},
{Credo.Check.Readability.AliasAs, []},
{Credo.Check.Readability.BlockPipe, []},
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8ce343d8c4..2ab13a0fb2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,11 @@ and this project adheres to
### Added
+- Adds a UI for managing collections
+ [#2567](https://github.com/OpenFn/lightning/issues/2567)
+- Introduces collections, a programmatic workflow data-sharing resource.
+ [#2551](https://github.com/OpenFn/lightning/issues/2551)
+
### Changed
### Fixed
diff --git a/Dockerfile b/Dockerfile
index 1d9ee7a1d8..fdfdf80891 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -10,7 +10,7 @@
# - https://hub.docker.com/r/hexpm/elixir/tags - for the build image
# - https://hub.docker.com/_/debian?tab=tags&page=1&name=bullseye-20210902-slim - for the release image
# - https://pkgs.org/ - resource for finding needed packages
-# - Ex: hexpm/elixir:1.13.2-erlang-24.2.1-debian-bullseye-20210902-slim
+# - Ex: hexpm/elixir:1.16.2-erlang-26.2.5-debian-bookworm-20240513
#
ARG ELIXIR_VERSION=1.16.2
ARG OTP_VERSION=26.2.5
diff --git a/assets/package-lock.json b/assets/package-lock.json
index a92abc81a8..d4b0a5343d 100644
--- a/assets/package-lock.json
+++ b/assets/package-lock.json
@@ -34,7 +34,7 @@
"zustand": "^4.3.7"
},
"devDependencies": {
- "@openfn/ws-worker": "^1.7.0",
+ "@openfn/ws-worker": "^1.8.0",
"@types/marked": "^4.0.8",
"@types/react": "^18.0.15",
"@types/react-dom": "^18.0.6",
@@ -568,9 +568,9 @@
}
},
"node_modules/@openfn/compiler": {
- "version": "0.3.3",
- "resolved": "https://registry.npmjs.org/@openfn/compiler/-/compiler-0.3.3.tgz",
- "integrity": "sha512-aL7dgUDhz6jrRpnsuk1YJyud7vx3TGIzfDSoNP4J6T1l542jF7fGQ46pJj72Jejl4kycM5MYRKvhtAKPfeFZTw==",
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/@openfn/compiler/-/compiler-0.4.0.tgz",
+ "integrity": "sha512-oMBlaippanMkbWLsEZssxoz+OFOS45Ts1++jCf7fSS3KJUXjeuIGK+xSQPuXRcgRwL81lCbkJsF/MtBeR2a2eQ==",
"dev": true,
"dependencies": {
"@openfn/describe-package": "0.1.2",
@@ -600,16 +600,16 @@
}
},
"node_modules/@openfn/engine-multi": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/@openfn/engine-multi/-/engine-multi-1.3.0.tgz",
- "integrity": "sha512-ZZI9zc/zIjt1VSRLysTqhvHubb2WQqVjtFiDht/p8umW9QIf2GcTLfCfaZYFgOsDlBxJpzoXWCu/tYcg+L40SQ==",
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/@openfn/engine-multi/-/engine-multi-1.4.1.tgz",
+ "integrity": "sha512-mzoK7iYaNLGmACE30iCQzK2/qt56wU7+60fMMx1PrQA4sG4hxCMqnnrYnF7qjTFkwuNH1++4RfkcxOhVd8fSPg==",
"dev": true,
"dependencies": {
- "@openfn/compiler": "0.3.3",
+ "@openfn/compiler": "0.4.0",
"@openfn/language-common": "2.0.0-rc3",
"@openfn/lexicon": "^1.1.0",
"@openfn/logger": "1.0.2",
- "@openfn/runtime": "1.4.2",
+ "@openfn/runtime": "1.5.1",
"fast-safe-stringify": "^2.1.1"
}
},
@@ -641,9 +641,9 @@
}
},
"node_modules/@openfn/runtime": {
- "version": "1.4.2",
- "resolved": "https://registry.npmjs.org/@openfn/runtime/-/runtime-1.4.2.tgz",
- "integrity": "sha512-JLrOg3iUZN8zSlTv9IQMjE5RAPqvG0+bKvnrRrgLFM08JpTuclGzQTnzi+019lTA5B2r/zScKen1gqH6KbMSTQ==",
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/@openfn/runtime/-/runtime-1.5.1.tgz",
+ "integrity": "sha512-Gy91JcjEqcN/u9xRY9Ur4FQJ8c2xseFibE5n+oDqHhb8bti6QWlOlwrywkfg6rs19FcNeveErWflZceimkgSFA==",
"dev": true,
"dependencies": {
"@openfn/logger": "1.0.2",
@@ -652,16 +652,16 @@
}
},
"node_modules/@openfn/ws-worker": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/@openfn/ws-worker/-/ws-worker-1.7.0.tgz",
- "integrity": "sha512-0vCu9pNvsVE/EwhfyW5I04cHHk7OUVbTavoZgbKEViIeIQL0F8pNes5PD1a/bBW7NdYuMLEd74ybLZjlmEeIUw==",
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/@openfn/ws-worker/-/ws-worker-1.8.1.tgz",
+ "integrity": "sha512-4DkO7gn67DlwKQhT85UCLM5A8Ac5bUR9tdwswEUSLgfeWK5/VJpwNAVJzg2EBEjUFx/+MZb+CfXZ63Q8XhLMTQ==",
"dev": true,
"dependencies": {
"@koa/router": "^12.0.0",
- "@openfn/engine-multi": "1.3.0",
+ "@openfn/engine-multi": "1.4.1",
"@openfn/lexicon": "^1.1.0",
"@openfn/logger": "1.0.2",
- "@openfn/runtime": "1.4.2",
+ "@openfn/runtime": "1.5.1",
"@types/koa-logger": "^3.1.2",
"@types/ws": "^8.5.6",
"fast-safe-stringify": "^2.1.1",
diff --git a/assets/package.json b/assets/package.json
index b7f0e6f6ff..d7cc4e7bcc 100644
--- a/assets/package.json
+++ b/assets/package.json
@@ -36,7 +36,7 @@
"zustand": "^4.3.7"
},
"devDependencies": {
- "@openfn/ws-worker": "^1.7.0",
+ "@openfn/ws-worker": "^1.8.0",
"@types/marked": "^4.0.8",
"@types/react": "^18.0.15",
"@types/react-dom": "^18.0.6",
diff --git a/config/config.exs b/config/config.exs
index b7bb8d38cd..30c18fcfff 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -144,6 +144,8 @@ config :lightning, :default_retention_period, nil
config :lightning, Lightning.Runtime.RuntimeManager, start: false
+config :lightning, LightningWeb.CollectionsController, stream_limit: 1_000
+
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
diff --git a/config/test.exs b/config/test.exs
index 1300902c31..bf91361612 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -161,3 +161,5 @@ config :lightning, :github_app,
FaFp+DyAe+b4nDwuJaW2LURbr8AEZga7oQj0uYxcYw==
-----END RSA PRIVATE KEY-----
"""
+
+config :lightning, LightningWeb.CollectionsController, stream_limit: 50
diff --git a/lib/lightning/accounts.ex b/lib/lightning/accounts.ex
index 9f5b2b543f..7caa061057 100644
--- a/lib/lightning/accounts.ex
+++ b/lib/lightning/accounts.ex
@@ -159,16 +159,22 @@ defmodule Lightning.Accounts do
Raises `Ecto.NoResultsError` if the User does not exist.
+ See `get_user/1`.
+ """
+ def get_user!(id), do: Repo.get!(User, id)
+
+ @doc """
+ Gets a single user.
+
## Examples
- iex> get_user!(123)
+ iex> get_user(123)
%User{}
-      iex> get_user!(456)
+      iex> get_user(456)
- ** (Ecto.NoResultsError)
-
+ nil
"""
- def get_user!(id), do: Repo.get!(User, id)
+ def get_user(id), do: Repo.get(User, id)
@doc """
Gets a single token.
@@ -696,8 +702,8 @@ defmodule Lightning.Accounts do
Gets the user with the given signed token.
"""
def get_user_by_session_token(token) do
- {:ok, query} = UserToken.verify_token_query(token, "session")
- Repo.one(query)
+ UserToken.verify_token_query(token, "session")
+ |> Repo.one()
end
@doc """
@@ -723,8 +729,7 @@ defmodule Lightning.Accounts do
Checks if the given sudo token for the user is valid
"""
def sudo_session_token_valid?(user, token) do
- {:ok, token_query} =
- UserToken.verify_token_query(token, "sudo_session")
+ token_query = UserToken.verify_token_query(token, "sudo_session")
query = from t in token_query, where: t.user_id == ^user.id
Repo.exists?(query)
@@ -770,8 +775,8 @@ defmodule Lightning.Accounts do
Gets the user with the given signed token.
"""
def get_user_by_auth_token(token) do
- {:ok, query} = UserToken.verify_token_query(token, "auth")
- Repo.one(query)
+ UserToken.verify_token_query(token, "auth")
+ |> Repo.one()
end
@doc """
@@ -796,9 +801,19 @@ defmodule Lightning.Accounts do
@doc """
Gets the user with the given signed token.
"""
+ def get_user_by_api_token(claims) when is_map(claims) do
+ case claims do
+ %{sub: "user:" <> id} ->
+ Repo.get(User, id)
+
+ _ ->
+ nil
+ end
+ end
+
def get_user_by_api_token(token) do
- {:ok, query} = UserToken.verify_token_query(token, "api")
- Repo.one(query)
+ UserToken.verify_token_query(token, "api")
+ |> Repo.one()
end
@doc """
diff --git a/lib/lightning/accounts/user_token.ex b/lib/lightning/accounts/user_token.ex
index d079c35108..f4d8a7895e 100644
--- a/lib/lightning/accounts/user_token.ex
+++ b/lib/lightning/accounts/user_token.ex
@@ -18,7 +18,6 @@ defmodule Lightning.Accounts.UserToken do
"""
use Lightning.Schema
- use Joken.Config
import Ecto.Query
@@ -47,15 +46,6 @@ defmodule Lightning.Accounts.UserToken do
timestamps updated_at: false
end
- def token_config do
- default_claims(skip: [:exp])
- |> add_claim(
- "my_key",
- fn -> "My custom claim" end,
- &(&1 == "My custom claim")
- )
- end
-
@doc """
Generates a token that will be stored in a signed place,
such as session or cookie. As they are signed, those
@@ -65,9 +55,10 @@ defmodule Lightning.Accounts.UserToken do
{binary(), Ecto.Changeset.t(%__MODULE__{})}
def build_token(user, "api" = context) do
token =
- Joken.generate_and_sign!(default_claims(skip: [:exp]), %{
- "user_id" => user.id
- })
+ Lightning.Tokens.PersonalAccessToken.generate_and_sign!(
+ %{"sub" => "user:#{user.id}"},
+ Lightning.Config.token_signer()
+ )
{token,
changeset(%__MODULE__{}, %{token: token, context: context, user_id: user.id})}
@@ -103,48 +94,36 @@ defmodule Lightning.Accounts.UserToken do
not expired (after @auth_validity_in_seconds or @session_validity_in_days).
"""
def verify_token_query(token, "auth" = context) do
- query =
- from(token in token_and_context_query(token, context),
- join: user in assoc(token, :user),
- where: token.inserted_at > ago(@auth_validity_in_seconds, "second"),
- select: user
- )
-
- {:ok, query}
+ from(token in token_and_context_query(token, context),
+ join: user in assoc(token, :user),
+ where: token.inserted_at > ago(@auth_validity_in_seconds, "second"),
+ select: user
+ )
end
def verify_token_query(token, "api" = context) do
- query =
- from(token in token_and_context_query(token, context),
- join: user in assoc(token, :user),
- select: user
- )
-
- {:ok, query}
+ from(token in token_and_context_query(token, context),
+ join: user in assoc(token, :user),
+ select: user
+ )
end
def verify_token_query(token, "session" = context) do
- query =
- from(token in token_and_context_query(token, context),
- join: user in assoc(token, :user),
- where: token.inserted_at > ago(@session_validity_in_days, "day"),
- select: user
- )
-
- {:ok, query}
+ from(token in token_and_context_query(token, context),
+ join: user in assoc(token, :user),
+ where: token.inserted_at > ago(@session_validity_in_days, "day"),
+ select: user
+ )
end
def verify_token_query(token, "sudo_session" = context) do
- query =
- from(token in token_and_context_query(token, context),
- join: user in assoc(token, :user),
- where:
- token.inserted_at >
- ago(@sudo_session_validity_in_seconds, "second"),
- select: user
- )
-
- {:ok, query}
+ from(token in token_and_context_query(token, context),
+ join: user in assoc(token, :user),
+ where:
+ token.inserted_at >
+ ago(@sudo_session_validity_in_seconds, "second"),
+ select: user
+ )
end
@doc """
diff --git a/lib/lightning/application.ex b/lib/lightning/application.ex
index 01b970124a..066bc937d3 100644
--- a/lib/lightning/application.ex
+++ b/lib/lightning/application.ex
@@ -126,7 +126,8 @@ defmodule Lightning.Application do
adaptor_service_childspec,
{Lightning.TaskWorker, name: :cli_task_worker},
{Lightning.Runtime.RuntimeManager,
- worker_secret: Lightning.Config.worker_secret()},
+ worker_secret: Lightning.Config.worker_secret(),
+ endpoint: LightningWeb.Endpoint},
{Lightning.KafkaTriggers.Supervisor, type: :supervisor}
# Start a worker by calling: Lightning.Worker.start_link(arg)
# {Lightning.Worker, arg}
diff --git a/lib/lightning/collections.ex b/lib/lightning/collections.ex
new file mode 100644
index 0000000000..e039fa321d
--- /dev/null
+++ b/lib/lightning/collections.ex
@@ -0,0 +1,260 @@
+defmodule Lightning.Collections do
+ @moduledoc """
+ Access to collections of unique key-value pairs shared across multiple workflows.
+ """
+ import Ecto.Query
+
+ alias Lightning.Collections.Collection
+ alias Lightning.Collections.Item
+ alias Lightning.Repo
+
+ @doc """
+ Returns the list of collections with optional ordering and preloading.
+
+ ## Parameters
+
+ - `opts`: A keyword list of options.
+ - `:order_by` (optional): The field by which to order the results. Default is `[asc: :name]`.
+ - `:preload` (optional): A list of associations to preload. Default is `[:project]`.
+
+ ## Examples
+
+ iex> list_collections()
+ [%Collection{}, ...]
+
+ iex> list_collections(order_by: [asc: :inserted_at], preload: [:project, :user])
+ [%Collection{}, ...]
+
+ ## Returns
+
+ - A list of `%Collection{}` structs, preloaded and ordered as specified.
+ """
+ @spec list_collections(keyword()) :: [Collection.t()]
+ def list_collections(opts \\ []) do
+ order_by = Keyword.get(opts, :order_by, asc: :name)
+ preload = Keyword.get(opts, :preload, [:project])
+
+ Repo.all(from(c in Collection, order_by: ^order_by, preload: ^preload))
+ end
+
+ @spec get_collection(String.t()) ::
+ {:ok, Collection.t()} | {:error, :not_found}
+ def get_collection(name) do
+ case Repo.get_by(Collection, name: name) do
+ nil -> {:error, :not_found}
+ collection -> {:ok, collection}
+ end
+ end
+
+ @doc """
+ Creates a new collection with the given attributes.
+
+ ## Parameters
+
+ - `attrs`: A map of attributes to create the collection.
+
+ ## Examples
+
+ iex> create_collection(%{name: "New Collection", description: "Description here"})
+ {:ok, %Collection{}}
+
+ iex> create_collection(%{name: nil})
+ {:error, %Ecto.Changeset{}}
+
+ ## Returns
+
+ - `{:ok, %Collection{}}` on success.
+ - `{:error, %Ecto.Changeset{}}` on failure due to validation errors.
+ """
+ @spec create_collection(map()) ::
+ {:ok, Collection.t()} | {:error, Ecto.Changeset.t()}
+ def create_collection(attrs) do
+ %Collection{}
+ |> Collection.changeset(attrs)
+ |> Repo.insert()
+ end
+
+ @doc """
+ Updates an existing collection with the given attributes.
+
+ ## Parameters
+
+ - `collection`: The existing `%Collection{}` struct to update.
+ - `attrs`: A map of attributes to update the collection.
+
+ ## Examples
+
+ iex> update_collection(collection, %{name: "Updated Name"})
+ {:ok, %Collection{}}
+
+ iex> update_collection(collection, %{name: nil})
+ {:error, %Ecto.Changeset{}}
+
+ ## Returns
+
+ - `{:ok, %Collection{}}` on success.
+ - `{:error, %Ecto.Changeset{}}` on failure due to validation errors.
+ """
+ @spec update_collection(Collection.t(), map()) ::
+ {:ok, Collection.t()} | {:error, Ecto.Changeset.t()}
+ def update_collection(collection, attrs) do
+ collection
+ |> Collection.changeset(attrs)
+ |> Repo.update()
+ end
+
+ @spec create_collection(Ecto.UUID.t(), String.t()) ::
+ {:ok, Collection.t()} | {:error, Ecto.Changeset.t()}
+ def create_collection(project_id, name) do
+ %Collection{}
+ |> Collection.changeset(%{project_id: project_id, name: name})
+ |> Repo.insert()
+ end
+
+ @spec delete_collection(Ecto.UUID.t()) ::
+ {:ok, Collection.t()}
+ | {:error, Ecto.Changeset.t()}
+ | {:error, :not_found}
+ def delete_collection(collection_id) do
+ case Repo.get(Collection, collection_id) do
+ nil -> {:error, :not_found}
+ collection -> Repo.delete(collection)
+ end
+ end
+
+ @spec get(Collection.t(), String.t()) :: Item.t() | nil
+ def get(%{id: collection_id}, key) do
+ Repo.get_by(Item, collection_id: collection_id, key: key)
+ end
+
+ @spec stream_all(Collection.t(), Enum.t()) :: Enum.t()
+ def stream_all(%{id: collection_id}, params \\ %{}) do
+ params = Map.new(params)
+ cursor = Map.get(params, :cursor)
+ limit = Map.fetch!(params, :limit)
+
+ collection_id
+ |> stream_query(cursor, limit)
+ |> filter_by_inserted_at(params)
+ |> Repo.stream()
+ end
+
+ @spec stream_match(Collection.t(), String.t(), Enum.t()) :: Enum.t()
+ def stream_match(
+ %{id: collection_id},
+ pattern,
+ params \\ %{}
+ ) do
+ pattern = format_pattern(pattern)
+ params = Map.new(params)
+ cursor = Map.get(params, :cursor)
+ limit = Map.fetch!(params, :limit)
+
+ collection_id
+ |> stream_query(cursor, limit)
+ |> filter_by_inserted_at(params)
+ |> where([i], like(i.key, ^pattern))
+ |> Repo.stream()
+ end
+
+ @spec put(Collection.t(), String.t(), String.t()) ::
+ :ok | {:error, Ecto.Changeset.t()}
+ def put(%{id: collection_id}, key, value) do
+ %Item{}
+ |> Item.changeset(%{collection_id: collection_id, key: key, value: value})
+ |> Repo.insert(
+ conflict_target: [:collection_id, :key],
+ on_conflict: [set: [value: value, updated_at: DateTime.utc_now()]]
+ )
+ |> then(fn result ->
+ with {:ok, _no_return} <- result, do: :ok
+ end)
+ end
+
+  @spec put_all(Collection.t(), [%{String.t() => String.t()}]) ::
+ {:ok, non_neg_integer()} | :error
+ def put_all(%{id: collection_id}, kv_list) do
+ item_list =
+ Enum.with_index(kv_list, fn %{"key" => key, "value" => value},
+ unique_index ->
+ now = DateTime.add(DateTime.utc_now(), unique_index, :microsecond)
+
+ %{
+ collection_id: collection_id,
+ key: key,
+ value: value,
+ inserted_at: now,
+ updated_at: now
+ }
+ end)
+
+ case Repo.insert_all(Item, item_list,
+ conflict_target: [:collection_id, :key],
+ on_conflict: {:replace, [:value, :updated_at]}
+ ) do
+ {n, nil} when n > 0 -> {:ok, n}
+ _error -> :error
+ end
+ end
+
+ @spec delete(Collection.t(), String.t()) :: :ok | {:error, :not_found}
+ def delete(%{id: collection_id}, key) do
+ query =
+ from(i in Item, where: i.collection_id == ^collection_id and i.key == ^key)
+
+ case Repo.delete_all(query) do
+ {0, nil} -> {:error, :not_found}
+ {1, nil} -> :ok
+ end
+ end
+
+ @spec delete_all(Collection.t(), String.t() | nil) :: {:ok, non_neg_integer()}
+ def delete_all(%{id: collection_id}, key_pattern \\ nil) do
+ query =
+ from(i in Item, where: i.collection_id == ^collection_id)
+ |> then(fn query ->
+ case key_pattern do
+ nil -> query
+ pattern -> where(query, [i], like(i.key, ^format_pattern(pattern)))
+ end
+ end)
+
+ with {count, _nil} <- Repo.delete_all(query), do: {:ok, count}
+ end
+
+ defp stream_query(collection_id, cursor, limit) do
+ Item
+ |> where([i], i.collection_id == ^collection_id)
+ |> order_by([i], asc: i.inserted_at)
+ |> limit(^limit)
+ |> then(fn query ->
+ case cursor do
+ nil -> query
+ ts_cursor -> where(query, [i], i.inserted_at > ^ts_cursor)
+ end
+ end)
+ end
+
+ defp filter_by_inserted_at(query, params) do
+ query
+ |> filter_by_created_before(params)
+ |> filter_by_created_after(params)
+ end
+
+ defp filter_by_created_after(query, %{created_after: created_after}),
+ do: where(query, [i], i.inserted_at >= ^created_after)
+
+ defp filter_by_created_after(query, _params), do: query
+
+ defp filter_by_created_before(query, %{created_before: created_before}),
+ do: where(query, [i], i.inserted_at < ^created_before)
+
+ defp filter_by_created_before(query, _params), do: query
+
+ defp format_pattern(pattern) do
+ pattern
+ |> String.replace("\\", "\\\\")
+ |> String.replace("%", "\\%")
+ |> String.replace("*", "%")
+ end
+end
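
A minimal IEx sketch of the context above (assuming `project` is an existing `%Lightning.Projects.Project{}`). Note that `stream_all/2` and `stream_match/3` return lazy `Repo.stream/1` results, so they must be consumed inside a transaction:

    iex> {:ok, collection} =
    ...>   Lightning.Collections.create_collection(project.id, "my-collection")
    iex> :ok = Lightning.Collections.put(collection, "user:1", ~s({"name": "Ada"}))
    iex> Lightning.Collections.get(collection, "user:1").key
    "user:1"
    iex> Lightning.Repo.transaction(fn ->
    ...>   collection
    ...>   |> Lightning.Collections.stream_match("user:*", %{limit: 100})
    ...>   |> Enum.map(& &1.key)
    ...> end)
    {:ok, ["user:1"]}
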
diff --git a/lib/lightning/collections/collection.ex b/lib/lightning/collections/collection.ex
new file mode 100644
index 0000000000..01be284e5e
--- /dev/null
+++ b/lib/lightning/collections/collection.ex
@@ -0,0 +1,37 @@
+defmodule Lightning.Collections.Collection do
+ @moduledoc """
+ Collection referenced by name associated to a project.
+ """
+ use Lightning.Schema
+
+ import Ecto.Changeset
+
+ @type t :: %__MODULE__{
+ id: Ecto.UUID.t(),
+ project_id: Ecto.UUID.t(),
+ name: String.t(),
+ inserted_at: NaiveDateTime.t(),
+ updated_at: NaiveDateTime.t()
+ }
+
+ schema "collections" do
+ field :name, :string
+ belongs_to :project, Lightning.Projects.Project
+ has_many :items, Lightning.Collections.Item
+
+ timestamps()
+ end
+
+ @doc false
+ def changeset(entry, attrs) do
+ entry
+ |> cast(attrs, [:project_id, :name])
+ |> validate_required([:project_id, :name])
+ |> validate_format(:name, ~r/^[a-z0-9]+([\-_.][a-z0-9]+)*$/,
+ message: "Collection name must be URL safe"
+ )
+ |> unique_constraint([:name],
+ message: "A collection with this name already exists"
+ )
+ end
+end
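
The `validate_format/3` call above only accepts lowercase, URL-safe names; a quick sketch of how the changeset behaves (assuming `project_id` is a valid project UUID):

    iex> alias Lightning.Collections.Collection
    iex> Collection.changeset(%Collection{}, %{project_id: project_id, name: "my-kv.store"}).valid?
    true
    iex> Collection.changeset(%Collection{}, %{project_id: project_id, name: "My Store!"}).errors[:name]
    {"Collection name must be URL safe", [validation: :format]}
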
diff --git a/lib/lightning/collections/item.ex b/lib/lightning/collections/item.ex
new file mode 100644
index 0000000000..1ec5f6c755
--- /dev/null
+++ b/lib/lightning/collections/item.ex
@@ -0,0 +1,48 @@
+defmodule Lightning.Collections.Item do
+ @moduledoc """
+ A key value entry of a collection bound to a project.
+ """
+ use Lightning.Schema
+
+ import Ecto.Changeset
+
+ @type t :: %__MODULE__{
+ collection_id: Ecto.UUID.t(),
+ key: String.t(),
+ value: String.t(),
+ inserted_at: DateTime.t(),
+ updated_at: DateTime.t()
+ }
+
+ @primary_key false
+ schema "collections_items" do
+ belongs_to :collection, Lightning.Collections.Collection, primary_key: true
+ field :key, :string, primary_key: true
+ field :value, :string
+
+ timestamps(type: :utc_datetime_usec)
+ end
+
+ @doc false
+ def changeset(entry, attrs) do
+ entry
+ |> cast(attrs, [:collection_id, :key, :value])
+ |> validate_required([:collection_id, :key, :value])
+ |> unique_constraint([:collection_id, :key])
+ |> foreign_key_constraint(:collection_id)
+ end
+
+ defimpl Jason.Encoder, for: __MODULE__ do
+ def encode(item, opts) do
+ Jason.Encode.map(
+ %{
+ key: item.key,
+ value: item.value,
+ created: item.inserted_at,
+ updated: item.updated_at
+ },
+ opts
+ )
+ end
+ end
+end
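
Because of the `Jason.Encoder` implementation above, items serialize with `created`/`updated` keys rather than the schema's `inserted_at`/`updated_at` field names, which is the shape the collections API returns; for example (timestamps illustrative):

    iex> item = %Lightning.Collections.Item{
    ...>   key: "user:1",
    ...>   value: "some value",
    ...>   inserted_at: ~U[2024-09-30 12:00:00.000000Z],
    ...>   updated_at: ~U[2024-09-30 12:00:00.000000Z]
    ...> }
    iex> item |> Jason.encode!() |> Jason.decode!()
    %{
      "created" => "2024-09-30T12:00:00.000000Z",
      "key" => "user:1",
      "updated" => "2024-09-30T12:00:00.000000Z",
      "value" => "some value"
    }
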
diff --git a/lib/lightning/config.ex b/lib/lightning/config.ex
index 6c09eacf67..ca675ebaf8 100644
--- a/lib/lightning/config.ex
+++ b/lib/lightning/config.ex
@@ -7,6 +7,26 @@ defmodule Lightning.Config do
@behaviour Lightning.Config
alias Lightning.Services.AdapterHelper
+ @impl true
+ def token_signer do
+ :persistent_term.get({__MODULE__, "token_signer"}, nil)
+ |> case do
+ nil ->
+ pem =
+ Application.get_env(:lightning, :workers, [])
+ |> Keyword.get(:private_key)
+
+ signer = Joken.Signer.create("RS256", %{"pem" => pem})
+
+ :persistent_term.put({__MODULE__, "token_signer"}, signer)
+
+ signer
+
+ signer ->
+ signer
+ end
+ end
+
@impl true
def run_token_signer do
pem =
@@ -213,6 +233,7 @@ defmodule Lightning.Config do
@callback run_token_signer() :: Joken.Signer.t()
@callback storage() :: term()
@callback storage(key :: atom()) :: term()
+ @callback token_signer() :: Joken.Signer.t()
@callback usage_tracking() :: Keyword.t()
@callback usage_tracking_cron_opts() :: [Oban.Plugins.Cron.cron_input()]
@callback worker_secret() :: binary() | nil
@@ -232,6 +253,10 @@ defmodule Lightning.Config do
impl().run_token_signer()
end
+ def token_signer do
+ impl().token_signer()
+ end
+
@doc """
Returns the Token signer used to verify worker tokens.
"""
diff --git a/lib/lightning/helpers.ex b/lib/lightning/helpers.ex
index e7fa6e2e7b..4cb511c7d6 100644
--- a/lib/lightning/helpers.ex
+++ b/lib/lightning/helpers.ex
@@ -89,4 +89,68 @@ defmodule Lightning.Helpers do
def json_safe(a) when is_atom(a) and not is_boolean(a), do: Atom.to_string(a)
def json_safe(any), do: any
+
+ @doc """
+ Copies an error from one key to another in the given changeset.
+
+ ## Parameters
+
+ - `changeset`: The changeset to modify.
+ - `original_key`: The key where the error currently exists.
+ - `new_key`: The key where the error should be duplicated.
+ - `opts`: A keyword list of options. Supports `overwrite`, which is a boolean indicating whether to overwrite the `new_key` error if it already exists. Defaults to `true`.
+
+ ## Example
+
+ iex> changeset = %Ecto.Changeset{errors: [name: {"has already been taken", []}]}
+ iex> updated_changeset = Lightning.Helpers.copy_error(changeset, :name, :raw_name)
+ iex> updated_changeset.errors
+ [name: {"has already been taken", []}, raw_name: {"has already been taken", []}]
+
+ If the `original_key` doesn't exist in the errors, or if the `new_key` already exists and `overwrite` is set to `false`, the changeset is returned unchanged.
+ """
+ def copy_error(changeset, original_key, new_key, opts \\ [overwrite: true]) do
+ overwrite = Keyword.get(opts, :overwrite, true)
+
+ if Keyword.has_key?(changeset.errors, original_key) do
+ {error_msg, error_opts} = Keyword.fetch!(changeset.errors, original_key)
+
+ if Keyword.has_key?(changeset.errors, new_key) and not overwrite do
+ changeset
+ else
+ Ecto.Changeset.add_error(changeset, new_key, error_msg, error_opts)
+ end
+ else
+ changeset
+ end
+ end
+
+ @doc """
+ Converts a string into a URL-safe format by converting it to lowercase,
+ replacing unwanted characters with hyphens, and trimming leading/trailing hyphens.
+
+ This function allows international characters, which will be automatically
+ percent-encoded in URLs by browsers.
+
+ ## Parameters
+
+ - `name`: The string to convert. If `nil` is passed, it returns an empty string.
+
+ ## Examples
+
+ iex> url_safe_name("My Project!!")
+ "my-project"
+
+ iex> url_safe_name(nil)
+ ""
+ """
+ @spec url_safe_name(String.t() | nil) :: String.t()
+ def url_safe_name(nil), do: ""
+
+ def url_safe_name(name) when is_binary(name) do
+ name
+ |> String.downcase()
+ |> String.replace(~r/[^\p{L}0-9_\.\-]+/u, "-")
+ |> String.trim("-")
+ end
end
diff --git a/lib/lightning/policies/collections.ex b/lib/lightning/policies/collections.ex
new file mode 100644
index 0000000000..80e8d89722
--- /dev/null
+++ b/lib/lightning/policies/collections.ex
@@ -0,0 +1,31 @@
+defmodule Lightning.Policies.Collections do
+ @moduledoc """
+ The Bodyguard Policy module for Collections.
+
+ Access to collections is controlled by the project the collection belongs to.
+
+ The `access_collection` action is allowed if the user has access to the
+  project, or if a run belongs to the project (via its workflow).
+ """
+ @behaviour Bodyguard.Policy
+
+ alias Lightning.Accounts.User
+ alias Lightning.Collections.Collection
+ alias Lightning.Run
+
+ @type actions :: :access_collection
+  @spec authorize(actions(), User.t() | Run.t(), Collection.t()) ::
+          :ok | {:error, :unauthorized} | boolean()
+ def authorize(:access_collection, %User{} = user, %Collection{} = collection) do
+ Lightning.Policies.Permissions.can(
+ Lightning.Policies.ProjectUsers,
+ :access_project,
+ user,
+ collection
+ )
+ end
+
+ def authorize(:access_collection, %Run{} = run, %Collection{} = collection) do
+ Lightning.Runs.get_project_id_for_run(run) == collection.project_id
+ end
+end
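
Both clauses feed the same `Permissions.can/4` call made by the collections controller, so access checks look alike whether the API caller authenticated as a user or as a run; a rough sketch (assuming `subject` is a loaded `%User{}` or `%Run{}` and `collection` is an existing collection the subject may access):

    iex> Lightning.Policies.Permissions.can(
    ...>   Lightning.Policies.Collections,
    ...>   :access_collection,
    ...>   subject,
    ...>   collection
    ...> )
    :ok
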
diff --git a/lib/lightning/policies/project_users.ex b/lib/lightning/policies/project_users.ex
index 3b1e1eea3a..c539aa6e5b 100644
--- a/lib/lightning/policies/project_users.ex
+++ b/lib/lightning/policies/project_users.ex
@@ -41,7 +41,7 @@ defmodule Lightning.Policies.ProjectUsers do
@spec authorize(
actions(),
Lightning.Accounts.User.t(),
- Lightning.Projects.Project.t() | nil
+ Lightning.Projects.Project.t() | %{project_id: Ecto.UUID.t()} | nil
) :: boolean
def authorize(:access_project, %User{}, nil), do: false
@@ -87,4 +87,7 @@ defmodule Lightning.Policies.ProjectUsers do
:initiate_github_sync
],
do: project_user.role in [:owner, :admin, :editor]
+
+ def authorize(action, user, %{project_id: project_id}),
+ do: authorize(action, user, Projects.get_project(project_id))
end
diff --git a/lib/lightning/projects.ex b/lib/lightning/projects.ex
index ff9dad42d4..f4157c3fc6 100644
--- a/lib/lightning/projects.ex
+++ b/lib/lightning/projects.ex
@@ -627,15 +627,6 @@ defmodule Lightning.Projects do
|> Repo.one()
end
- def url_safe_project_name(nil), do: ""
-
- def url_safe_project_name(name) when is_binary(name) do
- name
- |> String.downcase()
- |> String.replace(~r/[^a-z-_\.\d]+/, "-")
- |> String.replace(~r/^\-+|\-+$/, "")
- end
-
def member_of?(%Project{id: project_id}, %User{id: user_id}) do
from(p in Project,
join: pu in assoc(p, :project_users),
diff --git a/lib/lightning/runs.ex b/lib/lightning/runs.ex
index 25866f8e4b..a4de7635aa 100644
--- a/lib/lightning/runs.ex
+++ b/lib/lightning/runs.ex
@@ -327,6 +327,7 @@ defmodule Lightning.Runs do
defdelegate subscribe(run), to: Events
+ @spec get_project_id_for_run(Run.t()) :: Ecto.UUID.t() | nil
def get_project_id_for_run(run) do
Ecto.assoc(run, [:work_order, :workflow, :project])
|> select([p], p.id)
diff --git a/lib/lightning/runs/run_options.ex b/lib/lightning/runs/run_options.ex
index a31c769502..ce588b1f1e 100644
--- a/lib/lightning/runs/run_options.ex
+++ b/lib/lightning/runs/run_options.ex
@@ -24,6 +24,11 @@ defmodule Lightning.Runs.RunOptions do
field :run_memory_limit_mb, :integer
end
+ def new(opts \\ %{}) do
+ %__MODULE__{}
+ |> cast(opts, [:save_dataclips, :run_timeout_ms])
+ end
+
defimpl Jason.Encoder, for: __MODULE__ do
def encode(value, opts) do
value
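
`new/1` is a plain changeset builder over the embedded schema, so callers (such as the worker channel change further down) can cast whichever options they have and then materialise the struct with `Ecto.Changeset.apply_changes/1`; a rough sketch:

    iex> changeset =
    ...>   Lightning.Runs.RunOptions.new(%{save_dataclips: false, run_timeout_ms: 60_000})
    iex> options = Ecto.Changeset.apply_changes(changeset)
    iex> {options.save_dataclips, options.run_timeout_ms}
    {false, 60000}
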
diff --git a/lib/lightning/runtime/runtime_manager.ex b/lib/lightning/runtime/runtime_manager.ex
index 7f81d88131..52bc44ae0a 100644
--- a/lib/lightning/runtime/runtime_manager.ex
+++ b/lib/lightning/runtime/runtime_manager.ex
@@ -44,7 +44,9 @@ defmodule Lightning.Runtime.RuntimeManager do
port: 2222,
repo_dir: nil,
worker_secret: nil,
- ws_url: "ws://localhost:4000/worker"
+ endpoint: nil,
+ ws_url: "ws://localhost:4000/worker",
+ col_url: "http://localhost:4000/collections"
@doc """
Parses the keyword list of start arguments and returns a tuple,
@@ -58,6 +60,7 @@ defmodule Lightning.Runtime.RuntimeManager do
Application.get_env(:lightning, __MODULE__, [])
|> Keyword.merge(args)
)
+ |> maybe_put_urls()
{_, args} = args |> Keyword.split(config |> Map.keys())
@@ -102,10 +105,33 @@ defmodule Lightning.Runtime.RuntimeManager do
{:ws_url, v} ->
~w(--lightning #{v})
+ {:col_url, v} ->
+ ~w(--collections-url #{v})
+
_ ->
[nil]
end
end
+
+ defp maybe_put_urls(config) do
+ if config.endpoint do
+ config
+ |> Map.merge(%{
+ ws_url:
+ Phoenix.VerifiedRoutes.unverified_url(config.endpoint, "/worker")
+ |> URI.parse()
+ |> Map.put(:scheme, "ws")
+ |> URI.to_string(),
+ col_url:
+ Phoenix.VerifiedRoutes.unverified_url(
+ config.endpoint,
+ "/collections"
+ )
+ })
+ else
+ config
+ end
+ end
end
defmodule RuntimeClient do
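
When an `endpoint` is passed in (as the application supervisor now does), `maybe_put_urls/1` derives both worker URLs from that endpoint's configured host rather than relying on the hard-coded defaults. Assuming the stock dev endpoint on `http://localhost:4000`, the `ws_url` derivation looks like:

    iex> url = Phoenix.VerifiedRoutes.unverified_url(LightningWeb.Endpoint, "/worker")
    "http://localhost:4000/worker"
    iex> url |> URI.parse() |> Map.put(:scheme, "ws") |> URI.to_string()
    "ws://localhost:4000/worker"

and `col_url` becomes `http://localhost:4000/collections`, passed to the worker as `--collections-url`.
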
diff --git a/lib/lightning/tokens.ex b/lib/lightning/tokens.ex
new file mode 100644
index 0000000000..5c54c235fa
--- /dev/null
+++ b/lib/lightning/tokens.ex
@@ -0,0 +1,68 @@
+defmodule Lightning.Tokens do
+ @moduledoc """
+ Token generation, verification and validation.
+ """
+
+ defmodule PersonalAccessToken do
+ @moduledoc false
+ use Joken.Config
+
+ @impl true
+ def token_config do
+ %{}
+ |> add_claim("jti", &Joken.generate_jti/0)
+ |> add_claim("iss", fn -> "Lightning" end, &(&1 == "Lightning"))
+ |> add_claim("sub", nil, fn sub, _claims, _context ->
+ String.starts_with?(sub, "user:")
+ end)
+ |> add_claim(
+ "iat",
+ fn -> Lightning.current_time() |> DateTime.to_unix() end,
+ fn iat, _claims, _context ->
+          DateTime.to_unix(Lightning.current_time()) >= iat
+ end
+ )
+ end
+ end
+
+ @doc """
+ Verify a token and return the claims if successful.
+
+ This serves as a central point to verify and validate different types
+ of tokens.
+ """
+ @spec verify(String.t()) :: {:ok, map()} | {:error, any()}
+ def verify(token) do
+ Joken.peek_claims(token)
+ |> case do
+ # TODO: Look up user tokens via the JTI and ensure the JTI is indexed
+ {:ok, %{"sub" => "user:" <> _}} ->
+ PersonalAccessToken.verify_and_validate(
+ token,
+ Lightning.Config.token_signer()
+ )
+
+ {:ok, %{"sub" => "run:" <> _}} ->
+ Lightning.Workers.verify_run_token(token, %{})
+
+ {:ok, _} ->
+ {:error, "Unsupported token type"}
+
+ {:error, err} ->
+ {:error, err}
+ end
+ end
+
+ @doc """
+ Get the subject of a token.
+  Currently supports RunTokens and PersonalAccessTokens,
+ which return `Lightning.Run`s and `Lightning.Accounts.User`s respectively.
+ """
+ def get_subject(%{"sub" => "user:" <> user_id}) do
+ Lightning.Accounts.get_user(user_id)
+ end
+
+ def get_subject(%{"sub" => "run:" <> run_id}) do
+ Lightning.Runs.get(run_id)
+ end
+end
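
A rough round-trip of the new personal access tokens, tying together `UserToken.build_token/2`, `Lightning.Tokens.verify/1` and `get_subject/1` (assuming `user` is a persisted `%Lightning.Accounts.User{}` and the `:workers` private key is configured):

    iex> {token, changeset} = Lightning.Accounts.UserToken.build_token(user, "api")
    iex> {:ok, _user_token} = Lightning.Repo.insert(changeset)
    iex> {:ok, claims} = Lightning.Tokens.verify(token)
    iex> claims["sub"] == "user:#{user.id}"
    true
    iex> Lightning.Tokens.get_subject(claims).id == user.id
    true
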
diff --git a/lib/lightning/utils/crypto.ex b/lib/lightning/utils/crypto.ex
new file mode 100644
index 0000000000..14c3683679
--- /dev/null
+++ b/lib/lightning/utils/crypto.ex
@@ -0,0 +1,37 @@
+defmodule Lightning.Utils.Crypto do
+ @moduledoc """
+ Utility functions for cryptographic operations.
+ """
+
+ @doc """
+ Generates a new RSA key pair with 2048 bits and a public exponent of 65537.
+
+ This is preferable to using `create_private_key` and `abstract_public_key` as
+ it generates a key pair in one step, and also doesn't require shelling out to
+ `openssl`.
+ """
+ def generate_rsa_key_pair do
+ {:RSAPrivateKey, _, modulus, public_exponent, _, _, _, _exponent1, _, _,
+ _other_prime_infos} =
+ rsa_private_key = :public_key.generate_key({:rsa, 2048, 65_537})
+
+ rsa_public_key = {:RSAPublicKey, modulus, public_exponent}
+
+ private_key =
+ [:public_key.pem_entry_encode(:RSAPrivateKey, rsa_private_key)]
+ |> :public_key.pem_encode()
+
+ public_key =
+ [:public_key.pem_entry_encode(:RSAPublicKey, rsa_public_key)]
+ |> :public_key.pem_encode()
+
+ {private_key, public_key}
+ end
+
+ @doc """
+ Generates a new HS256 key.
+ """
+ def generate_hs256_key do
+ 32 |> :crypto.strong_rand_bytes() |> Base.encode64()
+ end
+end
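
These helpers pair with `Lightning.Config.token_signer/0` above, which builds an RS256 signer from a PEM-encoded private key; a small sketch of wiring the two together in a test or config setup (the `"user:123"` subject is illustrative):

    iex> {private_key, _public_key} = Lightning.Utils.Crypto.generate_rsa_key_pair()
    iex> signer = Joken.Signer.create("RS256", %{"pem" => private_key})
    iex> {:ok, token, _claims} =
    ...>   Lightning.Tokens.PersonalAccessToken.generate_and_sign(%{"sub" => "user:123"}, signer)
    iex> {:ok, %{"sub" => "user:123"}} =
    ...>   Lightning.Tokens.PersonalAccessToken.verify_and_validate(token, signer)
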
diff --git a/lib/lightning/workers.ex b/lib/lightning/workers.ex
index da836803fb..0c0e2ff30c 100644
--- a/lib/lightning/workers.ex
+++ b/lib/lightning/workers.ex
@@ -5,7 +5,7 @@ defmodule Lightning.Workers do
This module deals with the security tokens and the formatting used on
the communication with the workers.
"""
- defmodule Token do
+ defmodule WorkerToken do
@moduledoc """
JWT token configuration to authenticate workers.
"""
@@ -35,7 +35,14 @@ defmodule Lightning.Workers do
%{}
|> add_claim("iss", fn -> "Lightning" end, &(&1 == "Lightning"))
|> add_claim("id", nil, fn id, _claims, context ->
- is_binary(id) and id == Map.get(context, :id)
+ Map.get(context, :id)
+ |> case do
+ nil ->
+ is_binary(id)
+
+ expected_id ->
+ is_binary(id) and id == expected_id
+ end
end)
|> add_claim(
"nbf",
@@ -58,12 +65,14 @@ defmodule Lightning.Workers do
Lightning.Run.t(),
Lightning.Runs.RunOptions.t()
) :: binary()
- def generate_run_token(run, run_options) do
- run_timeout_ms = run_options[:run_timeout_ms]
-
+ def generate_run_token(run, run_options \\ %Lightning.Runs.RunOptions{}) do
{:ok, token, _claims} =
RunToken.generate_and_sign(
- %{"id" => run.id, "exp" => calculate_token_expiry(run_timeout_ms)},
+ %{
+ "id" => run.id,
+ "exp" => calculate_token_expiry(run_options.run_timeout_ms),
+ "sub" => "run:#{run.id}"
+ },
Lightning.Config.run_token_signer()
)
@@ -119,7 +128,7 @@ defmodule Lightning.Workers do
def verify_worker_token(token, context \\ %{}) when is_binary(token) do
context = Enum.into(context, %{current_time: Lightning.current_time()})
- Token.verify_and_validate(
+ WorkerToken.verify_and_validate(
token,
Lightning.Config.worker_token_signer(),
context
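
Run tokens now default their options and carry a `sub` claim in the `run:<id>` form, which is what lets `Lightning.Tokens.verify/1` route them to `verify_run_token/2`; roughly (assuming `run` is a persisted `%Lightning.Run{}`):

    iex> run_options = %Lightning.Runs.RunOptions{run_timeout_ms: 60_000}
    iex> token = Lightning.Workers.generate_run_token(run, run_options)
    iex> {:ok, claims} = Lightning.Tokens.verify(token)
    iex> claims["sub"] == "run:#{run.id}"
    true
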
diff --git a/lib/lightning_web/channels/worker_channel.ex b/lib/lightning_web/channels/worker_channel.ex
index fb68a9e122..ce31b9cfb0 100644
--- a/lib/lightning_web/channels/worker_channel.ex
+++ b/lib/lightning_web/channels/worker_channel.ex
@@ -49,5 +49,8 @@ defmodule LightningWeb.WorkerChannel do
|> then(fn %{project_id: project_id} ->
UsageLimiter.get_run_options(%Context{project_id: project_id})
end)
+ |> Enum.into(%{})
+ |> Runs.RunOptions.new()
+ |> Ecto.Changeset.apply_changes()
end
end
diff --git a/lib/lightning_web/components/layouts/settings.html.heex b/lib/lightning_web/components/layouts/settings.html.heex
index eb22faf8f0..4b16f8964e 100644
--- a/lib/lightning_web/components/layouts/settings.html.heex
+++ b/lib/lightning_web/components/layouts/settings.html.heex
@@ -19,35 +19,42 @@
Projects
Users
Authentication
Audit
+
+
+ Collections
+
-
+
Back
diff --git a/lib/lightning_web/controllers/collections_controller.ex b/lib/lightning_web/controllers/collections_controller.ex
new file mode 100644
index 0000000000..dde63fdb73
--- /dev/null
+++ b/lib/lightning_web/controllers/collections_controller.ex
@@ -0,0 +1,292 @@
+defmodule LightningWeb.CollectionsController do
+ use LightningWeb, :controller
+
+ alias Lightning.Collections
+ alias Lightning.Policies.Permissions
+ alias Lightning.Repo
+
+ action_fallback LightningWeb.FallbackController
+
+ require Logger
+
+ @max_chunk_size 50
+
+ @default_limit Application.compile_env!(:lightning, __MODULE__)[:stream_limit]
+
+ @valid_params [
+ "key",
+ "cursor",
+ "limit",
+ "created_after",
+ "created_before"
+ ]
+
+ defp authorize(conn, collection) do
+ Permissions.can(
+ Lightning.Policies.Collections,
+ :access_collection,
+ conn.assigns.subject,
+ collection
+ )
+ end
+
+ #
+ # Controller starts here
+ #
+ def put(conn, %{"name" => col_name, "key" => key, "value" => value}) do
+ with {:ok, collection} <- Collections.get_collection(col_name),
+ :ok <- authorize(conn, collection) do
+ case Collections.put(collection, key, value) do
+ :ok ->
+ json(conn, %{upserted: 1, error: nil})
+
+ {:error, _reason} ->
+ json(conn, %{upserted: 0, error: "Format error"})
+ end
+ end
+ end
+
+ def put_all(conn, %{"name" => col_name, "items" => items}) do
+ with {:ok, collection} <- Collections.get_collection(col_name),
+ :ok <- authorize(conn, collection) do
+ case Collections.put_all(collection, items) do
+ {:ok, count} ->
+ json(conn, %{upserted: count, error: nil})
+
+ :error ->
+ conn
+ |> put_status(:internal_server_error)
+ |> json(%{upserted: 0, error: "Database Error"})
+ end
+ end
+ end
+
+ def get(conn, %{"name" => col_name, "key" => key}) do
+ with {:ok, collection} <- Collections.get_collection(col_name),
+ :ok <- authorize(conn, collection) do
+ case Collections.get(collection, key) do
+ nil ->
+ conn
+ |> put_status(:no_content)
+ |> json(nil)
+
+ item ->
+ json(conn, item)
+ end
+ end
+ end
+
+ def delete(conn, %{"name" => col_name, "key" => key}) do
+ with {:ok, collection} <- Collections.get_collection(col_name),
+ :ok <- authorize(conn, collection) do
+ case Collections.delete(collection, key) do
+ :ok ->
+ json(conn, %{key: key, deleted: 1, error: nil})
+
+ {:error, :not_found} ->
+ json(conn, %{key: key, deleted: 0, error: "Item Not Found"})
+ end
+ end
+ end
+
+ def delete_all(conn, %{"name" => col_name} = params) do
+ with {:ok, collection} <- Collections.get_collection(col_name),
+ :ok <- authorize(conn, collection) do
+ key_param = params["key"]
+
+ {:ok, n} = Collections.delete_all(collection, key_param)
+
+ json(conn, %{key: key_param, deleted: n, error: nil})
+ end
+ end
+
+ def stream(conn, %{"name" => col_name, "key" => key_pattern}) do
+ with {:ok, collection, filters, response_limit} <-
+ validate_query(conn, col_name) do
+ case Repo.transact(fn ->
+ items_stream =
+ Collections.stream_match(collection, key_pattern, filters)
+
+ stream_chunked(conn, items_stream, response_limit)
+ end) do
+ {:error, conn} -> conn
+ {:ok, conn} -> conn
+ end
+ end
+ end
+
+ def stream(conn, %{"name" => col_name}) do
+ with {:ok, collection, filters, response_limit} <-
+ validate_query(conn, col_name) do
+ case Repo.transact(fn ->
+ items_stream = Collections.stream_all(collection, filters)
+
+ stream_chunked(conn, items_stream, response_limit)
+ end) do
+ {:error, conn} -> conn
+ {:ok, conn} -> conn
+ end
+ end
+ end
+
+ defmodule ChunkAcc do
+ defstruct conn: nil,
+ count: 0,
+ limit: 0,
+ last: nil,
+ cursor_data: nil
+ end
+
+ defp stream_chunked(conn, items_stream, response_limit) do
+ with %{halted: false} = conn <- begin_chunking(conn) do
+ items_stream
+ |> Stream.chunk_every(@max_chunk_size)
+ |> Stream.with_index()
+ |> Enum.reduce_while(
+ %ChunkAcc{conn: conn, limit: response_limit},
+ &send_chunk/2
+ )
+ |> finish_chunking()
+ end
+ end
+
+ defp validate_query(conn, col_name) do
+ with {:ok, collection} <- Collections.get_collection(col_name),
+ :ok <- authorize(conn, collection),
+ query_params <-
+ Enum.into(conn.query_params, %{
+ "cursor" => nil,
+ "limit" => "#{@default_limit}"
+ }),
+ {:ok, filters} <- validate_query_params(query_params) do
+ # returns one more from db than the limit to determine if there are more items for the cursor
+ db_query_filters = Map.update(filters, :limit, @default_limit, &(&1 + 1))
+ response_limit = Map.fetch!(filters, :limit)
+
+ {:ok, collection, db_query_filters, response_limit}
+ end
+ end
+
+ defp validate_query_params(
+ %{"cursor" => cursor, "limit" => limit} = query_params
+ ) do
+ with invalid_params when map_size(invalid_params) == 0 <-
+ Map.drop(query_params, @valid_params),
+ {:ok, cursor} <- validate_cursor(cursor),
+ {limit, ""} <- Integer.parse(limit),
+ valid_params <- Map.take(query_params, @valid_params) do
+ filters =
+ valid_params
+ |> Map.new(fn {key, value} -> {String.to_existing_atom(key), value} end)
+ |> Map.put(:limit, limit)
+ |> Map.put(:cursor, cursor)
+
+ {:ok, filters}
+ else
+ _invalid ->
+ {:error, :bad_request}
+ end
+ end
+
+ defp validate_cursor(nil), do: {:ok, nil}
+
+ defp validate_cursor(cursor) do
+ with {:ok, decoded} <- Base.decode64(cursor),
+ {:ok, datetime, _off} <- DateTime.from_iso8601(decoded) do
+ {:ok, datetime}
+ end
+ end
+
+ defp begin_chunking(conn) do
+ conn
+ |> put_resp_content_type("application/json")
+ |> send_chunked(200)
+ |> Plug.Conn.chunk(~S({"items": [))
+ |> case do
+ {:ok, conn} ->
+ conn
+
+ {:error, reason} ->
+ Logger.warning("Error starting chunking: #{inspect(reason)}")
+ halt(conn)
+ end
+ end
+
+ defp finish_chunking(%ChunkAcc{conn: conn, cursor_data: cursor_data}) do
+ cursor =
+ if cursor_data do
+ cursor_data |> DateTime.to_iso8601() |> Base.encode64()
+ end
+
+ Plug.Conn.chunk(conn, ~S(], "cursor":) <> Jason.encode!(cursor) <> "}")
+ end
+
+ defp finish_chunking({:error, conn}), do: conn
+
+ defp send_chunk({chunk_items, 0}, acc) do
+ {taken_items, acc} = take_and_accumulate(chunk_items, acc)
+
+ taken_items
+ |> Enum.map_join(",", &Jason.encode!/1)
+ |> send_chunk_and_iterate(acc)
+ end
+
+ defp send_chunk(
+ {_chunk_items, _i},
+ %ChunkAcc{count: sent_count, last: last, limit: limit} = acc
+ )
+ when sent_count == limit do
+ {:halt, %ChunkAcc{acc | cursor_data: last.inserted_at}}
+ end
+
+ defp send_chunk({chunk_items, _i}, acc) do
+ {taken_items, acc} = take_and_accumulate(chunk_items, acc)
+
+ taken_items
+ |> Enum.map_join(",", &Jason.encode!/1)
+ |> then(fn items_chunk ->
+ "," <> items_chunk
+ end)
+ |> send_chunk_and_iterate(acc)
+ end
+
+ defp take_and_accumulate(
+ chunk_items,
+ %ChunkAcc{count: sent_count, limit: limit} = acc
+ ) do
+ taken_items = Enum.take(chunk_items, limit - sent_count)
+ last = List.last(taken_items)
+ taken_count = length(taken_items)
+
+ cursor_data =
+ if taken_count > 0 and length(chunk_items) > taken_count do
+ last.inserted_at
+ end
+
+ acc =
+ struct(acc, %{
+ count: sent_count + taken_count,
+ last: last,
+ cursor_data: cursor_data
+ })
+
+ {taken_items, acc}
+ end
+
+ defp send_chunk_and_iterate(
+ chunk,
+ %ChunkAcc{conn: conn, cursor_data: cursor_data} = acc
+ ) do
+ case Plug.Conn.chunk(conn, chunk) do
+ {:ok, conn} ->
+ if cursor_data do
+ {:halt, %{acc | conn: conn}}
+ else
+ {:cont, %{acc | conn: conn}}
+ end
+
+ {:error, :closed} ->
+ {:halt, {:error, conn}}
+ end
+ end
+end
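
The streamed response is a single JSON object whose `cursor` field is the Base64-encoded ISO-8601 timestamp of the last item sent (or `null` when the listing is exhausted); clients pass it back verbatim as the `cursor` query parameter to fetch the next page. A sketch of what a page might look like and how the cursor decodes (values illustrative):

    # GET /collections/my-collection?limit=2   (Authorization: Bearer <personal access token>)
    #
    # {"items": [{"key": "user:1", ...}, {"key": "user:2", ...}],
    #  "cursor": "MjAyNC0wOS0zMFQxMjowMDowMC4wMDAwMDBa"}

    iex> Base.decode64!("MjAyNC0wOS0zMFQxMjowMDowMC4wMDAwMDBa")
    "2024-09-30T12:00:00.000000Z"
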
diff --git a/lib/lightning_web/controllers/fallback_controller.ex b/lib/lightning_web/controllers/fallback_controller.ex
index 0ea590bec1..467130d4d2 100644
--- a/lib/lightning_web/controllers/fallback_controller.ex
+++ b/lib/lightning_web/controllers/fallback_controller.ex
@@ -14,11 +14,18 @@ defmodule LightningWeb.FallbackController do
|> render(:"404")
end
+ def call(conn, {:error, :bad_request}) do
+ conn
+ |> put_status(:bad_request)
+ |> put_view(LightningWeb.ErrorView)
+ |> render(:"400")
+ end
+
def call(conn, {:error, :unauthorized}) do
conn
|> put_status(:unauthorized)
|> put_view(LightningWeb.ErrorView)
- |> render(:"401", error: %{error: :unauthirized})
+ |> render(:"401")
end
def call(conn, {:error, :forbidden}) do
diff --git a/lib/lightning_web/controllers/user_auth.ex b/lib/lightning_web/controllers/user_auth.ex
index 38986253ef..0793e57a8a 100644
--- a/lib/lightning_web/controllers/user_auth.ex
+++ b/lib/lightning_web/controllers/user_auth.ex
@@ -188,7 +188,7 @@ defmodule LightningWeb.UserAuth do
end
end
- defp get_bearer(conn) do
+ def get_bearer(conn) do
conn
|> get_req_header("authorization")
|> case do
@@ -256,7 +256,6 @@ defmodule LightningWeb.UserAuth do
@doc """
Used for API routes that require the resource to be authenticated.
A resource can be a `User` or a `ProjectRepoConnection`
-
"""
def require_authenticated_api_resource(conn, _opts) do
if is_nil(conn.assigns[:current_resource]) do
diff --git a/lib/lightning_web/live/collection_live/collection_creation_modal.ex b/lib/lightning_web/live/collection_live/collection_creation_modal.ex
new file mode 100644
index 0000000000..273413fdc2
--- /dev/null
+++ b/lib/lightning_web/live/collection_live/collection_creation_modal.ex
@@ -0,0 +1,182 @@
+defmodule LightningWeb.CollectionLive.CollectionCreationModal do
+ use LightningWeb, :live_component
+
+ alias Lightning.Collections
+ alias Lightning.Collections.Collection
+ alias Lightning.Helpers
+ alias Lightning.Projects
+
+ @impl true
+ def update(assigns, socket) do
+ changeset = Collection.changeset(assigns.collection, %{})
+
+ {:ok,
+ socket
+ |> assign(assigns)
+ |> assign(:changeset, changeset)
+ |> assign(:name, get_collection_name(changeset))
+ |> assign(:projects_options, list_project_options())
+ |> assign_new(:mode, fn -> :create end)}
+ end
+
+ defp list_project_options do
+ Projects.list_projects() |> Enum.map(&{&1.name, &1.id})
+ end
+
+ defp get_collection_name(changeset) do
+ Ecto.Changeset.fetch_field!(changeset, :name)
+ end
+
+ @impl true
+ def handle_event("close_modal", _, socket) do
+ {:noreply, socket |> push_navigate(to: socket.assigns.return_to)}
+ end
+
+ def handle_event("validate", %{"collection" => collection_params}, socket) do
+ changeset =
+ socket.assigns.collection
+ |> Collection.changeset(
+ collection_params
+ |> coerce_raw_name_to_safe_name
+ )
+ |> Map.put(:action, :validate)
+
+ {:noreply,
+ socket
+ |> assign(
+ :changeset,
+ Lightning.Helpers.copy_error(changeset, :name, :raw_name)
+ )
+ |> assign(:name, Ecto.Changeset.fetch_field!(changeset, :name))}
+ end
+
+ def handle_event("save", %{"collection" => collection_params}, socket) do
+ %{mode: mode, return_to: return_to} = socket.assigns
+
+ result =
+ case mode do
+ :create ->
+ Collections.create_collection(collection_params)
+
+ :update ->
+ Collections.update_collection(
+ socket.assigns.collection,
+ collection_params
+ )
+ end
+
+ case result do
+ {:ok, _collection} ->
+ {:noreply,
+ socket
+ |> put_flash(:info, "Collection #{mode}d successfully")
+ |> push_navigate(to: return_to)}
+
+ {:error, changeset} ->
+ {:noreply,
+ assign(
+ socket,
+ :changeset,
+ Lightning.Helpers.copy_error(changeset, :name, :raw_name)
+ )}
+ end
+ end
+
+ defp coerce_raw_name_to_safe_name(%{"raw_name" => raw_name} = params) do
+ new_name = Helpers.url_safe_name(raw_name)
+
+ params |> Map.put("name", new_name)
+ end
+
+ defp coerce_raw_name_to_safe_name(%{} = params) do
+ params
+ end
+
+ @impl true
+ def render(assigns) do
+ ~H"""
+
+ <.modal id={@id} width="xl:min-w-1/3 min-w-1/2 max-w-full">
+ <:title>
+
+
+ <%= if @mode == :create,
+ do: "Create Collection",
+ else: "Edit Collection" %>
+
+
+ Close
+
+
+
+
+ <.form
+ :let={f}
+ for={@changeset}
+ id={"collection-form-#{@collection.id || "new"}"}
+ phx-target={@myself}
+ phx-change="validate"
+ phx-submit="save"
+ >
+
+
+ <.input
+ type="text"
+ field={f[:raw_name]}
+ value={@name}
+ label="Name"
+ required="true"
+ />
+ <.input type="hidden" field={f[:name]} />
+
+ <%= if to_string(f[:name].value) != "" do %>
+ Your collection will be named
+ <%= @name %> .
+ <% end %>
+
+
+
+ <.input
+ type="select"
+ field={f[:project_id]}
+ label="Project"
+ options={@projects_options}
+ required="true"
+ />
+
+
+ <.modal_footer class="mt-6 mx-6">
+
+
+ Save
+
+
+ Cancel
+
+
+
+
+
+
+ """
+ end
+end
diff --git a/lib/lightning_web/live/collection_live/components.ex b/lib/lightning_web/live/collection_live/components.ex
new file mode 100644
index 0000000000..560d860b03
--- /dev/null
+++ b/lib/lightning_web/live/collection_live/components.ex
@@ -0,0 +1,152 @@
+defmodule LightningWeb.CollectionLive.Components do
+ use LightningWeb, :component
+
+ import PetalComponents.Table
+
+ defp confirm_collection_deletion_modal(assigns) do
+ ~H"""
+ <.modal id={@id} width="max-w-md">
+ <:title>
+
+
+ Delete collection
+
+
+
+ Close
+
+
+
+
+
+
+ Are you sure you want to delete the collection
+ <%= @collection.name %>
+ ?
+          If you wish to proceed with this action, click on the delete button. To cancel, click on the cancel button.
+
+
+
+ <.button
+ id={"#{@id}_confirm_button"}
+ type="button"
+ phx-click="delete_collection"
+ phx-value-collection={@collection.id}
+ color_class="bg-red-600 hover:bg-red-700 text-white"
+ phx-disable-with="Deleting..."
+ >
+ Delete
+
+
+ Cancel
+
+
+
+ """
+ end
+
+ defp table_title(assigns) do
+ ~H"""
+
+ Collections
+
+ (<%= @count %>)
+
+
+ """
+ end
+
+ def collections_table(assigns) do
+ next_sort_icon = %{asc: "hero-chevron-down", desc: "hero-chevron-up"}
+
+ assigns =
+ assign(assigns,
+ collections_count: Enum.count(assigns.collections),
+ empty?: Enum.empty?(assigns.collections),
+ name_sort_icon: next_sort_icon[assigns.name_direction]
+ )
+
+ ~H"""
+ <%= if @empty? do %>
+ <%= render_slot(@empty_state) %>
+ <% else %>
+
+ <.table_title count={@collections_count} />
+
+ <%= render_slot(@create_collection_button) %>
+
+
+ <.table id="collections-table">
+ <.tr>
+ <.th>
+
+ Name
+
+ <.icon name={@name_sort_icon} />
+
+
+
+ <.th>Project
+ <.th>
+
+
+ <.tr
+ :for={collection <- @collections}
+ id={"collections-table-row-#{collection.id}"}
+ class="hover:bg-gray-100 transition-colors duration-200"
+ >
+ <.td class="break-words max-w-[15rem] text-gray-800">
+ <%= collection.name %>
+
+ <.td class="break-words max-w-[25rem]">
+ <%= collection.project.name %>
+
+
+ <.td>
+
+
+ Edit
+
+
+ Delete
+
+
+ <.live_component
+ id={"update-collection-#{collection.id}-modal"}
+ module={LightningWeb.CollectionLive.CollectionCreationModal}
+ collection={collection}
+ mode={:update}
+ return_to={~p"/settings/collections"}
+ />
+ <.confirm_collection_deletion_modal
+ id={"delete-collection-#{collection.id}-modal"}
+ collection={collection}
+ />
+
+
+
+ <% end %>
+ """
+ end
+end
diff --git a/lib/lightning_web/live/collection_live/index.ex b/lib/lightning_web/live/collection_live/index.ex
new file mode 100644
index 0000000000..abb3f43c93
--- /dev/null
+++ b/lib/lightning_web/live/collection_live/index.ex
@@ -0,0 +1,129 @@
+defmodule LightningWeb.CollectionLive.Index do
+ use LightningWeb, :live_view
+
+ import LightningWeb.CollectionLive.Components
+
+ alias Lightning.Collections
+ alias Lightning.Collections.Collection
+ alias Lightning.Policies.Permissions
+ alias Lightning.Policies.Users
+
+ require Logger
+
+ @impl true
+ def mount(_params, _session, socket) do
+ can_access_admin_space =
+ Users
+ |> Permissions.can?(:access_admin_space, socket.assigns.current_user, {})
+
+ if can_access_admin_space do
+ {:ok,
+ socket
+ |> assign(
+ page_title: "Collections",
+ active_menu_item: :collections,
+ collections: Collections.list_collections(),
+ name_sort_direction: :asc
+ ), layout: {LightningWeb.Layouts, :settings}}
+ else
+ {:ok,
+ socket
+ |> put_flash(:nav, :no_access)
+ |> push_redirect(to: "/projects")}
+ end
+ end
+
+ @impl true
+ def handle_event("sort", %{"by" => field}, socket) do
+ sort_key = String.to_atom("#{field}_sort_direction")
+ sort_direction = Map.get(socket.assigns, sort_key, :asc)
+ new_sort_direction = switch_sort_direction(sort_direction)
+
+ order_column = map_sort_field_to_column(field)
+
+ collections =
+ Collections.list_collections(
+ order_by: [{new_sort_direction, order_column}]
+ )
+
+ {:noreply,
+ socket
+ |> assign(:collections, collections)
+ |> assign(sort_key, new_sort_direction)}
+ end
+
+ def handle_event("delete_collection", %{"collection" => collection_id}, socket) do
+ case Collections.delete_collection(collection_id) do
+ {:ok, _collection} ->
+ {:noreply,
+ socket
+ |> put_flash(:info, "Collection deleted successfully!")
+ |> push_navigate(to: ~p"/settings/collections")}
+
+ {:error, reason} ->
+ Logger.error("Error during collection deletion: #{inspect(reason)}")
+
+ {:noreply,
+ socket
+ |> put_flash(:error, "Couldn't delete collection!")
+ |> push_navigate(to: ~p"/settings/collections")}
+ end
+ end
+
+ defp switch_sort_direction(:asc), do: :desc
+ defp switch_sort_direction(:desc), do: :asc
+
+ defp map_sort_field_to_column("name"), do: :name
+
+ @impl true
+ def render(assigns) do
+ ~H"""
+
+ <:header>
+
+ <:title><%= @page_title %>
+
+
+
+
+ <.collections_table
+ collections={@collections}
+ user={@current_user}
+ name_direction={@name_sort_direction}
+ >
+ <:empty_state>
+
+
+
+ No collection found. Create a new one.
+
+
+
+ <:create_collection_button>
+ <.button
+ role="button"
+ id="open-create-collection-modal-button"
+ phx-click={show_modal("create-collection-modal")}
+ class="col-span-1 w-full rounded-md"
+ >
+ Create collection
+
+
+
+ <.live_component
+ id="create-collection-modal"
+ module={LightningWeb.CollectionLive.CollectionCreationModal}
+ collection={%Collection{}}
+ return_to={~p"/settings/collections"}
+ />
+
+
+
+ """
+ end
+end
diff --git a/lib/lightning_web/live/dashboard_live/project_creation_modal.ex b/lib/lightning_web/live/dashboard_live/project_creation_modal.ex
index f41d1af648..c40684133e 100644
--- a/lib/lightning_web/live/dashboard_live/project_creation_modal.ex
+++ b/lib/lightning_web/live/dashboard_live/project_creation_modal.ex
@@ -1,6 +1,7 @@
defmodule LightningWeb.DashboardLive.ProjectCreationModal do
use LightningWeb, :live_component
+ alias Lightning.Helpers
alias Lightning.Projects
alias Lightning.Projects.Project
@@ -61,7 +62,7 @@ defmodule LightningWeb.DashboardLive.ProjectCreationModal do
end
defp coerce_raw_name_to_safe_name(%{"raw_name" => raw_name} = params) do
- new_name = Projects.url_safe_project_name(raw_name)
+ new_name = Helpers.url_safe_name(raw_name)
params |> Map.put("name", new_name)
end
diff --git a/lib/lightning_web/live/project_live/form_component.ex b/lib/lightning_web/live/project_live/form_component.ex
index 4b9155c5e0..66f1dda693 100644
--- a/lib/lightning_web/live/project_live/form_component.ex
+++ b/lib/lightning_web/live/project_live/form_component.ex
@@ -15,6 +15,7 @@ defmodule LightningWeb.ProjectLive.FormComponent do
import Ecto.Changeset, only: [fetch_field!: 2]
import LightningWeb.Components.Form
+ alias Lightning.Helpers
alias Lightning.Projects
alias Lightning.Projects.Project
@@ -49,7 +50,7 @@ defmodule LightningWeb.ProjectLive.FormComponent do
|> assign(:changeset, changeset)
|> assign(
:name,
- Projects.url_safe_project_name(fetch_field!(changeset, :name))
+ Helpers.url_safe_name(fetch_field!(changeset, :name))
)}
end
@@ -125,7 +126,7 @@ defmodule LightningWeb.ProjectLive.FormComponent do
end
defp coerce_raw_name_to_safe_name(%{"raw_name" => raw_name} = params) do
- new_name = Projects.url_safe_project_name(raw_name)
+ new_name = Helpers.url_safe_name(raw_name)
params |> Map.put("name", new_name)
end
diff --git a/lib/lightning_web/live/project_live/settings.html.heex b/lib/lightning_web/live/project_live/settings.html.heex
index 146499bd93..1d277d4117 100644
--- a/lib/lightning_web/live/project_live/settings.html.heex
+++ b/lib/lightning_web/live/project_live/settings.html.heex
@@ -629,31 +629,33 @@
project_user={project_user}
/>
- <.td class="text-right">
- <.button
- id={"remove_project_user_#{project_user.id}_button"}
- type="button"
- phx-click={show_modal("remove_#{project_user.id}_modal")}
- color_class="bg-white text-gray-900 hover:bg-gray-50 disabled:bg-gray-100"
- class="gap-x-2 rounded-md px-3.5 py-2.5 text-sm shadow-sm ring-1 ring-inset ring-gray-300 disabled:cursor-not-allowed"
- tooltip={
- remove_user_tooltip(
- project_user,
- @current_user,
- @can_remove_project_user
- )
- }
- disabled={
- !user_removable?(
- project_user,
- @current_user,
- @can_remove_project_user
- )
- }
- >
- <.icon name="hero-minus-circle" class="w-5 h-5" />
- Remove Collaborator
-
+ <.td>
+
+ <.button
+ id={"remove_project_user_#{project_user.id}_button"}
+ type="button"
+ phx-click={show_modal("remove_#{project_user.id}_modal")}
+ color_class="bg-white text-gray-900 hover:bg-gray-50 disabled:bg-gray-100"
+ class="gap-x-2 rounded-md px-3.5 py-2.5 text-sm shadow-sm ring-1 ring-inset ring-gray-300 disabled:cursor-not-allowed"
+ tooltip={
+ remove_user_tooltip(
+ project_user,
+ @current_user,
+ @can_remove_project_user
+ )
+ }
+ disabled={
+ !user_removable?(
+ project_user,
+ @current_user,
+ @can_remove_project_user
+ )
+ }
+ >
+ <.icon name="hero-minus-circle" class="w-5 h-5" />
+ Remove Collaborator
+ </.button>
+
<.confirm_user_removal_modal
:if={
user_removable?(
diff --git a/lib/lightning_web/plugs/api_auth.ex b/lib/lightning_web/plugs/api_auth.ex
new file mode 100644
index 0000000000..b37b2d7eed
--- /dev/null
+++ b/lib/lightning_web/plugs/api_auth.ex
@@ -0,0 +1,48 @@
+defmodule LightningWeb.Plugs.ApiAuth do
+ @moduledoc """
+ Authenticates API calls based on a JWT bearer token.
+ """
+ use Phoenix.Controller
+ import Plug.Conn
+
+ def init(opts) do
+ opts
+ end
+
+ def call(conn, _opts) do
+ with {:ok, bearer_token} <- get_bearer_token(conn),
+ {:ok, claims} <- Lightning.Tokens.verify(bearer_token) do
+ conn
+ |> assign(:claims, claims)
+ |> put_subject()
+ else
+ {:error, _reason} ->
+ deny_access(conn)
+ end
+ end
+
+ defp get_bearer_token(conn) do
+ conn
+ |> get_req_header("authorization")
+ |> case do
+ ["Bearer " <> bearer] -> {:ok, bearer}
+ _none_or_many -> {:error, :token_not_found}
+ end
+ end
+
+ defp put_subject(conn) do
+ conn.assigns.claims
+ |> Lightning.Tokens.get_subject()
+ |> then(fn subject ->
+ conn |> assign(:subject, subject)
+ end)
+ end
+
+ defp deny_access(conn) do
+ conn
+ |> put_status(:unauthorized)
+ |> put_view(LightningWeb.ErrorView)
+ |> render(:"401")
+ |> halt()
+ end
+end
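
A minimal sketch of exercising this plug in isolation with `Plug.Test` (illustrative only: `pat` stands in for a token that `Lightning.Tokens.verify/1` accepts, such as a personal access token):

    import Plug.Test

    conn =
      conn(:get, "/collections/my-collection")
      |> Plug.Conn.put_req_header("authorization", "Bearer " <> pat)
      |> LightningWeb.Plugs.ApiAuth.call(LightningWeb.Plugs.ApiAuth.init([]))

    # On success the verified claims and their subject are assigned:
    conn.assigns.claims
    conn.assigns.subject

    # Without a valid "Bearer ..." header the plug halts with a 401 and the
    # JSON error body rendered by LightningWeb.ErrorView.
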
diff --git a/lib/lightning_web/route_helpers.ex b/lib/lightning_web/route_helpers.ex
index f255c3e901..33df348577 100644
--- a/lib/lightning_web/route_helpers.ex
+++ b/lib/lightning_web/route_helpers.ex
@@ -4,15 +4,6 @@ defmodule LightningWeb.RouteHelpers do
"""
alias LightningWeb.Router.Helpers, as: Routes
- def show_run_url(project_id, run_id) do
- Routes.project_run_show_url(
- LightningWeb.Endpoint,
- :show,
- project_id,
- run_id
- )
- end
-
def project_dashboard_url(project_id) do
Routes.project_workflow_index_url(
LightningWeb.Endpoint,
diff --git a/lib/lightning_web/router.ex b/lib/lightning_web/router.ex
index 84b25ab718..50587825f9 100644
--- a/lib/lightning_web/router.ex
+++ b/lib/lightning_web/router.ex
@@ -42,6 +42,11 @@ defmodule LightningWeb.Router do
plug :accepts, ["json"]
end
+ pipeline :authenticated_api do
+ plug :accepts, ["json"]
+ plug LightningWeb.Plugs.ApiAuth
+ end
+
scope "/", LightningWeb do
pipe_through [:browser]
@@ -84,6 +89,18 @@ defmodule LightningWeb.Router do
# resources "/runs", API.RunController, only: [:index, :show]
end
+ ## Collections
+ scope "/collections", LightningWeb do
+ pipe_through [:authenticated_api]
+
+ get "/:name", CollectionsController, :stream
+ get "/:name/:key", CollectionsController, :get
+ put "/:name/:key", CollectionsController, :put
+ post "/:name", CollectionsController, :put_all
+ delete "/:name/:key", CollectionsController, :delete
+ delete "/:name", CollectionsController, :delete_all
+ end
+
## Authentication routes
scope "/", LightningWeb do
@@ -154,6 +171,8 @@ defmodule LightningWeb.Router do
live "/settings/authentication", AuthProvidersLive.Index, :edit
live "/settings/authentication/new", AuthProvidersLive.Index, :new
+
+ live "/settings/collections", CollectionLive.Index, :index
end
live_session :default, on_mount: LightningWeb.InitAssigns do
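
Taken together, the `:authenticated_api` pipeline and this scope expose a small key-value API under `/collections`. A rough client-side illustration (using the external `Req` HTTP client purely as an example; it is not a dependency of this project, and the host and `pat` token are placeholders):

    base = "https://lightning.example/collections/my-collection"
    auth = [{"authorization", "Bearer " <> pat}]

    # Upsert a single key, then read it back.
    Req.put!(base <> "/greeting", json: %{value: "hello"}, headers: auth)
    Req.get!(base <> "/greeting", headers: auth).body
    #=> %{"key" => "greeting", "value" => "hello", ...}

    # Bulk upsert, stream keys matching a pattern, and delete by pattern.
    Req.post!(base, json: %{items: [%{key: "a", value: "1"}]}, headers: auth)
    Req.get!(base, params: %{key: "greet*"}, headers: auth)
    Req.delete!(base, params: %{key: "greet*"}, headers: auth)
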
diff --git a/lib/lightning_web/views/error_view.ex b/lib/lightning_web/views/error_view.ex
index 3db6b9f58c..e050068202 100644
--- a/lib/lightning_web/views/error_view.ex
+++ b/lib/lightning_web/views/error_view.ex
@@ -1,4 +1,6 @@
defmodule LightningWeb.ErrorView do
+ @moduledoc false
+
# This module needs to be changed to use Layouts
use LightningWeb, :view
@@ -31,7 +33,10 @@ defmodule LightningWeb.ErrorView do
Authorization Error
-
+
<%= for {k,v} <- @error do %>
<%= k %>
<%= v %>
@@ -43,10 +48,6 @@ defmodule LightningWeb.ErrorView do
"""
end
- def render("403.json", %{error: error}) do
- %{"error" => error}
- end
-
defp logo_bar(assigns) do
~H"""
@@ -70,10 +71,14 @@ defmodule LightningWeb.ErrorView do
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
- def template_not_found(template, _assigns) do
+ def template_not_found(template, assigns) do
if String.match?(template, ~r/.json$/) do
%{
- "error" => Phoenix.Controller.status_message_from_template(template)
+ "error" =>
+ case assigns do
+ %{error: error} -> error
+ _ -> Phoenix.Controller.status_message_from_template(template)
+ end
}
else
Phoenix.Controller.status_message_from_template(template)
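
The net effect of this change is that JSON error responses can carry a custom message when one is present in assigns, while still falling back to the template's status text:

    LightningWeb.ErrorView.template_not_found("401.json", %{})
    #=> %{"error" => "Unauthorized"}

    LightningWeb.ErrorView.template_not_found("404.json", %{error: "Collection not found"})
    #=> %{"error" => "Collection not found"}

    LightningWeb.ErrorView.template_not_found("404.html", %{})
    #=> "Not Found"
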
diff --git a/lib/mix/tasks/gen_worker_keys.ex b/lib/mix/tasks/gen_worker_keys.ex
index 1c2a1edec6..59362ed180 100644
--- a/lib/mix/tasks/gen_worker_keys.ex
+++ b/lib/mix/tasks/gen_worker_keys.ex
@@ -6,6 +6,8 @@ defmodule Mix.Tasks.Lightning.GenWorkerKeys do
use Mix.Task
+ alias Lightning.Utils
+
@footer """
To use these keys, use the above output to set the environment variables.
@@ -25,70 +27,21 @@ defmodule Mix.Tasks.Lightning.GenWorkerKeys do
# looks like we may need "try" with this "with"
# https://hexdocs.pm/credo/Credo.Check.Readability.PreferImplicitTry.html
# credo:disable-for-next-line
- try do
- with {:ok, private_key} <- create_private_key(),
- {:ok, public_key} <- abstract_public_key(private_key) do
- IO.puts("""
- WORKER_RUNS_PRIVATE_KEY="#{private_key |> Base.encode64(padding: false)}"
-
- WORKER_SECRET="#{generate_hs256_key()}"
+ {private_key, public_key} = Utils.Crypto.generate_rsa_key_pair()
- WORKER_LIGHTNING_PUBLIC_KEY="#{public_key |> Base.encode64(padding: false)}"
+ IO.puts("""
+ WORKER_RUNS_PRIVATE_KEY="#{private_key |> Base.encode64(padding: false)}"
+ WORKER_SECRET="#{Utils.Crypto.generate_hs256_key()}"
- #{@footer}
- """)
- end
- rescue
- e ->
- case e do
- %{original: :enoent} ->
- IO.puts("openssl not found in PATH")
+ WORKER_LIGHTNING_PUBLIC_KEY="#{public_key |> Base.encode64(padding: false)}"
- e ->
- IO.puts("Error: #{inspect(e)}")
- end
-
- exit({:shutdown, 1})
- end
- end
-
- defp call_openssl(args) do
- System.cmd("openssl", args, stderr_to_stdout: true)
- |> case do
- {_, 0} ->
- :ok
-
- {stdout, status} ->
- {:error, status, stdout}
- end
- end
-
- defp create_private_key do
- filename = Path.join(System.tmp_dir!(), "jwtRSA256-private.pem")
-
- with :ok <- call_openssl(~w[genrsa -out #{filename} 2048]),
- {:ok, contents} <- File.read(filename),
- :ok <- File.rm(filename) do
- {:ok, contents}
- end
- end
-
- defp abstract_public_key(private_key) do
- filename = Path.join(System.tmp_dir!(), "jwtRSA256.pem")
-
- with :ok <- File.write(filename, private_key),
- :ok <-
- call_openssl(
- ~w[rsa -in #{filename} -pubout -outform PEM -out #{filename}]
- ),
- {:ok, contents} <- File.read(filename),
- :ok <- File.rm(filename) do
- {:ok, contents}
- end
- end
- defp generate_hs256_key do
- 32 |> :crypto.strong_rand_bytes() |> Base.encode64()
+ #{@footer}
+ """)
+ rescue
+ e ->
+ IO.puts("Error: #{inspect(e)}")
+ exit({:shutdown, 1})
end
end
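
Running `mix lightning.gen_worker_keys` now prints ready-to-copy environment variables. The same `Lightning.Utils.Crypto` helpers it delegates to can also be called directly, e.g. from IEx (a sketch using only the helper names introduced in this diff):

    {private_pem, public_pem} = Lightning.Utils.Crypto.generate_rsa_key_pair()
    secret = Lightning.Utils.Crypto.generate_hs256_key()

    IO.puts(~s(WORKER_RUNS_PRIVATE_KEY="#{Base.encode64(private_pem, padding: false)}"))
    IO.puts(~s(WORKER_SECRET="#{secret}"))
    IO.puts(~s(WORKER_LIGHTNING_PUBLIC_KEY="#{Base.encode64(public_pem, padding: false)}"))
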
diff --git a/mix.exs b/mix.exs
index c5a2129bac..2bfaf785f9 100644
--- a/mix.exs
+++ b/mix.exs
@@ -150,7 +150,8 @@ defmodule Lightning.MixProject do
# Github API Secret Encoding
{:enacl, github: "aeternity/enacl", branch: "master"},
{:earmark, "~> 1.4"},
- {:eventually, "~> 1.1", only: [:test]}
+ {:eventually, "~> 1.1", only: [:test]},
+ {:benchee, "~> 1.3.1", only: :dev}
]
end
diff --git a/mix.lock b/mix.lock
index e4b2dd4d72..15d31ce9cf 100644
--- a/mix.lock
+++ b/mix.lock
@@ -1,6 +1,7 @@
%{
"acceptor_pool": {:hex, :acceptor_pool, "1.0.0", "43c20d2acae35f0c2bcd64f9d2bde267e459f0f3fd23dab26485bf518c281b21", [:rebar3], [], "hexpm", "0cbcd83fdc8b9ad2eee2067ef8b91a14858a5883cb7cd800e6fcd5803e158788"},
"bcrypt_elixir": {:hex, :bcrypt_elixir, "2.3.1", "5114d780459a04f2b4aeef52307de23de961b69e13a5cd98a911e39fda13f420", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "42182d5f46764def15bf9af83739e3bf4ad22661b1c34fc3e88558efced07279"},
+ "benchee": {:hex, :benchee, "1.3.1", "c786e6a76321121a44229dde3988fc772bca73ea75170a73fd5f4ddf1af95ccf", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "76224c58ea1d0391c8309a8ecbfe27d71062878f59bd41a390266bf4ac1cc56d"},
"bodyguard": {:hex, :bodyguard, "2.4.2", "e91ecd43fae949f5dca7d5054bedf612b5afd6fb4a84376c9909d14e69ab15a7", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "4683ae363d98dee29aac30bda14af51287898f49880fb25ea961227d2dbf6f27"},
"briefly": {:hex, :briefly, "0.5.1", "ee10d48da7f79ed2aebdc3e536d5f9a0c3e36ff76c0ad0d4254653a152b13a8a", [:mix], [], "hexpm", "bd684aa92ad8b7b4e0d92c31200993c4bc1469fc68cd6d5f15144041bd15cb57"},
"broadway": {:hex, :broadway, "1.1.0", "8ed3aea01fd6f5640b3e1515b90eca51c4fc1fac15fb954cdcf75dc054ae719c", [:mix], [{:gen_stage, "~> 1.0", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.3.7 or ~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "25e315ef1afe823129485d981dcc6d9b221cea30e625fd5439e9b05f44fb60e4"},
@@ -27,6 +28,7 @@
"ctx": {:hex, :ctx, "0.6.0", "8ff88b70e6400c4df90142e7f130625b82086077a45364a78d208ed3ed53c7fe", [:rebar3], [], "hexpm", "a14ed2d1b67723dbebbe423b28d7615eb0bdcba6ff28f2d1f1b0a7e1d4aa5fc2"},
"db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
+ "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
"dialyxir": {:hex, :dialyxir, "1.4.4", "fb3ce8741edeaea59c9ae84d5cec75da00fa89fe401c72d6e047d11a61f65f70", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "cd6111e8017ccd563e65621a4d9a4a1c5cd333df30cebc7face8029cacb4eff6"},
"dotenvy": {:hex, :dotenvy, "0.8.0", "777486ad485668317c56afc53a7cbcd74f43e4e34588ba8e95a73e15a360050e", [:mix], [], "hexpm", "1f535066282388cbd109743d337ac46ff0708195780d4b5778bb83491ab1b654"},
"earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"},
@@ -143,6 +145,7 @@
"snappyer": {:hex, :snappyer, "1.2.9", "9cc58470798648ce34c662ca0aa6daae31367667714c9a543384430a3586e5d3", [:rebar3], [], "hexpm", "18d00ca218ae613416e6eecafe1078db86342a66f86277bd45c95f05bf1c8b29"},
"sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
+ "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"},
"sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
"swoosh": {:hex, :swoosh, "1.12.0", "ecc85ee12947932986243299b8d28e6cdfc192c8d9e24c4c64f6738efdf344cb", [:mix], [{:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "87db7ab0f35e358ba5eac3afc7422ed0c8c168a2d219d2a83ad8cb7a424f6cc9"},
"table_rex": {:hex, :table_rex, "3.1.1", "0c67164d1714b5e806d5067c1e96ff098ba7ae79413cc075973e17c38a587caa", [:mix], [], "hexpm", "678a23aba4d670419c23c17790f9dcd635a4a89022040df7d5d772cb21012490"},
diff --git a/priv/bench/collections.exs b/priv/bench/collections.exs
new file mode 100644
index 0000000000..3a8853f581
--- /dev/null
+++ b/priv/bench/collections.exs
@@ -0,0 +1,169 @@
+alias Lightning.Repo
+alias Lightning.Collections
+alias Lightning.Projects
+
+keys_count = 5_000
+
+Repo.delete_all(Collections.Collection)
+
+project =
+ with nil <- Repo.get_by(Projects.Project, name: "benchee") do
+ user = Repo.get_by(Lightning.Accounts.User, email: "demo@openfn.org")
+ {:ok, project} = Projects.create_project(%{name: "benchee", project_users: [%{user_id: user.id, role: :owner}]})
+ project
+ end
+
+{:ok, collection} =
+ with {:error, :not_found} <- Collections.get_collection("benchee") do
+ Collections.create_collection(project.id, "benchee")
+ end
+
+IO.puts("\n### Setup:")
+IO.puts("Generating items for benchee collection...")
+
+record = fn prefix, i ->
+ i_str = String.pad_leading(to_string(i), 5, "0")
+ {"#{prefix}:foo#{i_str}:bar#{i_str}", Jason.encode!(%{fieldA: "value#{1_000_000 + i}"})}
+end
+
+sampleA = Enum.map(1..keys_count, fn i -> record.("keyA", i) end)
+sampleB = Enum.map(1..keys_count, fn i -> record.("keyB", i) end)
+{sampleC1, sampleC2} = Enum.map(1..keys_count * 2, fn i -> record.("keyC", i) end) |> Enum.split(keys_count)
+
+:timer.tc(fn ->
+ [sampleA, sampleB, sampleC1]
+ |> Enum.map(fn sample ->
+ Task.async(fn ->
+ Enum.with_index(sample, fn {key, value}, idx ->
+ if rem(idx, 50) == 0, do: IO.puts("Inserting " <> key)
+ :ok = Collections.put(collection, key, value)
+ end)
+ :ok
+ end)
+ end)
+ |> Task.await_many(:infinity)
+end)
+|> tap(fn {duration, _res} ->
+ IO.puts("Inserted 3 x #{keys_count} shuffled items (w/ unsorted keys).")
+ IO.puts("elapsed time: #{div(duration, 1_000)}ms\n")
+end)
+
+IO.puts("Inserting #{length(sampleC2)} items with put_all...")
+:timer.tc(fn ->
+ :ok = Collections.put_all(collection, sampleC2)
+end)
+|> tap(fn {duration, _res} ->
+ IO.puts("elapsed time: #{div(duration, 1_000)}ms\n")
+end)
+
+sampleD = Enum.map(1..keys_count, fn i -> record.("keyD", i) end)
+
+IO.puts("Inserting sampleD (w/ sorted keys)...")
+:timer.tc(fn ->
+ sampleD
+ |> Enum.chunk_every(1000)
+ |> Enum.map(fn sample ->
+ Task.async(fn ->
+ Enum.each(sample, fn {k, v} -> Collections.put(collection, k, v) end)
+ end)
+ end)
+ |> Task.await_many(:infinity)
+end)
+|> tap(fn {duration, _res} ->
+ IO.puts("elapsed time: #{div(duration, 1000)}ms\n")
+end)
+
+IO.puts("Upserting sampleD...")
+:timer.tc(fn ->
+ sampleD
+ |> Enum.chunk_every(1000)
+ |> Enum.map(fn sample ->
+ Task.async(fn ->
+ Enum.each(sample, fn {k, v} -> Collections.put(collection, k, v) end)
+ end)
+ end)
+ |> Task.await_many(:infinity)
+end)
+|> tap(fn {duration, _res} ->
+ IO.puts("elapsed time: #{div(duration, 1000)}ms\n")
+end)
+
+stream_all =
+ fn ->
+ Stream.unfold(nil, fn cursor ->
+ case Repo.transaction(fn -> Collections.stream_all(collection, cursor) |> Enum.to_list() end) do
+ {:ok, []} -> nil
+ {:ok, list} -> {list, List.last(list).updated_at}
+ end
+ end)
+ |> Enum.to_list()
+ |> List.flatten()
+ end
+
+stream_match_all =
+ fn ->
+ Stream.unfold(nil, fn cursor ->
+ case Repo.transaction(fn -> Collections.stream_match(collection, "key*", cursor) |> Enum.to_list() end) do
+ {:ok, []} -> nil
+ {:ok, list} -> {list, List.last(list).updated_at}
+ end
+ end)
+ |> Enum.to_list()
+ |> List.flatten()
+ end
+
+stream_match_prefix =
+ fn ->
+ Stream.unfold(nil, fn cursor ->
+ case Repo.transaction(fn -> Collections.stream_match(collection, "keyA*", cursor) |> Enum.to_list() end) do
+ {:ok, []} -> nil
+ {:ok, list} -> {list, List.last(list).updated_at}
+ end
+ end)
+ |> Enum.to_list()
+ |> List.flatten()
+ end
+
+stream_match_trigram =
+ fn ->
+ Stream.unfold(nil, fn cursor ->
+ case Repo.transaction(fn -> Collections.stream_match(collection, "keyB*bar*", cursor) |> Enum.to_list() end) do
+ {:ok, []} -> nil
+ {:ok, list} -> {list, List.last(list).updated_at}
+ end
+ end)
+ |> Enum.to_list()
+ |> List.flatten()
+ end
+
+
+IO.puts("\n### Round record count ({microsecs, count}):")
+:timer.tc(fn -> stream_all.() |> Enum.count() end) |> IO.inspect(label: "stream_all")
+:timer.tc(fn -> stream_match_all.() |> Enum.count() end) |> IO.inspect(label: "stream_match_all")
+:timer.tc(fn -> stream_match_prefix.() |> Enum.count() end) |> IO.inspect(label: "stream_match_prefix")
+:timer.tc(fn -> stream_match_trigram.() |> Enum.count() end) |> IO.inspect(label: "stream_match_trigram")
+IO.puts("\n")
+
+Benchee.run(
+ %{
+ "stream_all" => stream_all,
+ "stream_match_all" => stream_match_all,
+ "stream_match_prefix" => stream_match_prefix,
+ "stream_match_trigram" => stream_match_trigram
+ },
+ warmup: 2,
+ time: 5,
+ parallel: 1
+)
+
+Benchee.run(
+ %{
+ "stream_all" => stream_all,
+ "stream_match_all" => stream_match_all,
+ "stream_match_prefix" => stream_match_prefix,
+ "stream_match_trigram" => stream_match_trigram
+ },
+ warmup: 2,
+ time: 5,
+ parallel: 4
+)
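
This script is intended to be run in a dev environment with `mix run priv/bench/collections.exs` (it assumes the seeded demo@openfn.org user exists). The pagination idiom used by each reader above can be isolated as a small helper; the sketch below simply mirrors the calls made in this script:

    # Page through a collection by repeatedly opening a transaction and using
    # the last item's updated_at as the next cursor.
    fetch_page = fn collection, cursor ->
      Lightning.Repo.transaction(fn ->
        collection
        |> Lightning.Collections.stream_all(cursor)
        |> Enum.to_list()
      end)
    end

    read_everything = fn collection ->
      Stream.unfold(nil, fn cursor ->
        case fetch_page.(collection, cursor) do
          {:ok, []} -> nil
          {:ok, items} -> {items, List.last(items).updated_at}
        end
      end)
      |> Enum.to_list()
      |> List.flatten()
    end
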
diff --git a/priv/repo/migrations/20241022021209_create_collections.exs b/priv/repo/migrations/20241022021209_create_collections.exs
new file mode 100644
index 0000000000..5e87809bd0
--- /dev/null
+++ b/priv/repo/migrations/20241022021209_create_collections.exs
@@ -0,0 +1,36 @@
+defmodule Lightning.Repo.Migrations.CreateCollections do
+ use Ecto.Migration
+
+ def change do
+ create table(:collections, primary_key: false) do
+ add :id, :binary_id, primary_key: true
+ add :name, :string
+
+ add :project_id,
+ references(:projects, on_delete: :delete_all, type: :binary_id), null: false
+
+ timestamps()
+ end
+
+ create unique_index(:collections, [:name])
+
+ create table(:collections_items, primary_key: false) do
+ add :collection_id,
+ references(:collections, type: :binary_id, on_delete: :delete_all), null: false
+
+ add :key, :string
+ add :value, :string
+
+ timestamps(type: :naive_datetime_usec)
+ end
+
+ execute "CREATE EXTENSION IF NOT EXISTS pg_trgm",
+ "DROP EXTENSION IF EXISTS pg_trgm"
+
+ create index(:collections_items, [:updated_at])
+ create unique_index(:collections_items, [:collection_id, :key])
+
+ execute "CREATE INDEX collections_items_key_trgm_idx ON collections_items USING GIN (key gin_trgm_ops)",
+ "DROP INDEX IF EXISTS collections_items_key_trgm_idx"
+ end
+end
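
The pg_trgm extension and the GIN trigram index on `key` are what make the wildcard key matching exercised by `stream_match/3` and `delete_all/2` efficient. A rough sketch of the kind of query the index accelerates (the actual query construction lives in `Lightning.Collections` and may differ; `collection` is a placeholder):

    import Ecto.Query

    # "foo:*:bar*"-style patterns translate to SQL LIKE patterns such as
    # "foo:%:bar%", which the gin_trgm_ops index can serve without a
    # sequential scan over all items.
    from(i in Lightning.Collections.Item,
      where: i.collection_id == ^collection.id,
      where: like(i.key, ^"foo:%:bar%"),
      order_by: [asc: i.updated_at]
    )
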
diff --git a/test/integration/web_and_worker_test.exs b/test/integration/web_and_worker_test.exs
index 0cfe9d9877..64daa3dae4 100644
--- a/test/integration/web_and_worker_test.exs
+++ b/test/integration/web_and_worker_test.exs
@@ -248,7 +248,7 @@ defmodule Lightning.WebAndWorkerTest do
end)
assert version_logs =~ "▸ node.js 18.17"
- assert version_logs =~ "▸ worker 1.7"
+ assert version_logs =~ "▸ worker 1.8"
assert version_logs =~ "▸ @openfn/language-http 3.1.12"
expected_lines =
diff --git a/test/lightning/accounts_test.exs b/test/lightning/accounts_test.exs
index 4de50f2e19..636f2e1be9 100644
--- a/test/lightning/accounts_test.exs
+++ b/test/lightning/accounts_test.exs
@@ -1044,7 +1044,10 @@ defmodule Lightning.AccountsTest do
assert user_token = Repo.get_by(UserToken, token: token)
assert user_token.context == "api"
- Lightning.Accounts.UserToken.verify_and_validate!(token)
+ Lightning.Tokens.PersonalAccessToken.verify_and_validate!(
+ token,
+ Lightning.Config.token_signer()
+ )
# Creating the same token for another user should fail
assert_raise Ecto.ConstraintError, fn ->
diff --git a/test/lightning/collection_test.exs b/test/lightning/collection_test.exs
new file mode 100644
index 0000000000..cdaf76a815
--- /dev/null
+++ b/test/lightning/collection_test.exs
@@ -0,0 +1,85 @@
+defmodule Lightning.CollectionTest do
+ use Lightning.DataCase, async: true
+
+ import Lightning.Factories
+
+ alias Lightning.Collections.Collection
+
+ setup do
+ project = insert(:project, name: "Test Project")
+ {:ok, project: project}
+ end
+
+ @valid_name "valid-name"
+
+ describe "changeset/2" do
+ test "valid attributes create a valid changeset", %{project: project} do
+ valid_attrs = %{
+ "project_id" => project.id,
+ "name" => @valid_name
+ }
+
+ changeset = Collection.changeset(%Collection{}, valid_attrs)
+ assert changeset.valid?
+ end
+
+ test "missing required fields result in errors" do
+ invalid_attrs = %{"project_id" => nil, "name" => nil}
+
+ changeset = Collection.changeset(%Collection{}, invalid_attrs)
+ refute changeset.valid?
+
+ assert %{
+ project_id: ["can't be blank"],
+ name: ["can't be blank"]
+ } = errors_on(changeset)
+ end
+
+ test "name must be URL-safe (valid format)", %{project: project} do
+ valid_names = [
+ "valid-name",
+ "collection_123",
+ "my.collection",
+ "valid-collection-name"
+ ]
+
+ for name <- valid_names do
+ attrs = %{"project_id" => project.id, "name" => name}
+ changeset = Collection.changeset(%Collection{}, attrs)
+ assert changeset.valid?, "Expected #{name} to be valid"
+ end
+ end
+
+ test "name with invalid characters fails validation", %{project: project} do
+ invalid_names = [
+ "invalid name",
+ "invalid_name!",
+ "invalid/name",
+ "-invalid",
+ "invalid-",
+ "invalid--name"
+ ]
+
+ for name <- invalid_names do
+ attrs = %{"project_id" => project.id, "name" => name}
+ changeset = Collection.changeset(%Collection{}, attrs)
+ refute changeset.valid?, "Expected #{name} to be invalid"
+
+ assert %{name: ["Collection name must be URL safe"]} =
+ errors_on(changeset)
+ end
+ end
+
+ test "name uniqueness constraint adds error", %{project: project} do
+ insert(:collection, project: project, name: "existing-name")
+
+ attrs = %{"project_id" => project.id, "name" => "existing-name"}
+ changeset = Collection.changeset(%Collection{}, attrs)
+
+ assert {:error, changeset} = Lightning.Repo.insert(changeset)
+
+ assert %{name: ["A collection with this name already exists"]} =
+ errors_on(changeset)
+ end
+ end
+end
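
For readers skimming these cases: "URL safe" here means lowercase runs of letters and digits joined by single `-`, `_` or `.` separators, with no leading or trailing separator. One regex that satisfies exactly the cases above (shown as a hypothetical; the actual changeset validation in `Lightning.Collections.Collection` may be written differently):

    # Hypothetical pattern consistent with the valid/invalid names tested above.
    name_format = ~r/^[a-z0-9]+([._-][a-z0-9]+)*$/

    Regex.match?(name_format, "valid-collection-name") #=> true
    Regex.match?(name_format, "my.collection")         #=> true
    Regex.match?(name_format, "invalid--name")         #=> false
    Regex.match?(name_format, "-invalid")              #=> false
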
diff --git a/test/lightning/collections_test.exs b/test/lightning/collections_test.exs
new file mode 100644
index 0000000000..8aa2514f7f
--- /dev/null
+++ b/test/lightning/collections_test.exs
@@ -0,0 +1,562 @@
+defmodule Lightning.CollectionsTest do
+ use Lightning.DataCase
+
+ alias Lightning.Collections
+ alias Lightning.Collections.Collection
+ alias Lightning.Collections.Item
+
+ describe "get_collection/1" do
+ test "get a collection" do
+ %{id: collection_id, name: collection_name} = insert(:collection)
+
+ assert {:ok, %Collection{id: ^collection_id}} =
+ Collections.get_collection(collection_name)
+ end
+
+ test "returns an error when the collection does not exist" do
+ assert {:error, :not_found} =
+ Collections.get_collection("nonexistent")
+ end
+ end
+
+ describe "create_collection/2" do
+ test "creates a new collection" do
+ %{id: project_id} = insert(:project)
+ name = "col1_project1"
+
+ assert {:ok, %Collection{project_id: ^project_id, name: ^name}} =
+ Collections.create_collection(project_id, name)
+ end
+
+ test "returns an error when collection name is taken" do
+ %{id: project_id1} = insert(:project)
+ %{id: project_id2} = insert(:project)
+ name = "col1_project1"
+
+ assert {:ok, %Collection{project_id: ^project_id1, name: ^name}} =
+ Collections.create_collection(project_id1, name)
+
+ assert {:error,
+ %{
+ errors: [
+ name:
+ {"A collection with this name already exists",
+ [
+ constraint: :unique,
+ constraint_name: "collections_name_index"
+ ]}
+ ]
+ }} = Collections.create_collection(project_id2, name)
+ end
+ end
+
+ describe "delete_collection/1" do
+ test "deletes a collection" do
+ %{id: collection_id} = insert(:collection)
+
+ assert {:ok, %Collection{id: ^collection_id}} =
+ Collections.delete_collection(collection_id)
+ end
+
+ test "returns an error when collection does not exist" do
+ assert {:error, :not_found} =
+ Collections.delete_collection(Ecto.UUID.generate())
+ end
+ end
+
+ describe "get/2" do
+ test "returns an entry for the given collection" do
+ %{key: key, value: value, collection: collection} =
+ insert(:collection_item) |> Repo.preload(:collection)
+
+ assert %Item{key: ^key, value: ^value} = Collections.get(collection, key)
+ end
+
+ test "returns nil if the item key does not exist" do
+ collection = insert(:collection)
+
+ refute Collections.get(collection, "nonexistent")
+ end
+
+ test "returns nil if the collection does not exist" do
+ insert(:collection_item, key: "existing_key")
+
+ refute Collections.get(%{id: Ecto.UUID.generate()}, "existing_key")
+ end
+ end
+
+ describe "stream_all/3" do
+ test "returns all items for the given collection sorted by inserted_at" do
+ collection = insert(:collection)
+
+ items =
+ 1..11
+ |> Enum.map(fn _i ->
+ insert(:collection_item,
+ key: "rkey#{:rand.uniform()}",
+ collection: collection
+ )
+ end)
+
+ Repo.transaction(fn ->
+ assert stream = Collections.stream_all(collection, limit: 50)
+
+ assert stream_items =
+ stream
+ |> Stream.take(15)
+ |> Enum.to_list()
+ |> Repo.preload(collection: :project)
+
+ assert List.last(stream_items) ==
+ Enum.sort_by(items, & &1.inserted_at) |> List.last()
+
+ assert MapSet.new(stream_items) == MapSet.new(items)
+ end)
+ end
+
+ test "returns the items after a cursor up to a limited amount" do
+ collection = insert(:collection)
+
+ items =
+ Enum.map(1..30, fn _i ->
+ insert(:collection_item,
+ key: "rkey#{:rand.uniform()}",
+ collection: collection
+ )
+ end)
+
+ %{inserted_at: cursor} = Enum.at(items, 4)
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_all(collection, cursor: cursor, limit: 50)
+
+ assert stream |> Enum.to_list() |> Enum.count() == 30 - (4 + 1)
+ end)
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_all(collection, cursor: cursor, limit: 10)
+
+ assert Enum.count(stream) == 10
+ end)
+ end
+
+ test "returns empty list when collection is empty" do
+ collection = insert(:collection)
+
+ Repo.transaction(fn ->
+ assert stream = Collections.stream_all(collection, limit: 50)
+ assert Enum.count(stream) == 0
+ end)
+ end
+
+ test "returns empty list when the collection doesn't exist" do
+ insert(:collection_item, key: "existing_key")
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_all(%{id: Ecto.UUID.generate()}, limit: 50)
+
+ assert Enum.count(stream) == 0
+ end)
+ end
+
+ test "fails when outside of an explicit transaction" do
+ collection = insert(:collection)
+ _items = insert_list(5, :collection_item, collection: collection)
+
+ assert stream = Collections.stream_all(collection, limit: 50)
+
+ assert_raise RuntimeError,
+ ~r/cannot reduce stream outside of transaction/,
+ fn ->
+ Enum.take(stream, 5) |> Enum.each(&inspect/1)
+ end
+ end
+ end
+
+ describe "stream_match/3" do
+ test "returns item with exact match" do
+ collection = insert(:collection)
+ _itemA = insert(:collection_item, key: "keyA", collection: collection)
+ itemB = insert(:collection_item, key: "keyB", collection: collection)
+
+ Repo.transaction(fn ->
+ assert stream = Collections.stream_match(collection, "keyB*", limit: 50)
+
+ assert [itemB] ==
+ stream
+ |> Enum.to_list()
+ |> Repo.preload(collection: :project)
+ end)
+ end
+
+ test "returns matching items for the given collection sorted by inserted_at" do
+ collection = insert(:collection)
+
+ items =
+ 1..11
+ |> Enum.map(fn _i ->
+ insert(:collection_item,
+ key: "rkeyA#{:rand.uniform()}",
+ collection: collection
+ )
+ end)
+
+ insert(:collection_item, key: "rkeyB", collection: collection)
+
+ Repo.transaction(fn ->
+ assert stream = Collections.stream_match(collection, "rkeyA*", limit: 50)
+
+ assert stream_items =
+ Stream.take(stream, 12)
+ |> Enum.to_list()
+ |> Repo.preload(collection: :project)
+
+ assert List.last(stream_items) ==
+ Enum.sort_by(items, & &1.inserted_at) |> List.last()
+
+ assert MapSet.new(stream_items) == MapSet.new(items)
+ end)
+ end
+
+ test "returns matching items after a cursor up to a limited amount" do
+ collection = insert(:collection)
+
+ items =
+ Enum.map(1..30, fn _i ->
+ insert(:collection_item,
+ key: "rkeyA#{:rand.uniform()}",
+ collection: collection
+ )
+ end)
+
+ %{inserted_at: cursor} = Enum.at(items, 9)
+
+ insert(:collection_item, key: "rkeyB", collection: collection)
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_match(collection, "rkeyA*",
+ cursor: cursor,
+ limit: 50
+ )
+
+ assert Enum.count(stream) == 30 - (9 + 1)
+ end)
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_match(collection, "rkeyA*",
+ cursor: cursor,
+ limit: 16
+ )
+
+ assert Enum.count(stream) == 16
+ end)
+ end
+
+ test "returns empty list when collection is empty" do
+ collection = insert(:collection)
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_match(collection, "any-key", limit: 50)
+
+ assert Enum.count(stream) == 0
+ end)
+ end
+
+ test "returns empty list when the collection doesn't exist" do
+ insert(:collection_item, key: "existing_key")
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_match(
+ %{id: Ecto.UUID.generate()},
+ "existing_key",
+ limit: 50
+ )
+
+ assert Enum.count(stream) == 0
+ end)
+ end
+
+ test "returns item escaping the %" do
+ collection = insert(:collection)
+ item = insert(:collection_item, key: "keyA%", collection: collection)
+
+ Repo.transaction(fn ->
+ assert stream = Collections.stream_match(collection, "keyA%*", limit: 50)
+
+ assert [item] ==
+ stream
+ |> Enum.to_list()
+ |> Repo.preload(collection: :project)
+ end)
+
+ insert(:collection_item, key: "keyBC", collection: collection)
+
+ Repo.transaction(fn ->
+ assert stream = Collections.stream_match(collection, "keyB%", limit: 50)
+
+ assert Enum.count(stream) == 0
+ end)
+ end
+
+ test "returns item escaping the \\" do
+ collection = insert(:collection)
+ item = insert(:collection_item, key: "keyA\\", collection: collection)
+
+ Repo.transaction(fn ->
+ assert stream =
+ Collections.stream_match(collection, "keyA\\*", limit: 50)
+
+ assert [item] ==
+ stream
+ |> Enum.to_list()
+ |> Repo.preload(collection: :project)
+ end)
+ end
+
+ test "fails when outside of an explicit transaction" do
+ collection = insert(:collection)
+ _items = insert_list(5, :collection_item, collection: collection)
+
+ assert stream = Collections.stream_match(collection, "key*", limit: 50)
+
+ assert_raise RuntimeError,
+ ~r/cannot reduce stream outside of transaction/,
+ fn ->
+ Enum.take(stream, 5) |> Enum.each(&inspect/1)
+ end
+ end
+ end
+
+ describe "put/3" do
+ test "creates a new entry in the collection for the given collection" do
+ collection = insert(:collection)
+
+ assert :ok = Collections.put(collection, "some-key", "some-value")
+
+ assert %{key: "some-key", value: "some-value"} =
+ Repo.get_by!(Item, key: "some-key")
+ end
+
+ test "updates the value of an item when key exists" do
+ collection = insert(:collection)
+
+ assert :ok = Collections.put(collection, "some-key", "some-value1")
+
+ assert %{key: "some-key", value: "some-value1"} =
+ Repo.get_by!(Item, key: "some-key")
+
+ assert :ok = Collections.put(collection, "some-key", "some-value2")
+
+ assert %{key: "some-key", value: "some-value2"} =
+ Repo.get_by!(Item, key: "some-key")
+ end
+
+ test "returns an :error if the collection does not exist" do
+ assert {:error,
+ %{
+ errors: [
+ collection_id:
+ {"does not exist",
+ [
+ constraint: :foreign,
+ constraint_name: "collections_items_collection_id_fkey"
+ ]}
+ ]
+ }} = Collections.put(%{id: Ecto.UUID.generate()}, "key", "value")
+ end
+ end
+
+ describe "put_all/2" do
+ test "inserts multiple entries at once in a given collection" do
+ collection = insert(:collection)
+
+ items =
+ Enum.map(1..5, fn i -> %{"key" => "key#{i}", "value" => "value#{i}"} end)
+
+ assert {:ok, 5} = Collections.put_all(collection, items)
+
+ assert Item |> Repo.all() |> Enum.map(&Map.take(&1, [:key, :value])) ==
+ Enum.map(items, &%{key: &1["key"], value: &1["value"]})
+ end
+
+ test "replaces conflicting values and updates timestamp" do
+ collection = insert(:collection)
+
+ items =
+ Enum.map(1..5, fn i -> %{"key" => "key#{i}", "value" => "value#{i}"} end)
+
+ assert {:ok, 5} = Collections.put_all(collection, items)
+
+ assert %{updated_at: updated_at1} = Repo.get_by(Item, key: "key1")
+ assert %{updated_at: updated_at2} = Repo.get_by(Item, key: "key2")
+ assert %{updated_at: updated_at5} = Repo.get_by(Item, key: "key5")
+
+ update_items =
+ Enum.map(1..2, fn i ->
+ %{"key" => "key#{i}", "value" => "value#{10 + i}"}
+ end)
+
+ assert {:ok, 2} = Collections.put_all(collection, update_items)
+
+ assert %{value: "value11", updated_at: updated_at} =
+ Repo.get_by(Item, key: "key1")
+
+ assert NaiveDateTime.compare(updated_at, updated_at1) == :gt
+
+ assert %{value: "value12", updated_at: updated_at} =
+ Repo.get_by(Item, key: "key2")
+
+ assert NaiveDateTime.compare(updated_at, updated_at2) == :gt
+
+ assert %{value: "value5", updated_at: ^updated_at5} =
+ Repo.get_by(Item, key: "key5")
+ end
+ end
+
+ describe "delete/2" do
+ test "deletes an entry for the given collection" do
+ collection = insert(:collection)
+
+ %{key: key} =
+ insert(:collection_item, collection: collection)
+
+ assert :ok = Collections.delete(collection, key)
+
+ refute Collections.get(collection, key)
+ end
+
+ test "returns an :error if the collection does not exist" do
+ assert {:error, :not_found} =
+ Collections.delete(%{id: Ecto.UUID.generate()}, "key")
+ end
+
+ test "returns an :error if item does not exist" do
+ collection = insert(:collection)
+
+ assert {:error, :not_found} =
+ Collections.delete(collection, "nonexistent")
+ end
+ end
+
+ describe "delete_all/2" do
+ test "deletes all items of the given collection" do
+ collection = insert(:collection)
+
+ items = insert_list(3, :collection_item, collection: collection)
+
+ assert {:ok, 3} = Collections.delete_all(collection)
+
+ refute Enum.any?(items, &Collections.get(collection, &1.key))
+ end
+
+ test "deletes matching items of the given collection" do
+ collection = insert(:collection)
+
+ item1 =
+ insert(:collection_item, collection: collection, key: "foo:123:bar1")
+
+ item2 =
+ insert(:collection_item, collection: collection, key: "foo:234:bar2")
+
+ item3 =
+ insert(:collection_item, collection: collection, key: "foo:345:bar3")
+
+ item4 =
+ insert(:collection_item, collection: collection, key: "foo:456:zanzibar")
+
+ assert {:ok, 3} = Collections.delete_all(collection, "foo:*:bar*")
+
+ refute Collections.get(collection, item1.key)
+ refute Collections.get(collection, item2.key)
+ refute Collections.get(collection, item3.key)
+ assert Collections.get(collection, item4.key)
+ end
+
+ test "returns an :error if the collection does not exist" do
+ assert {:error, :not_found} =
+ Collections.delete(%{id: Ecto.UUID.generate()}, "key")
+ end
+
+ test "returns an :error if item does not exist" do
+ collection = insert(:collection)
+
+ assert {:error, :not_found} =
+ Collections.delete(collection, "nonexistent")
+ end
+ end
+
+ describe "list_collections/1" do
+ test "returns a list of collections with default ordering and preloading" do
+ collection1 = insert(:collection, name: "B Collection")
+ collection2 = insert(:collection, name: "A Collection")
+
+ result = Collections.list_collections()
+
+ assert Enum.map(result, & &1.id) == [collection2.id, collection1.id]
+ end
+
+ test "returns collections ordered by specified field" do
+ collection1 = insert(:collection, inserted_at: ~N[2024-01-01 00:00:00])
+ collection2 = insert(:collection, inserted_at: ~N[2024-02-01 00:00:00])
+
+ result = Collections.list_collections(order_by: [asc: :inserted_at])
+
+ assert Enum.map(result, & &1.id) == [collection1.id, collection2.id]
+ end
+
+ test "preloads specified associations" do
+ project = insert(:project)
+ insert(:collection, project: project)
+
+ result = Collections.list_collections(preload: [:project])
+
+ assert Enum.map(result, & &1.project.id) == [project.id]
+ end
+ end
+
+ describe "create_collection/1" do
+ test "creates a new collection with valid attributes" do
+ %{id: project_id} = insert(:project)
+ attrs = %{name: "new-collection", project_id: project_id}
+
+ assert {:ok, %Collection{name: "new-collection"}} =
+ Collections.create_collection(attrs)
+ end
+
+ test "returns an error if invalid attributes are provided" do
+ attrs = %{name: nil}
+
+ assert {:error, changeset} = Collections.create_collection(attrs)
+
+ assert %{name: ["can't be blank"], project_id: ["can't be blank"]} ==
+ errors_on(changeset)
+ end
+ end
+
+ describe "update_collection/2" do
+ test "updates an existing collection with valid attributes" do
+ collection = insert(:collection, name: "Old Name")
+ attrs = %{name: "updated-name"}
+
+ assert {:ok, %Collection{name: "updated-name"}} =
+ Collections.update_collection(collection, attrs)
+ end
+
+ test "returns an error if invalid attributes are provided" do
+ collection = insert(:collection)
+ attrs = %{name: nil}
+
+ assert {:error, changeset} =
+ Collections.update_collection(collection, attrs)
+
+ assert %{name: ["can't be blank"]} == errors_on(changeset)
+ end
+ end
+end
diff --git a/test/lightning/failure_alert_test.exs b/test/lightning/failure_alert_test.exs
index 3b7d38884a..7b0f4db91e 100644
--- a/test/lightning/failure_alert_test.exs
+++ b/test/lightning/failure_alert_test.exs
@@ -258,7 +258,7 @@ defmodule Lightning.FailureAlertTest do
Lightning.Stub.reset_time()
{:ok, bearer, claims} =
- Workers.Token.generate_and_sign(
+ Workers.WorkerToken.generate_and_sign(
%{},
Lightning.Config.worker_token_signer()
)
@@ -269,6 +269,7 @@ defmodule Lightning.FailureAlertTest do
UsageLimiter.get_run_options(%Context{
project_id: run.work_order.workflow.project_id
})
+ |> Map.new()
{:ok, %{}, socket} =
LightningWeb.WorkerSocket
diff --git a/test/lightning/helpers_test.exs b/test/lightning/helpers_test.exs
index de87090f4a..5ae1f17e48 100644
--- a/test/lightning/helpers_test.exs
+++ b/test/lightning/helpers_test.exs
@@ -3,6 +3,9 @@ defmodule Lightning.HelpersTest do
import Lightning.Helpers, only: [coerce_json_field: 2]
+ alias Ecto.Changeset
+ alias Lightning.Helpers
+
test "coerce_json_field/2 will transform a json string inside a map by it's key" do
input = %{
"body" =>
@@ -54,4 +57,110 @@ defmodule Lightning.HelpersTest do
goals: 123
}
end
+
+ describe "copy_error/4" do
+ test "copies an error from one key to another" do
+ changeset = %Changeset{errors: [name: {"has already been taken", []}]}
+ updated_changeset = Helpers.copy_error(changeset, :name, :raw_name)
+
+ assert updated_changeset.errors[:name] == {"has already been taken", []}
+
+ assert updated_changeset.errors[:raw_name] ==
+ {"has already been taken", []}
+ end
+
+ test "returns the changeset unchanged if original_key does not exist" do
+ changeset = %Changeset{errors: [email: {"is invalid", []}]}
+ updated_changeset = Helpers.copy_error(changeset, :name, :raw_name)
+
+ assert updated_changeset == changeset
+ refute Keyword.has_key?(updated_changeset.errors, :raw_name)
+ end
+
+ test "overwrites the new_key error if it exists and overwrite is true" do
+ changeset = %Changeset{
+ errors: [
+ name: {"has already been taken", []},
+ raw_name: {"is invalid", []}
+ ]
+ }
+
+ updated_changeset = Helpers.copy_error(changeset, :name, :raw_name)
+
+ assert updated_changeset.errors[:raw_name] ==
+ {"has already been taken", []}
+ end
+
+ test "does not overwrite the new_key error if overwrite is false" do
+ changeset = %Changeset{
+ errors: [
+ name: {"has already been taken", []},
+ raw_name: {"is invalid", []}
+ ]
+ }
+
+ updated_changeset =
+ Helpers.copy_error(changeset, :name, :raw_name, overwrite: false)
+
+ assert updated_changeset.errors[:raw_name] == {"is invalid", []}
+ end
+
+ test "returns the changeset unchanged if new_key already exists and overwrite is false" do
+ changeset = %Changeset{
+ errors: [
+ name: {"has already been taken", []},
+ raw_name: {"is invalid", []}
+ ]
+ }
+
+ updated_changeset =
+ Helpers.copy_error(changeset, :name, :raw_name, overwrite: false)
+
+ assert updated_changeset.errors[:raw_name] == {"is invalid", []}
+ end
+ end
+
+ describe "url_safe_name/1" do
+ test "returns an empty string when given nil" do
+ assert Helpers.url_safe_name(nil) == ""
+ end
+
+ test "converts a simple string to lowercase and replaces spaces" do
+ assert Helpers.url_safe_name("My Project") == "my-project"
+ end
+
+ test "removes special characters and replaces them with hyphens" do
+ assert Helpers.url_safe_name("My@#Project!!") == "my-project"
+ end
+
+ test "trims leading and trailing hyphens" do
+ assert Helpers.url_safe_name("--My Project--") == "my-project"
+ end
+
+ test "preserves international characters" do
+ assert Helpers.url_safe_name("Éléphant") == "éléphant"
+ end
+
+ test "handles a string with multiple special characters" do
+ assert Helpers.url_safe_name("Hello, World! 123") == "hello-world-123"
+ end
+
+ test "handles a string with underscores and periods" do
+ assert Helpers.url_safe_name("file_name.version_1.0") ==
+ "file_name.version_1.0"
+ end
+
+ test "replaces multiple spaces or special characters with a single hyphen" do
+ assert Helpers.url_safe_name("My Project") == "my-project"
+ assert Helpers.url_safe_name("My--Project!!") == "my--project"
+ end
+
+ test "handles a string with only special characters and removes them" do
+ assert Helpers.url_safe_name("###!!!") == ""
+ end
+
+ test "keeps numbers intact in the string" do
+ assert Helpers.url_safe_name("Project 2023") == "project-2023"
+ end
+ end
end
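
Taken together, these cases pin the behaviour down fairly tightly: downcase, collapse every run of disallowed characters into a single hyphen (keeping letters, digits, `_`, `.` and `-`), then trim leading and trailing hyphens. One implementation that satisfies all of them, shown only as a sketch (the actual `Lightning.Helpers.url_safe_name/1` may differ):

    # Sketch only; not the canonical implementation.
    def url_safe_name(nil), do: ""

    def url_safe_name(name) when is_binary(name) do
      name
      |> String.downcase()
      |> String.replace(~r/[^\p{L}\p{N}_.\-]+/u, "-")
      |> String.trim("-")
    end
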
diff --git a/test/lightning/tokens_test.exs b/test/lightning/tokens_test.exs
new file mode 100644
index 0000000000..ce09342727
--- /dev/null
+++ b/test/lightning/tokens_test.exs
@@ -0,0 +1,107 @@
+defmodule Lightning.TokensTest do
+ use Lightning.DataCase, async: true
+
+ import Lightning.Factories
+
+ alias Lightning.Tokens
+
+ setup do
+ Mox.stub_with(Lightning.MockConfig, Lightning.Config.API)
+ Mox.stub_with(LightningMock, Lightning.Stub)
+
+ :ok
+ end
+
+ describe "UserToken" do
+ test "verify a valid token" do
+ user = insert(:user)
+
+ Lightning.Stub.freeze_time(~U[2024-01-01 00:00:00Z])
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ assert {:ok, claims} = Tokens.verify(token)
+
+ expected_sub = "user:#{user.id}"
+
+ assert %{
+ "iss" => "Lightning",
+ "iat" => 1_704_067_200,
+ "sub" => ^expected_sub,
+ "jti" => _
+ } = claims
+ end
+
+ test "verify a forged/invalid token" do
+ user = insert(:user)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ replace_signer(:token_signer)
+
+ assert {:error, :signature_error} = Tokens.verify(token)
+ end
+
+ test "retrieving the subject from the token" do
+ Lightning.Stub.freeze_time(DateTime.utc_now())
+ user = insert(:user)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ assert {:ok, claims} = Tokens.verify(token)
+
+ assert Tokens.get_subject(claims) == user |> Repo.reload!()
+ end
+ end
+
+ describe "RunToken" do
+ test "verify a valid token" do
+ Lightning.Stub.freeze_time(~U[2024-01-01 00:00:00Z])
+
+ token =
+ Lightning.Workers.generate_run_token(%{id: run_id = Ecto.UUID.generate()})
+
+ assert {:ok, claims} = Tokens.verify(token)
+
+ assert %{
+ "exp" => 1_704_067_270,
+ "id" => run_id,
+ "iss" => "Lightning",
+ "nbf" => 1_704_067_200,
+ "sub" => "run:#{run_id}"
+ } == claims
+ end
+
+ test "verify a forged/invalid token" do
+ token =
+ Lightning.Workers.generate_run_token(%{id: Ecto.UUID.generate()})
+
+ replace_signer(:run_token_signer)
+
+ assert {:error, :signature_error} = Tokens.verify(token)
+ end
+
+ test "verify an expired token" do
+ Lightning.Stub.freeze_time(~U[2024-01-01 00:00:00Z])
+
+ token =
+ Lightning.Workers.generate_run_token(%{id: Ecto.UUID.generate()})
+
+ Lightning.Stub.freeze_time(~U[2024-02-01 00:00:00Z])
+
+ assert {
+ :error,
+ [message: "Invalid token", claim: "exp", claim_val: 1_704_067_270]
+ } = Tokens.verify(token)
+ end
+ end
+
+ # Generate a new RSA key pair that is different from the one the token was
+ # signed with, so signature verification fails.
+ defp replace_signer(key) do
+ Mox.stub(Lightning.MockConfig, key, fn ->
+ {pvt, _pub} = Lightning.Utils.Crypto.generate_rsa_key_pair()
+ Joken.Signer.create("RS256", %{"pem" => pvt})
+ end)
+ end
+end
diff --git a/test/lightning/workers_test.exs b/test/lightning/workers_test.exs
index a19b067712..9ef97d93d0 100644
--- a/test/lightning/workers_test.exs
+++ b/test/lightning/workers_test.exs
@@ -1,24 +1,26 @@
defmodule Lightning.WorkersTest do
use ExUnit.Case, async: true
- alias Lightning.Workers.Token
+ alias Lightning.Workers.RunToken
+ alias Lightning.Workers.WorkerToken
setup do
Mox.stub_with(LightningMock, Lightning.API)
+ Mox.stub_with(Lightning.MockConfig, Lightning.Config.API)
- :ok
+ %{run_token_signer: Lightning.Config.run_token_signer()}
end
- describe "Token" do
+ describe "WorkerToken" do
test "can generate a token" do
{:ok, token, claims} =
- Token.generate_and_sign(%{"id" => id = Ecto.UUID.generate()})
+ WorkerToken.generate_and_sign(%{"id" => id = Ecto.UUID.generate()})
assert %{"id" => ^id, "nbf" => nbf} = claims
- assert nbf <= DateTime.utc_now() |> DateTime.to_unix()
+ assert nbf <= Lightning.current_time() |> DateTime.to_unix()
assert token != ""
- assert {:ok, claims} = Token.verify(token)
+ assert {:ok, claims} = WorkerToken.verify(token)
assert {:error,
[
@@ -26,9 +28,47 @@ defmodule Lightning.WorkersTest do
{:claim, "nbf"},
{:claim_val, _time}
]} =
- Token.validate(claims, %{
+ WorkerToken.validate(claims, %{
current_time: DateTime.utc_now() |> DateTime.add(-5, :second)
})
end
end
+
+ describe "RunToken" do
+ test "can generate a token", %{run_token_signer: run_token_signer} do
+ {:ok, token, claims} =
+ RunToken.generate_and_sign(
+ %{"id" => id = Ecto.UUID.generate()},
+ run_token_signer
+ )
+
+ assert %{"id" => ^id, "nbf" => nbf} = claims
+ assert nbf <= Lightning.current_time() |> DateTime.to_unix()
+ assert token != ""
+
+ assert {:ok, ^claims} =
+ RunToken.verify(token, run_token_signer)
+ end
+
+ test "validating with a run_id" do
+ {:ok, claims} =
+ RunToken.generate_claims(%{"id" => id = Ecto.UUID.generate()})
+
+ assert {:ok, ^claims} =
+ RunToken.validate(claims, %{
+ id: id,
+ current_time: Lightning.current_time()
+ })
+ end
+
+ test "validating without a run_id" do
+ {:ok, claims} =
+ RunToken.generate_claims(%{"id" => _id = Ecto.UUID.generate()})
+
+ assert {:ok, ^claims} =
+ RunToken.validate(claims, %{
+ current_time: Lightning.current_time()
+ })
+ end
+ end
end
diff --git a/test/lightning_web/channels/run_channel_test.exs b/test/lightning_web/channels/run_channel_test.exs
index 74af8e9bb0..6cdfb386ce 100644
--- a/test/lightning_web/channels/run_channel_test.exs
+++ b/test/lightning_web/channels/run_channel_test.exs
@@ -59,7 +59,7 @@ defmodule LightningWeb.RunChannelTest do
# A valid token, but nbf hasn't been reached yet
{:ok, bearer, _} =
- Workers.Token.generate_and_sign(
+ Workers.WorkerToken.generate_and_sign(
%{
"nbf" =>
DateTime.utc_now()
@@ -84,7 +84,9 @@ defmodule LightningWeb.RunChannelTest do
other_id = Ecto.UUID.generate()
bearer =
- Workers.generate_run_token(%{id: id}, run_timeout_ms: 1000)
+ Workers.generate_run_token(%{id: id}, %{
+ run_timeout_ms: 1000
+ })
assert {:error, %{reason: "unauthorized"}} =
socket
@@ -99,7 +101,7 @@ defmodule LightningWeb.RunChannelTest do
id = Ecto.UUID.generate()
bearer =
- Workers.generate_run_token(%{id: id}, run_timeout_ms: 1000)
+ Workers.generate_run_token(%{id: id}, %{run_timeout_ms: 1000})
assert {:error, %{reason: "not_found"}} =
socket
@@ -567,6 +569,7 @@ defmodule LightningWeb.RunChannelTest do
Lightning.Extensions.MockUsageLimiter.get_run_options(%Context{
project_id: project.id
})
+ |> Enum.into(%{})
{:ok, _, socket} =
context.socket
@@ -1379,7 +1382,7 @@ defmodule LightningWeb.RunChannelTest do
defp create_socket(context) do
{:ok, bearer, claims} =
- Workers.Token.generate_and_sign(
+ Workers.WorkerToken.generate_and_sign(
%{},
Lightning.Config.worker_token_signer()
)
@@ -1403,7 +1406,12 @@ defmodule LightningWeb.RunChannelTest do
|> subscribe_and_join(
LightningWeb.RunChannel,
"run:#{run.id}",
- %{"token" => Workers.generate_run_token(run, run_timeout_ms: 2)}
+ %{
+ "token" =>
+ Workers.generate_run_token(run, %Lightning.Runs.RunOptions{
+ run_timeout_ms: 2
+ })
+ }
)
%{socket: socket}
diff --git a/test/lightning_web/channels/worker_channel_test.exs b/test/lightning_web/channels/worker_channel_test.exs
index 3e790ad424..abebafbb53 100644
--- a/test/lightning_web/channels/worker_channel_test.exs
+++ b/test/lightning_web/channels/worker_channel_test.exs
@@ -22,7 +22,7 @@ defmodule LightningWeb.WorkerChannelTest do
describe "worker:queue channel" do
setup do
{:ok, bearer, claims} =
- Workers.Token.generate_and_sign(
+ Workers.WorkerToken.generate_and_sign(
%{},
Lightning.Config.worker_token_signer()
)
@@ -82,7 +82,9 @@ defmodule LightningWeb.WorkerChannelTest do
ref = push(socket, "claim", %{"demand" => 4})
assert_reply ref, :ok, %{runs: runs}
- assert runs |> Enum.map(& &1["id"]) == rest |> Enum.map(& &1.id)
+ assert runs |> Enum.map(& &1["id"]) |> MapSet.new() ==
+ rest |> Enum.map(& &1.id) |> MapSet.new()
+
assert length(runs) == 2, "only 2 runs should be returned"
end
end
diff --git a/test/lightning_web/channels/worker_socket_test.exs b/test/lightning_web/channels/worker_socket_test.exs
index ad42144d5a..66acd5cc7c 100644
--- a/test/lightning_web/channels/worker_socket_test.exs
+++ b/test/lightning_web/channels/worker_socket_test.exs
@@ -12,7 +12,7 @@ defmodule LightningWeb.WorkerSocketTest do
test "with a valid token" do
{:ok, bearer, _} =
- Workers.Token.generate_and_sign(
+ Workers.WorkerToken.generate_and_sign(
%{},
Lightning.Config.worker_token_signer()
)
diff --git a/test/lightning_web/collections_controller_test.exs b/test/lightning_web/collections_controller_test.exs
new file mode 100644
index 0000000000..9d9a9aad43
--- /dev/null
+++ b/test/lightning_web/collections_controller_test.exs
@@ -0,0 +1,725 @@
+defmodule LightningWeb.API.CollectionsControllerTest do
+ use LightningWeb.ConnCase, async: true
+
+ import Lightning.Factories
+
+ alias Lightning.Collections
+
+ @default_limit Application.compile_env!(
+ :lightning,
+ LightningWeb.CollectionsController
+ )[
+ :stream_limit
+ ]
+
+ setup %{conn: conn} do
+ {:ok, conn: put_req_header(conn, "accept", "application/json")}
+ end
+
+ test "without a token", %{conn: conn} do
+ conn = get(conn, ~p"/collections/foo")
+
+ assert %{"error" => "Unauthorized"} == json_response(conn, 401)
+ end
+
+ describe "authenticating with a run token" do
+ test "with a token that is invalid", %{conn: conn} do
+ workflow = insert(:simple_workflow)
+ workorder = insert(:workorder, dataclip: insert(:dataclip))
+
+ collection = insert(:collection, project: workflow.project)
+
+ run =
+ insert(:run,
+ work_order: workorder,
+ dataclip: workorder.dataclip,
+ starting_trigger: workflow.triggers |> hd()
+ )
+
+ token = Lightning.Workers.generate_run_token(run)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 401) == %{"error" => "Unauthorized"}
+ end
+ end
+
+ describe "authenticating with a personal access token" do
+ test "with a project they don't have access to", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [])
+
+ collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 401) == %{"error" => "Unauthorized"}
+ end
+ end
+
+ describe "GET /collections/:name/:key" do
+ test "returns the item", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: [%{key: "foo", value: "bar"}]
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}/foo")
+
+ item = hd(collection.items)
+
+ assert json_response(conn, 200) == %{
+ "key" => item.key,
+ "value" => item.value,
+ "created" => DateTime.to_iso8601(item.inserted_at),
+ "updated" => DateTime.to_iso8601(item.updated_at)
+ }
+ end
+
+ test "returns 404 when the collection doesn't exist", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ _another_collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/misspelled-collection/foo")
+
+ assert json_response(conn, 404) == %{"error" => "Not Found"}
+ end
+
+ test "returns 404 when the item doesn't exist", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}/some-unexisting-key")
+
+ assert json_response(conn, 204) == nil
+ end
+ end
+
+ describe "PUT /collections/:name/:key" do
+ test "inserts an item", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: [%{key: "foo", value: "bar"}]
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> put(~p"/collections/#{collection.name}/baz", value: "qux")
+
+ assert json_response(conn, 200) == %{
+ "upserted" => 1,
+ "error" => nil
+ }
+ end
+
+ test "updates an item", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: [%{key: "foo", value: "bar"}]
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> put(~p"/collections/#{collection.name}/foo", %{value: "qux2"})
+
+ assert json_response(conn, 200) == %{
+ "upserted" => 1,
+ "error" => nil
+ }
+ end
+
+ test "returns 404 when the collection doesn't exist", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ _another_collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> post(~p"/collections/misspelled-collection/baz", value: "qux")
+
+ assert json_response(conn, 404) == %{"error" => "Not Found"}
+ end
+ end
+
+ describe "POST /collections/:name" do
+ test "upserted multiple items", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: Enum.map(1..3, &%{key: "foo#{&1}", value: "bar#{&1}"})
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> post(~p"/collections/#{collection.name}", %{
+ items: Enum.map(1..10, &%{key: "foo#{&1}", value: "bar#{&1}"})
+ })
+
+ assert json_response(conn, 200) == %{
+ "upserted" => 10,
+ "error" => nil
+ }
+ end
+
+ test "returns 404 when the collection doesn't exist", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ _another_collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> post(~p"/collections/misspelled-collection", %{
+ items: [%{key: "baz", value: "qux"}]
+ })
+
+ assert json_response(conn, 404) == %{"error" => "Not Found"}
+ end
+ end
+
+ describe "DELETE" do
+ test "deletes an item", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: [%{key: "foo", value: "bar"}]
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> delete(~p"/collections/#{collection.name}/foo")
+
+ assert json_response(conn, 200) == %{
+ "key" => "foo",
+ "deleted" => 1,
+ "error" => nil
+ }
+ end
+
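+ # "*" acts as a wildcard in key filters, so "foo:*:bar*" below matches the
+ # first and third items but not "foo:234:boo".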
+ test "deletes matching items", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: [
+ %{key: "foo:123:bar1", value: "value1"},
+ %{key: "foo:234:boo", value: "value2"},
+ %{key: "foo:345:bar2", value: "value3"}
+ ]
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> delete(~p"/collections/#{collection.name}", key: "foo:*:bar*")
+
+ assert json_response(conn, 200) == %{
+ "key" => "foo:*:bar*",
+ "deleted" => 2,
+ "error" => nil
+ }
+ end
+
+ test "returns 404 when the collection doesn't exist", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ _another_collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> delete(~p"/collections/misspelled-collection/foo")
+
+ assert json_response(conn, 404) == %{"error" => "Not Found"}
+ end
+ end
+
+ describe "GET /collections/:name" do
+ test "with no results", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection = insert(:collection, project: project)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 200) == %{"items" => [], "cursor" => nil}
+ end
+
+ test "using a key pattern", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection = insert(:collection, project: project)
+
+ insert(:collection_item, collection: collection, key: "foo:bar:baz")
+ insert(:collection_item, collection: collection, key: "foo:bar:baz:quux")
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn = assign_bearer(conn, token)
+
+ assert %{state: :chunked} =
+ conn =
+ get(
+ conn,
+ ~p"/collections/#{collection.name}?#{%{key: "foo:bar:*"}}"
+ )
+
+ assert %{
+ "items" => [
+ %{"key" => "foo:bar:baz", "value" => _},
+ %{"key" => "foo:bar:baz:quux", "value" => _}
+ ],
+ "cursor" => nil
+ } = json_response(conn, 200)
+
+ assert %{state: :chunked} =
+ conn =
+ get(
+ conn,
+ ~p"/collections/#{collection.name}?#{%{key: "foo:*:baz"}}"
+ )
+
+ assert %{
+ "items" => [%{"key" => "foo:bar:baz", "value" => _}],
+ "cursor" => nil
+ } = json_response(conn, 200)
+ end
+
+ test "using a key pattern and creation filters", %{conn: conn} do
+ user = insert(:user)
+
+ project =
+ insert(:project, project_users: [%{user: user}])
+
+ collection = insert(:collection, project: project)
+
+ before_insert = DateTime.utc_now() |> DateTime.add(-1, :microsecond)
+
+ insert(:collection_item, collection: collection, key: "foo:bar:baz")
+ insert(:collection_item, collection: collection, key: "foo:moon:baz")
+ insert(:collection_item, collection: collection, key: "foo:bar:baz:out")
+
+ after_insert = DateTime.utc_now() |> DateTime.add(1, :microsecond)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn = assign_bearer(conn, token)
+
+ assert %{
+ "items" => [],
+ "cursor" => nil
+ } =
+ conn
+ |> get(~p"/collections/#{collection.name}",
+ key: "foo:*:baz",
+ created_after: after_insert
+ )
+ |> json_response(200)
+
+ assert %{
+ "items" => [],
+ "cursor" => nil
+ } =
+ conn
+ |> get(~p"/collections/#{collection.name}",
+ key: "foo:*:baz",
+ created_before: before_insert
+ )
+ |> json_response(200)
+
+ assert %{
+ "items" => [
+ %{"key" => "foo:bar:baz", "value" => _},
+ %{"key" => "foo:moon:baz", "value" => _}
+ ],
+ "cursor" => nil
+ } =
+ conn
+ |> get(~p"/collections/#{collection.name}",
+ key: "foo:*:baz",
+ created_after: before_insert,
+ created_before: after_insert
+ )
+ |> json_response(200)
+ end
+
+ test "up exactly to the limit", %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: insert_list(3, :collection_item)
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}")
+
+ assert conn.state == :chunked
+
+ expected_items =
+ collection.items
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ assert json_response(conn, 200) == %{
+ "items" => expected_items,
+ "cursor" => nil
+ }
+ end
+
+ test "using creation filters", %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+
+ before_insert = DateTime.utc_now() |> DateTime.add(-1, :microsecond)
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: insert_list(3, :collection_item)
+ )
+
+ after_insert = DateTime.utc_now() |> DateTime.add(1, :microsecond)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn = assign_bearer(conn, token)
+
+ assert %{
+ "items" => [],
+ "cursor" => nil
+ } =
+ conn
+ |> get(~p"/collections/#{collection.name}",
+ created_after: after_insert
+ )
+ |> json_response(200)
+
+ assert %{
+ "items" => [],
+ "cursor" => nil
+ } =
+ conn
+ |> get(~p"/collections/#{collection.name}",
+ created_before: before_insert
+ )
+ |> json_response(200)
+
+ items =
+ collection.items
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ assert %{
+ "items" => ^items,
+ "cursor" => nil
+ } =
+ conn
+ |> get(~p"/collections/#{collection.name}")
+ |> json_response(200)
+ end
+
+ test "returns items up to a custom limit", %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ limit = 10
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: insert_list(limit + 1, :collection_item)
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}", limit: limit)
+
+ assert conn.state == :chunked
+
+ expected_items =
+ collection.items
+ |> Enum.take(limit)
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ last_item =
+ collection.items
+ |> Enum.take(limit)
+ |> List.last()
+
+ assert json_response(conn, 200) == %{
+ "items" => expected_items,
+ "cursor" =>
+ Base.encode64(DateTime.to_iso8601(last_item.inserted_at))
+ }
+ end
+ end
+
+ describe "GET /collections/:name with cursors" do
+ test "up to the limit and returning a cursor", %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, project: project)
+
+ items =
+ insert_list(@default_limit + 1, :collection_item, collection: collection)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{collection.name}")
+
+ assert conn.state == :chunked
+
+ expected_items = Enum.take(items, @default_limit)
+ last_item = List.last(expected_items)
+
+ assert %{
+ "items" => items,
+ "cursor" => cursor
+ } = json_response(conn, 200)
+
+ assert response_items ==
+ expected_items
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
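+ # The cursor is the Base64-encoded ISO8601 inserted_at of the last item in the page.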
+ assert cursor == Base.encode64(DateTime.to_iso8601(last_item.inserted_at))
+ end
+
+ test "up to the limit from a cursor returning a cursor", %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, project: project)
+ limit = 100
+
+ all_items =
+ Enum.map(1..(2 * limit + 1), fn i ->
+ insert(:collection_item,
+ collection: collection,
+ key: "key#{i}",
+ value: "value#{i}"
+ )
+ end)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn = assign_bearer(conn, token)
+
+ assert %{state: :chunked} =
+ conn =
+ get(conn, ~p"/collections/#{collection.name}", limit: limit)
+
+ expected_items =
+ all_items
+ |> Enum.take(limit)
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ assert %{
+ "items" => ^expected_items,
+ "cursor" => cursor
+ } = json_response(conn, 200)
+
+ assert %{state: :chunked} =
+ conn =
+ get(conn, ~p"/collections/#{collection.name}",
+ cursor: cursor,
+ limit: limit
+ )
+
+ expected_items =
+ all_items
+ |> Enum.drop(limit)
+ |> Enum.take(limit)
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ assert %{
+ "items" => ^expected_items,
+ "cursor" => cursor
+ } = json_response(conn, 200)
+
+ %{inserted_at: last_inserted_at} =
+ Repo.get_by(Collections.Item,
+ collection_id: collection.id,
+ key: List.last(expected_items)["key"]
+ )
+
+ assert {:ok, ^last_inserted_at, 0} =
+ cursor |> Base.decode64!() |> DateTime.from_iso8601()
+ end
+
+ test "up exactly to the limit from a cursor", %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, project: project)
+ limit = 100
+
+ all_items =
+ Enum.map(1..(2 * limit), fn i ->
+ insert(:collection_item,
+ collection: collection,
+ key: "key#{i}",
+ value: "value#{i}"
+ )
+ end)
+
+ token = Lightning.Accounts.generate_api_token(user)
+
+ conn = assign_bearer(conn, token)
+
+ assert %{state: :chunked} =
+ conn =
+ get(conn, ~p"/collections/#{collection.name}", limit: limit)
+
+ expected_items =
+ all_items
+ |> Enum.take(limit)
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ assert %{
+ "items" => response_items,
+ "cursor" => cursor
+ } = json_response(conn, 200)
+
+ assert response_items == expected_items
+
+ assert %{state: :chunked} =
+ conn =
+ get(conn, ~p"/collections/#{collection.name}",
+ cursor: cursor,
+ limit: limit
+ )
+
+ expected_items =
+ all_items
+ |> Enum.drop(limit)
+ |> Enum.map(&Jason.encode!/1)
+ |> Enum.map(&Jason.decode!/1)
+
+ assert %{
+ "items" => ^expected_items,
+ "cursor" => nil
+ } = json_response(conn, 200)
+ end
+ end
+end
diff --git a/test/lightning_web/live/collection_live_test.exs b/test/lightning_web/live/collection_live_test.exs
new file mode 100644
index 0000000000..a745e08a43
--- /dev/null
+++ b/test/lightning_web/live/collection_live_test.exs
@@ -0,0 +1,195 @@
+defmodule LightningWeb.CollectionLiveTest do
+ use LightningWeb.ConnCase, async: true
+
+ import Phoenix.LiveViewTest
+ import Lightning.Factories
+
+ describe "Index as a regular user" do
+ setup :register_and_log_in_user
+
+ test "Regular user cannot access the collections page", %{conn: conn} do
+ {:ok, _view, html} =
+ live(conn, ~p"/settings/collections")
+ |> follow_redirect(conn, ~p"/projects")
+
+ assert html =~ "No Access"
+ end
+ end
+
+ describe "Index as a superuser" do
+ setup :register_and_log_in_superuser
+
+ test "Superuser can access the collections page", %{conn: conn} do
+ {:ok, _view, html} = live(conn, ~p"/settings/collections")
+
+ assert html =~ "Collections"
+ assert html =~ "No collection found. Create a new one."
+ end
+
+ test "Collections are listed for superuser", %{conn: conn} do
+ collection_1 =
+ insert(:collection,
+ name: "Collection A",
+ project: build(:project, name: "Project A")
+ )
+
+ collection_2 =
+ insert(:collection,
+ name: "Collection B",
+ project: build(:project, name: "Project B")
+ )
+
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ assert has_element?(view, "tr#collections-table-row-#{collection_1.id}")
+ assert has_element?(view, "tr#collections-table-row-#{collection_2.id}")
+ end
+
+ test "Collections can be sorted by name for superuser", %{conn: conn} do
+ insert(:collection, name: "B Collection")
+ insert(:collection, name: "A Collection")
+
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ sorted_names = get_sorted_collection_names(view)
+ assert sorted_names == ["A Collection", "B Collection"]
+
+ view |> element("span[phx-click='sort']") |> render_click()
+ sorted_names = get_sorted_collection_names(view)
+ assert sorted_names == ["B Collection", "A Collection"]
+ end
+
+ test "Superuser can delete a collection", %{conn: conn} do
+ collection = insert(:collection, name: "Delete Me")
+
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ assert has_element?(view, "tr#collections-table-row-#{collection.id}")
+
+ {:ok, view, html} =
+ view
+ |> element("#delete-collection-#{collection.id}-modal_confirm_button")
+ |> render_click()
+ |> follow_redirect(conn, ~p"/settings/collections")
+
+ assert html =~ "Collection deleted successfully"
+
+ refute has_element?(view, "tr#collections-table-row-#{collection.id}")
+ end
+
+ test "Superuser can create a collection via the modal", %{
+ conn: conn,
+ user: user
+ } do
+ project = insert(:project, project_users: [%{user: user}])
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ assert has_element?(view, "#collection-form-new")
+
+ view
+ |> form("#collection-form-new", collection: %{raw_name: "New Collection"})
+ |> render_change()
+
+ assert has_element?(view, "input[type='text'][value='new-collection']")
+
+ {:ok, _view, html} =
+ view
+ |> form("#collection-form-new",
+ collection: %{raw_name: "New Collection", project_id: project.id}
+ )
+ |> render_submit()
+ |> follow_redirect(conn, ~p"/settings/collections")
+
+ assert html =~ "Collection created successfully"
+ assert html =~ "new-collection"
+ assert html =~ project.name
+ end
+
+ test "Canceling collection creation modal closes the modal", %{conn: conn} do
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ assert has_element?(view, "#collection-form-new")
+
+ view
+ |> form("#collection-form-new", collection: %{raw_name: "New Collection"})
+ |> render_change()
+
+ {:ok, _view, html} =
+ view
+ |> element("#cancel-collection-creation-new")
+ |> render_click()
+ |> follow_redirect(conn, ~p"/settings/collections")
+
+ refute html =~ "new-collection"
+ end
+
+ test "Superuser can update a collection via the modal", %{
+ conn: conn,
+ user: user
+ } do
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, name: "Old Collection", project: project)
+
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ assert has_element?(view, "#collection-form-#{collection.id}")
+
+ view
+ |> form("#collection-form-#{collection.id}",
+ collection: %{raw_name: "Updated Collection"}
+ )
+ |> render_change()
+
+ assert has_element?(view, "input[type='text'][value='updated-collection']")
+
+ {:ok, _view, html} =
+ view
+ |> form("#collection-form-#{collection.id}",
+ collection: %{raw_name: "Updated Collection", project_id: project.id}
+ )
+ |> render_submit()
+ |> follow_redirect(conn, ~p"/settings/collections")
+
+ assert html =~ "Collection updated successfully"
+ assert html =~ "updated-collection"
+ assert html =~ project.name
+ end
+
+ test "Creating a collection with a name that already exists fails", %{
+ conn: conn,
+ user: user
+ } do
+ project = insert(:project, project_users: [%{user: user}])
+
+ collection = insert(:collection, name: "duplicate-name", project: project)
+
+ {:ok, view, _html} = live(conn, ~p"/settings/collections")
+
+ assert has_element?(view, "#collection-form-new")
+
+ view
+ |> form("#collection-form-new",
+ collection: %{raw_name: collection.name, project_id: project.id}
+ )
+ |> render_change()
+
+ assert view
+ |> form("#collection-form-new")
+ |> render_submit() =~ "A collection with this name already exists"
+ end
+ end
+
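+ # Renders the view and returns the trimmed text of the first column (the
+ # collection name) for each non-empty row of the collections table.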
+ defp get_sorted_collection_names(view) do
+ html = render(view)
+
+ html
+ |> Floki.parse_document!()
+ |> Floki.find("#collections-table tr")
+ |> Enum.map(fn tr ->
+ Floki.find(tr, "td:nth-child(1)")
+ |> Floki.text()
+ |> String.trim()
+ end)
+ |> Enum.filter(fn name -> String.length(name) > 0 end)
+ end
+end
diff --git a/test/lightning_web/live/tokens_live_test.exs b/test/lightning_web/live/tokens_live_test.exs
index c11fad0011..516bb3302c 100644
--- a/test/lightning_web/live/tokens_live_test.exs
+++ b/test/lightning_web/live/tokens_live_test.exs
@@ -49,7 +49,7 @@ defmodule LightningWeb.TokensLiveTest do
|> Floki.parse_fragment!()
|> Floki.attribute("value")
|> Floki.text()
- |> String.length() == 275
+ |> String.length() == 539
end
test "See a list of tokens", %{conn: conn} do
diff --git a/test/support/conn_case.ex b/test/support/conn_case.ex
index 945ea06f2d..3306b2867f 100644
--- a/test/support/conn_case.ex
+++ b/test/support/conn_case.ex
@@ -27,6 +27,7 @@ defmodule LightningWeb.ConnCase do
import Plug.Conn
import Phoenix.ConnTest
import LightningWeb.ConnCase
+ import LightningWeb.ConnHelpers
alias LightningWeb.Router.Helpers, as: Routes
alias Lightning.Repo
diff --git a/test/support/conn_helpers.ex b/test/support/conn_helpers.ex
new file mode 100644
index 0000000000..e4cb38e605
--- /dev/null
+++ b/test/support/conn_helpers.ex
@@ -0,0 +1,5 @@
+defmodule LightningWeb.ConnHelpers do
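+ # Puts an "authorization: Bearer <token>" request header on the conn for API tests.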
+ def assign_bearer(conn, token) do
+ conn |> Plug.Conn.put_req_header("authorization", "Bearer #{token}")
+ end
+end
diff --git a/test/support/factories.ex b/test/support/factories.ex
index 2fa524a33f..a43ba3226e 100644
--- a/test/support/factories.ex
+++ b/test/support/factories.ex
@@ -318,6 +318,26 @@ defmodule Lightning.Factories do
}
end
+ def collection_factory do
+ %Lightning.Collections.Collection{
+ project: build(:project),
+ name: sequence(:name, &"collection-#{&1}")
+ }
+ end
+
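+ # inserted_at gets a sequential microsecond offset so each item has a
+ # distinct, increasing timestamp (useful for cursor-pagination tests).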
+ def collection_item_factory do
+ %Lightning.Collections.Item{
+ collection: build(:collection),
+ key: sequence(:key, &"key-#{&1}"),
+ value: sequence(:value, &"value-#{&1}"),
+ inserted_at:
+ sequence(
+ :inserted_at,
+ &DateTime.add(DateTime.utc_now(), &1, :microsecond)
+ )
+ }
+ end
+
# ----------------------------------------------------------------------------
# Helpers
# ----------------------------------------------------------------------------
@@ -345,7 +365,7 @@ defmodule Lightning.Factories do
sequence(:timestamp, fn i ->
DateTime.utc_now()
|> DateTime.add(ago, scale)
- |> DateTime.add(i * gap, :second)
+ |> DateTime.add(i * gap, scale)
end)
end