diff --git a/.github/workflows/deploy-website.yml b/.github/workflows/deploy-website.yml
index 8fa6cab..19b6fee 100644
--- a/.github/workflows/deploy-website.yml
+++ b/.github/workflows/deploy-website.yml
@@ -16,6 +16,29 @@ jobs:
     steps:
       - uses: actions/checkout@v3
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v3
+        with:
+          dotnet-version: '8.0.x'
+
+      - run: dotnet tool install --global gsferreira.XmlDocMarkdown.Docusaurus --version 0.0.1-beta2 # pinned to this version until this upstream pull request is merged: https://github.com/ejball/XmlDocMarkdown/pull/126
+        shell: bash
+
+      - name: .NET Publish
+        run: dotnet publish LoadShedding.sln -c Release -o ./drop
+
+      - run: dotnet tool list --global
+        shell: bash
+
+      - run: xmldocmd-docusaurus ./drop/Farfetch.LoadShedding.dll website/docs/reference/Farfetch.LoadShedding --type-folders
+        shell: bash
+
+      - run: xmldocmd-docusaurus ./drop/Farfetch.LoadShedding.AspNetCore.dll website/docs/reference/Farfetch.LoadShedding.AspNetCore --type-folders
+        shell: bash
+
+      - run: xmldocmd-docusaurus ./drop/Farfetch.LoadShedding.Prometheus.dll website/docs/reference/Farfetch.LoadShedding.Prometheus --type-folders
+        shell: bash
+
       - uses: actions/setup-node@v3
         with:
           node-version: 18
@@ -34,4 +57,4 @@ jobs:
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: ./website/build
\ No newline at end of file
+ publish_dir: ./website/build
diff --git a/.github/workflows/test-deploy-website.yml b/.github/workflows/test-deploy-website.yml
index e4375d2..bb5cc2d 100644
--- a/.github/workflows/test-deploy-website.yml
+++ b/.github/workflows/test-deploy-website.yml
@@ -10,6 +10,29 @@ jobs:
     steps:
       - uses: actions/checkout@v3
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v3
+        with:
+          dotnet-version: '8.0.x'
+
+      - run: dotnet tool install --global gsferreira.XmlDocMarkdown.Docusaurus --version 0.0.1-beta2 # pinned to this version until this upstream pull request is merged: https://github.com/ejball/XmlDocMarkdown/pull/126
+        shell: bash
+
+      - name: .NET Publish
+        run: dotnet publish LoadShedding.sln -c Release -o ./drop
+
+      - run: dotnet tool list --global
+        shell: bash
+
+      - run: xmldocmd-docusaurus ./drop/Farfetch.LoadShedding.dll website/docs/reference/Farfetch.LoadShedding --type-folders
+        shell: bash
+
+      - run: xmldocmd-docusaurus ./drop/Farfetch.LoadShedding.AspNetCore.dll website/docs/reference/Farfetch.LoadShedding.AspNetCore --type-folders
+        shell: bash
+
+      - run: xmldocmd-docusaurus ./drop/Farfetch.LoadShedding.Prometheus.dll website/docs/reference/Farfetch.LoadShedding.Prometheus --type-folders
+        shell: bash
+
       - uses: actions/setup-node@v3
         with:
           node-version: 18
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 26e871b..098a516 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -33,6 +33,14 @@ Here are a few things you can do that will increase the likelihood of your pull
You can find a Makefile with steps for running using docker on the repository root.
+## Running GitHub workflows locally
+
+You can build and test the GitHub Actions workflows locally with [act](https://github.com/nektos/act) (see the example below):
+- Follow the [installation instructions](https://github.com/nektos/act?tab=readme-ov-file#installation) to install act
+- To list the available jobs: `act -l`
+- To run the default (push) event: `act`
+- To run a specific job: `act -j build-test`
+
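+For example, to exercise the website workflows touched in this repository (a sketch; it assumes `test-deploy-website.yml` is triggered by the `pull_request` event):
+
+```bash
+# list the jobs defined in a specific workflow file
+act -l -W .github/workflows/test-deploy-website.yml
+
+# run that workflow for the pull_request event
+act pull_request -W .github/workflows/test-deploy-website.yml
+```
+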
## Resources
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
@@ -43,4 +51,4 @@ You can find a Makefile with steps for running using docker on the repository ro
By sending us your contributions, you are agreeing that your contribution is made subject to the terms of our [Contributor Ownership Statement](https://github.com/Farfetch/.github/blob/master/COS.md)
-[code-of-conduct]: CODE_OF_CONDUCT.md
\ No newline at end of file
+[code-of-conduct]: CODE_OF_CONDUCT.md
diff --git a/README.md b/README.md
index e285715..2a90001 100644
--- a/README.md
+++ b/README.md
@@ -1,125 +1,34 @@
-# LoadShedding
+# LoadShedding · [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/farfetch/loadshedding/blob/main/LICENSE) [![nuget version](https://img.shields.io/nuget/v/Farfetch.LoadShedding.svg?style=flat)](https://www.nuget.org/packages/Farfetch.LoadShedding/) ![Build Master](https://github.com/Farfetch/loadshedding/workflows/Build/badge.svg?branch=master)
-This library provides a set of capabilities to enable the service to deal with requests overload and resource limits to avoid outages and to ensure the best usage of the service capacity.
+## Introduction
-## Documentation
-
-
-Getting Started
-
-- [Installation](docs/getting_started/installation.md)
-- Register Limits
- - [Concurrency Adaptative Limiter](docs/getting_started/register_concurrency_adaptative_limiter.md)
-- [Samples](samples)
-
-
-
-## Samples
-
-For sample projects showcasing the application types, dependencies and features please check the [samples](samples/) folder.
-
-## Contributing
-
-Read our [contributing guidelines](CONTRIBUTING.md) to learn about our development process, how to propose bugfixes and improvements, and how to build and test your changes.
-
-### Requirements
-
-- Makefile
-- .NET 6
-- Docker
-- Docker Compose
-
-#### Makefile - Simplify Tests and Build commands
-
-**Important: You must have the Make app installed and configured in your local environment.**
-
-With the makefile present in the Load-Shedding, the teams or engineers who want to contribute to the development of the Load-Shedding library, have a simplified way to run the tests and build the project, the commands are shown below and will wrap and use the same commands that the ones used in the pipeline.
-
-Note: You should be in the root folder of the repository locally, to allow you run the commands.
+⚡️ LoadShedding was designed to effectively manage concurrency in the .NET ecosystem.
-### Building
+🏗 Built upon the foundational principles and concepts pioneered by [Netflix](https://netflixtechblog.medium.com/performance-under-load-3e6fa9a60581).
-To build the solution file, you can simply use this command.
+🎁 Offers an out-of-the-box Adaptative Concurrency Limiter implementation.
-```bash
-make build
-```
+🎁 Offers additional features like metrics, request prioritization, and configuration flexibility.
-#### Testing (Unit)
+## Installation
-To run all the tests, you can simply use this command.
+[Read the docs](https://farfetch.github.io/loadshedding/docs/getting-started/installation) for further information.
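+
+For the common ASP.NET Core scenario, installation is a single package reference (the same command shown in the getting-started guide):
+
+```bash
+dotnet add package Farfetch.LoadShedding.AspNetCore
+```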
-```bash
-make unit-tests
-```
-
-The command will build the solution and then run all the tests present and marked as Unit tests.
-
-#### Testing (Integration)
-
-To run all the integration tests, you can simply use this command.
-
-```bash
-make integration-tests
-```
-
-The command will build the solution and then run all the integration tests.
-
-### Benchmark
-
-Below, it is possible to see a benchmark analysis of the concurrency control mechanism, for this test multiple scenarios were created:
-
-- **Limiter_Default:** Directly tests the AdaptiveConcurrencyLimiter with default priority;
-- **Limiter_RandomPriority:** Directly tests the AdaptiveConcurrencyLimiter with random priorities;
-- **LimiterMiddleware_Default:** Tests the ConcurrencyLimiterMiddleware that uses the AdaptiveConcurrencyLimiter with Http Requests context with default priority.
-- **LimiterMiddleware_RandomPriority:** Tests the ConcurrencyLimiterMiddleware that uses the AdaptiveConcurrencyLimiter with Http Requests context with random priorities.
-
-- **TaskQueueWith1000Items_EnqueueFixedPriority:** Tests the TaskQueue.Enqueue pre-loaded with 1000 items and a default priority;
-- **TaskQueueEmpty_EnqueueRandomPriority:** Tests the TaskQueue.Enqueue with no elements;
-- **TaskQueueWith1000Items_EnqueueRandomPriority:** Tests the TaskQueue.Enqueue pre-loaded with 1000 items and random priorities;
-- **TaskQueueWith1000Items_Dequeue:** Tests the TaskQueue.Dequeue pre-loaded with 1000 items;
-- **TaskQueue_EnqueueNewItem_LimitReached:** Tests the TaskQueue.Enqueue pre-loaded with 1000 items and the queue limit reached;
-
-#### Limiter
-
-``` ini
-BenchmarkDotNet=v0.13.4, OS=Windows 10 (10.0.19044.2604/21H2/November2021Update)
-Intel Core i7-10610U CPU 1.80GHz, 1 CPU, 8 logical and 4 physical cores
-.NET SDK=7.0.103
- [Host] : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
- Job-VLMTWN : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
-
-IterationCount=10
-```
+## Documentation
-| Method | Mean | Error | StdDev | Min | Max | Rank | Completed Work Items | Lock Contentions | Gen0 | Allocated |
-|--------------------------------- |---------:|---------:|---------:|---------:|---------:|-----:|---------------------:|-----------------:|-------:|----------:|
-| Limiter_Default | 354.4 ns | 6.22 ns | 3.25 ns | 349.2 ns | 358.4 ns | 1 | 0.0000 | - | 0.1450 | 608 B |
-| Limiter_RandomPriority | 366.3 ns | 4.82 ns | 2.52 ns | 363.6 ns | 369.6 ns | 2 | 0.0000 | - | 0.1450 | 608 B |
-| LimiterMiddleware_Default | 436.6 ns | 28.55 ns | 16.99 ns | 416.7 ns | 471.4 ns | 3 | 0.0000 | - | 0.1855 | 776 B |
-| LimiterMiddleware_RandomPriority | 468.7 ns | 6.55 ns | 3.90 ns | 463.7 ns | 475.3 ns | 4 | 0.0000 | - | 0.2027 | 848 B |
+Learn more about using LoadShedding on the [site](https://farfetch.github.io/loadshedding/docs)!
-##### TaskQueue
+## Contributing
-``` ini
-BenchmarkDotNet=v0.13.4, OS=Windows 10 (10.0.19044.2604/21H2/November2021Update)
-Intel Core i7-10610U CPU 1.80GHz, 1 CPU, 8 logical and 4 physical cores
-.NET SDK=7.0.103
- [Host] : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
- Job-THBOTE : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
+Read our [contributing guidelines](CONTRIBUTING.md) to learn about our development process, how to propose bugfixes and improvements, and how to build and test your changes.
-InvocationCount=1 IterationCount=10 UnrollFactor=1
+## Get in touch
-```
+You can find us at:
-| Method | Mean | Error | StdDev | Min | Max | Rank | Completed Work Items | Lock Contentions | Allocated |
-|--------------------------------------------- |----------:|----------:|----------:|---------:|----------:|-----:|---------------------:|-----------------:|----------:|
-| TaskQueueWith1000Items_EnqueueFixedPriority | 11.311 μs | 3.8278 μs | 2.2779 μs | 6.800 μs | 14.800 μs | 4 | - | - | 896 B |
-| TaskQueueEmpty_EnqueueRandomPriority | 6.700 μs | 3.8089 μs | 2.2666 μs | 4.800 μs | 11.800 μs | 2 | - | - | 896 B |
-| TaskQueueWith1000Items_EnqueueRandomPriority | 3.650 μs | 0.3540 μs | 0.1852 μs | 3.400 μs | 4.000 μs | 1 | - | - | 896 B |
-| TaskQueueWith1000Items_Dequeue | 5.500 μs | 1.0912 μs | 0.5707 μs | 4.600 μs | 6.400 μs | 2 | - | - | 704 B |
-| TaskQueue_EnqueueNewItem_LimitReached | 8.111 μs | 2.2848 μs | 1.3596 μs | 7.000 μs | 10.500 μs | 3 | - | - | 1144 B |
+- [GitHub Issues](https://github.com/Farfetch/loadshedding/issues)
+- [GitHub Discussions](https://github.com/Farfetch/loadshedding/discussions)
-#### Conclusion
+## License
-In all the scenarios the time added to the execution pipeline is very small and the impact caused by the limiter and task queue can be ignored.
+LoadShedding is a free and open source project, released under the permissive [MIT license](LICENSE).
diff --git a/docs/getting_started/installation.md b/docs/getting_started/installation.md
deleted file mode 100644
index c60a9c7..0000000
--- a/docs/getting_started/installation.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# LoadShedding Installation
-
-To start using the LoadShedding library, just install the following package to the Startup Project:
-
-```bash
-dotnet add package Farfetch.LoadShedding.AspNetCore
-```
-
-## How to Use
-
-Add the LoadShedding services by calling the `AddLoadShedding` extension.
-
-```csharp
-services.AddLoadShedding();
-```
-
-Use the `UseLoadShedding` extension method by extending the `IApplicationBuilder` interface.
-
-```csharp
-app.UseLoadShedding();
-```
-
-------
-
-Go back to [Documentation Index](/README.md#documentation)
diff --git a/docs/getting_started/quickstart.md b/docs/getting_started/quickstart.md
deleted file mode 100644
index afaf3c7..0000000
--- a/docs/getting_started/quickstart.md
+++ /dev/null
@@ -1,105 +0,0 @@
-# Quickstart: Create your first application with LoadShedding
-
-In this guide, you will use C# and the .NET CLI to create a WebApi that will have LoadShedding with the default configurations.
-
-By the end of the guide, you will know how to use LoadShedding on your application.
-
-## Prerequisites
-
-- One of the following .NET versions:
- - .NET Core 2.0 or above.
-
-## Overview
-
-You will create a WebApi using LoadShedding.
-
-## Steps
-### 1. Create a folder for your applications
-
-Create a new folder with the name _LoadSheddingQuickstart_.
-
-### 2. Create WebApi Project
-
-Run the following command to create a WebApi Project named _LoadSheddingQuickstart_.
-
-```bash
-dotnet new webapi -controllers -n LoadSheddingQuickstart
-```
-
-### 3. Install LoadShedding package
-
-Inside the _LoadSheddingQuickstart_ project directory, run the following command to install the required package
-
-```bash
-dotnet add package Farfetch.LoadShedding.AspNetCore
-```
-
-### 4 Add Metrics
-
-Inside the _LoadSheddingQuickstart_ project directory, run the following command to install Prometheus
-
-```bash
-dotnet add package prometheus-net.AspNetCore
-dotnet add package Farfetch.LoadShedding.Prometheus
-```
-
-:::info
-This step is optional. With this you will be able to confirm that LoadShedding is configured on the [Run!](#7-run) step.
-:::
-
-### 5. Add LoadShedding on the WebApi
-
-Add the LoadShedding services by calling the AddLoadShedding extension:
-
-```csharp
-services.AddLoadShedding();
-```
-
-Optionally you can configure `SubscribeEvents()` and you will be able to confirm that LoadShedding is configured on the [Run!](#7-run) step:
-
-```csharp
-services.AddLoadShedding((provider, options) =>
-{
- options.SubscribeEvents(events =>
- {
- events.ItemEnqueued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
- events.ItemDequeued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
- events.ItemProcessing.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
- events.ItemProcessed.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
- events.Rejected.Subscribe(args => Console.Error.WriteLine($"Item rejected with Priority: {args.Priority}"));
- });
-});
-```
-
-:::note
-For more information about de default configurations and possible customizations see [here.](../getting_started/register_concurrency_adaptative_limiter.md#options-configuration)
-:::
-
-### 6. Use LoadShedding on the WebApi
-
-Use the `UseLoadShedding` extension method by extending the `IApplicationBuilder` interface:
-
-```csharp
-app.UseLoadShedding();
-```
-
-### 7. Run!
-
-From the `LoadSheddingQuickstart` directory run the project:
-
-```bash
-dotnet run --project LoadSheddingQuickstart/LoadSheddingQuickstart.csproj
-```
-
-And you should be able to call the available endpoint with Swagger Web Interface: [https://localhost:7231/swagger/index.html]()
-
-Now, you can confirm that LoadShedding is correctly configured on your WebApi by calling the following endpoint: [localhost:7231/metrics]().
-
-You should see the LoadShedding metrics referenced [here](../getting_started/register_concurrency_adaptative_limiter.md#reference-documentation).
-
-Additionally, you can also confirm if LoadShedding is correctly configured by checking the following console output:
-
-```
-ConcurrencyLimit: 5, ConcurrencyItems: 1
-ConcurrencyLimit: 5, ConcurrencyItems: 0
-```
diff --git a/docs/getting_started/register_concurrency_adaptative_limiter.md b/docs/getting_started/register_concurrency_adaptative_limiter.md
deleted file mode 100644
index da43156..0000000
--- a/docs/getting_started/register_concurrency_adaptative_limiter.md
+++ /dev/null
@@ -1,246 +0,0 @@
-# Register Concurrency Adaptative Limiter
-
-The concurrency adaptative limiter provides a capacity to auto-adjust the accepted traffic based on the runtime performance, ensuring that latencies remain low.
-
-![Concurrency Adaptative Limiter](../resources/concurrency_limiter_graph.png)
-
-As can be seen in the previous image (adapted from [Performance Under Load Article](https://netflixtechblog.medium.com/performance-under-load-3e6fa9a60581)):
-
-* the requests are processed since the number of capacity + the number of queue slots is not reached.
-* as soon as the maximum concurrency limit is reached (possible to configure), the requests will enter a queue.
-* the requests that are waiting in the queue, will be released by a FIFO (First In, First Out) methodology.
-* as soon as the maximum queue size is reached, the system will automatically reject the following requests, returning a 503 - Service Unavailable error.
-* the latency will be kept low independent of the number of requests.
-* the capacity/concurrency limit will be automatically calculated through some algorithms taking into account the service performance degradation.
-
-## How to use it
-
-### Base Configuration
-
-Install all the needed services by calling `IServiceCollection.AddLoadShedding`.
-
-```csharp
-app.AddLoadShedding();
-```
-
-Extend the `IApplicationBuilder` using the `UseLoadShedding` extension method.
-
-```csharp
-app.UseLoadShedding();
-```
-
-### Options Configuration
-
-It is possible to have access to additional configurations when registering the services.
-
-```csharp
-services.AddLoadShedding((provider, options) =>
-{
- options.AdaptativeLimiter.ConcurrencyOptions.MinQueueSize = 10;
- options.AdaptativeLimiter.UseHeaderPriorityResolver();
- options.SubscribeEvents(events =>
- {
- events.ItemEnqueued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
- events.ItemDequeued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
- events.ItemProcessing.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
- events.ItemProcessed.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
- events.Rejected.Subscribe(args => Console.Error.WriteLine($"Item rejected with Priority: {args.Priority}"));
- });
-});
-```
-
-By default, the following `ConcurrencyOptions` values will be used:
-
-| Option | Description | Default Value |
-| ------ | -------- | -------- |
-| MinConcurrencyLimit | The minimum number of concurrent requests allowed | 5 |
-| InitialConcurrencyLimit | The starting number of concurrent requests allowed. This may be adjusted up or down based on the performance of the system | 5 |
-| MaxConcurrencyLimit | The maximum number of concurrent requests allowed | 500 |
-| Tolerance | The level of flexibility in adjusting the concurrency limit. It indicates how much change in the minimum latency is acceptable before lowering the concurrency limit threshold. A high tolerance means the system can adjust the concurrency limit more freely, while a low tolerance means the limit will be maintained more strictly. For example, a value of 2.0 means a 2x increase in latency is acceptable | 1.5 |
-| MinQueueSize | The minimum number of requests that must be waiting in the queue before new requests can be processed | 20 |
-| InitialQueueSize | The starting number of requests in the queue | 20 |
-| QueueTimeoutInMs | The queue waiting timeout, when the timeout is reached the task will be canceled and will throw an OperationCanceledException. | Infinite |
-
-**Note:** These default values were defined based on:
-
-* investigation of the [Netflix Concurrency Limit](https://github.com/Netflix/concurrency-limits) tool.
-* having a huge margin of tolerance: accepting 500 requests simultaneously (and 50 more going to the queue - initially).
-
-On the other hand, if needed, these settings can be completely overridden by using the `ConcurrencyOptions` property:
-
-```csharp
-
-services.AddLoadShedding((provider, options) =>
-{
- options.AdaptativeLimiter.ConcurrencyOptions.MinConcurrencyLimit = 5;
- options.AdaptativeLimiter.ConcurrencyOptions.InitialConcurrencyLimit = 5;
- options.AdaptativeLimiter.ConcurrencyOptions.InitialQueueSize = 50;
- options.AdaptativeLimiter.ConcurrencyOptions.Tolerance = 2;
- options.AdaptativeLimiter.ConcurrencyOptions.QueueTimeoutInMs = 60000;
-});
-```
-
-When defining the options values, the following criteria need to be accomplished:
-
-* MinConcurrencyLimit, InitialConcurrencyLimit, MaxConcurrencyLimit, MinQueueSize, and MinQueueSize >= 1
-* Tolerance > 1
-* MaxConcurrencyLimit > MinConcurrencyLimit
-* InitialConcurrencyLimit >= MinConcurrencyLimit && MaxConcurrencyLimit >= InitialConcurrencyLimit
-* InitialQueueSize >= MinQueueSize
-
-### Events Listener Configuration
-
-It is possible to monitor the service performance by subscribing internal events:
-
-* QueueLimitChanged: invoked whenever the queue limit is changed.
-* QueueItemsCountChanged: invoked whenever an item is enqueued or dequeued.
-* ConcurrencyLimitChanged: invoked whenever the concurrency limit is changed.
-* ConcurrentItemsCountChanged: invoked whenever an item is being processed or it is finished.
-* ItemEnqueued: invoked whenever a task is enqueued.
-* ItemDequeued: invoked whenever a task is dequeued.
-* Rejected: invoked whenever there are rejected requests - queue limit is reached.
-
-```csharp
-services.AddLoadShedding((provider, options) =>
-{
- options.SubscribeEvents(events =>
- {
- events.ItemEnqueued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
- events.ItemDequeued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
- events.ItemProcessing.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
- events.ItemProcessed.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
- events.Rejected.Subscribe(args => Console.Error.WriteLine($"Item rejected with Priority: {args.Priority}"));
- });
-});
-```
-
-### Custom Queue Size Calculator Configuration
-
-Calculating the queue size has the main goal to find the maximum value of requests allowed to be in the queue.
-
-The default queue size calculator is based on the square root of the concurrency limit value.
-
-Optionally, the strategy can be overridden by:
-
-#### 1 - Implement the IQueueSizeCalculator interface
-
-```csharp
- public class CustomQueueSizeCalculator : IQueueSizeCalculator
- {
- public int CalculateQueueSize(IConcurrencyContext context)
- {
- // Implement the Calculate Queue Size logic here
-
- return default;
- }
- }
-```
-
-#### 2 - Use a custom QueueSizeCalculator
-
-```csharp
-services.AddLoadShedding((provider, options) =>
-{
- options.AdaptativeLimiter.QueueSizeCalculator = new CustomQueueSizeCalculator();
-});
-```
-
-### Request Prioritization Configuration
-
-It is possible to configure the settings to establish priority resolvers for requests.
-
-At present, only one strategy is supported, which means that solely the most recently configured strategy will be implemented.
-
-#### Http Header Priority Resolver
-
-With the extension `UseHeaderPriorityResolver` it will automatically convert the value of the HTTP Header `X-Priority` to the request priority.
-
-The allowed values are: critical, normal and non-critical
-
-```csharp
-services.AddLoadShedding((provider, options) =>
-{
- options.AdaptativeLimiter.UseHeaderPriorityResolver();
-});
-```
-
-#### Endpoint Priority Resolver
-
-With the extension `UseEndpointPriorityResolver` it will automatically load the Priority defined for the endpoint from the `EndpointPriorityAttribute`.
-
-```csharp
-services.AddLoadShedding((provider, options) =>
-{
- options.AdaptativeLimiter.UseEndpointPriorityResolver();
-});
-```
-
-Also, add `EndpointPriorityAttribute` in the action.
-
-```csharp
-[HttpGet]
-[Route("people")]
-[EndpointPriority(Priority.Critical)]
-public async Task GetPeopleAsync()
-{
- return this.Ok(new[]
- {
- new Person
- {
- Id = 1,
- Age = 18,
- UserName = "john.doe"
- }
- });
-}
-```
-
-### Including Metrics
-
-The library has the option to export adaptative limiter metrics to Prometheus.
-
-#### Install Package
-
-```bash
-dotnet add package Farfetch.LoadShedding.Prometheus
-```
-
-#### Configure
-
-Use the `LoadSheddingOptions` extension method `AddMetrics()`.
-The metrics includes the label `method` that describes the HTTP method. For this value to be correctly parsed, the `HTTPContextAccessor` should be included otherwise the `method` label will output the value `UNKNOWN`.
-
-```csharp
-builder.Services.AddHttpContextAccessor();
-
-services.AddLoadShedding((provider, options) =>
-{
- options.AddMetrics();
-});
-```
-
-`AddMetrics` has additional options that supports renaming and enable/disable specific metrics.
-
-```csharp
-options.AddMetrics(options =>
-{
- options.QueueLimit.Enabled = false;
- options.ConcurrencyLimit.Enabled = false;
- options.RequestRejected.Enabled = false;
-});
-```
-
-#### Reference Documentation
-
-| Metric Name | Metric Description | Metric Type | Labels |
-| ----------- | ------------------ | ----------- | ------ |
-| http_requests_concurrency_items_total | The current number of executions concurrently | gauge | |
-| http_requests_concurrency_limit_total | The current concurrency limit | gauge | |
-| http_requests_queue_items_total | The current number of items waiting to be processed in the queue | gauge | method (HTTP method of the request), priority (critical, noncritical, normal) |
-| http_requests_queue_limit_total | The current queue limit size | gauge | |
-| http_requests_queue_time_seconds | The time each request spent in the queue until its executed | histogram | method (HTTP method of the request), priority (critical, noncritical, normal) |
-| http_requests_rejected_total | The number of requests rejected because the queue limit is reached | counter | method (HTTP method of the request), priority (critical, noncritical, normal), reason (max_queue_items, queue_timeout) |
-
-------
-
-Go back to [Documentation Index](/README.md#documentation)
diff --git a/website/docs/benckmarks/_category_.json b/website/docs/benckmarks/_category_.json
new file mode 100644
index 0000000..3f36aa7
--- /dev/null
+++ b/website/docs/benckmarks/_category_.json
@@ -0,0 +1,7 @@
+{
+ "label": "Benckmarks",
+ "position": 5,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/benckmarks/adaptative-concurrency-limiter-test-report.md b/website/docs/benckmarks/adaptative-concurrency-limiter-test-report.md
new file mode 100644
index 0000000..f8b3b94
--- /dev/null
+++ b/website/docs/benckmarks/adaptative-concurrency-limiter-test-report.md
@@ -0,0 +1,60 @@
+# Adaptative Concurrency Limiter Test Report
+
+## Benchmark
+
+Below is a benchmark analysis of the concurrency control mechanism. Multiple scenarios were created for this test:
+
+- **Limiter_Default:** Directly tests the AdaptiveConcurrencyLimiter with the default priority.
+- **Limiter_RandomPriority:** Directly tests the AdaptiveConcurrencyLimiter with random priorities.
+- **LimiterMiddleware_Default:** Tests the ConcurrencyLimiterMiddleware, which uses the AdaptiveConcurrencyLimiter in an HTTP request context, with the default priority.
+- **LimiterMiddleware_RandomPriority:** Tests the ConcurrencyLimiterMiddleware, which uses the AdaptiveConcurrencyLimiter in an HTTP request context, with random priorities.
+
+- **TaskQueueWith1000Items_EnqueueFixedPriority:** Tests TaskQueue.Enqueue pre-loaded with 1000 items and a fixed (default) priority.
+- **TaskQueueEmpty_EnqueueRandomPriority:** Tests TaskQueue.Enqueue with no pre-loaded elements.
+- **TaskQueueWith1000Items_EnqueueRandomPriority:** Tests TaskQueue.Enqueue pre-loaded with 1000 items and random priorities.
+- **TaskQueueWith1000Items_Dequeue:** Tests TaskQueue.Dequeue pre-loaded with 1000 items.
+- **TaskQueue_EnqueueNewItem_LimitReached:** Tests TaskQueue.Enqueue pre-loaded with 1000 items and with the queue limit reached.
+
+### Limiter
+
+``` ini
+BenchmarkDotNet=v0.13.4, OS=Windows 10 (10.0.19044.2604/21H2/November2021Update)
+Intel Core i7-10610U CPU 1.80GHz, 1 CPU, 8 logical and 4 physical cores
+.NET SDK=7.0.103
+ [Host] : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
+ Job-VLMTWN : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
+
+IterationCount=10
+```
+
+| Method | Mean | Error | StdDev | Min | Max | Rank | Completed Work Items | Lock Contentions | Gen0 | Allocated |
+|--------------------------------- |---------:|---------:|---------:|---------:|---------:|-----:|---------------------:|-----------------:|-------:|----------:|
+| Limiter_Default | 354.4 ns | 6.22 ns | 3.25 ns | 349.2 ns | 358.4 ns | 1 | 0.0000 | - | 0.1450 | 608 B |
+| Limiter_RandomPriority | 366.3 ns | 4.82 ns | 2.52 ns | 363.6 ns | 369.6 ns | 2 | 0.0000 | - | 0.1450 | 608 B |
+| LimiterMiddleware_Default | 436.6 ns | 28.55 ns | 16.99 ns | 416.7 ns | 471.4 ns | 3 | 0.0000 | - | 0.1855 | 776 B |
+| LimiterMiddleware_RandomPriority | 468.7 ns | 6.55 ns | 3.90 ns | 463.7 ns | 475.3 ns | 4 | 0.0000 | - | 0.2027 | 848 B |
+
+### TaskQueue
+
+``` ini
+BenchmarkDotNet=v0.13.4, OS=Windows 10 (10.0.19044.2604/21H2/November2021Update)
+Intel Core i7-10610U CPU 1.80GHz, 1 CPU, 8 logical and 4 physical cores
+.NET SDK=7.0.103
+ [Host] : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
+ Job-THBOTE : .NET 6.0.14 (6.0.1423.7309), X64 RyuJIT AVX2
+
+InvocationCount=1 IterationCount=10 UnrollFactor=1
+
+```
+
+| Method | Mean | Error | StdDev | Min | Max | Rank | Completed Work Items | Lock Contentions | Allocated |
+|--------------------------------------------- |----------:|----------:|----------:|---------:|----------:|-----:|---------------------:|-----------------:|----------:|
+| TaskQueueWith1000Items_EnqueueFixedPriority | 11.311 μs | 3.8278 μs | 2.2779 μs | 6.800 μs | 14.800 μs | 4 | - | - | 896 B |
+| TaskQueueEmpty_EnqueueRandomPriority | 6.700 μs | 3.8089 μs | 2.2666 μs | 4.800 μs | 11.800 μs | 2 | - | - | 896 B |
+| TaskQueueWith1000Items_EnqueueRandomPriority | 3.650 μs | 0.3540 μs | 0.1852 μs | 3.400 μs | 4.000 μs | 1 | - | - | 896 B |
+| TaskQueueWith1000Items_Dequeue | 5.500 μs | 1.0912 μs | 0.5707 μs | 4.600 μs | 6.400 μs | 2 | - | - | 704 B |
+| TaskQueue_EnqueueNewItem_LimitReached | 8.111 μs | 2.2848 μs | 1.3596 μs | 7.000 μs | 10.500 μs | 3 | - | - | 1144 B |
+
+### Conclusion
+
+In all scenarios, the time added to the execution pipeline is very small, and the impact of the limiter and the task queue is negligible.
diff --git a/website/docs/getting-started/_category_.json b/website/docs/getting-started/_category_.json
index f8928f4..20d34de 100644
--- a/website/docs/getting-started/_category_.json
+++ b/website/docs/getting-started/_category_.json
@@ -1,7 +1,7 @@
{
- "label": "Getting Started",
- "position": 2,
- "link": {
- "type": "generated-index"
- }
- }
\ No newline at end of file
+ "label": "Getting Started",
+ "position": 2,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/getting-started/packages.md b/website/docs/getting-started/packages.md
new file mode 100644
index 0000000..5ce5b86
--- /dev/null
+++ b/website/docs/getting-started/packages.md
@@ -0,0 +1,19 @@
+---
+sidebar_position: 3
+---
+
+# Packages
+
+## Core
+
+| Package | NuGet Stable | Downloads |
+| ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
+| [Farfetch.LoadShedding](https://www.nuget.org/packages/Farfetch.LoadShedding/) | [![Farfetch.LoadShedding](https://img.shields.io/nuget/v/Farfetch.LoadShedding.svg)](https://www.nuget.org/packages/Farfetch.LoadShedding/) | [![Farfetch.LoadShedding](https://img.shields.io/nuget/dt/Farfetch.LoadShedding.svg)](https://www.nuget.org/packages/Farfetch.LoadShedding/) |
+| [Farfetch.LoadShedding.AspNetCore](https://www.nuget.org/packages/Farfetch.LoadShedding.AspNetCore/) | [![Farfetch.LoadShedding.AspNetCore](https://img.shields.io/nuget/v/Farfetch.LoadShedding.AspNetCore.svg)](https://www.nuget.org/packages/Farfetch.LoadShedding.AspNetCore/) | [![Farfetch.LoadShedding](https://img.shields.io/nuget/dt/Farfetch.LoadShedding.AspNetCore.svg)](https://www.nuget.org/packages/Farfetch.LoadShedding.AspNetCore/) |
+
+## Metrics
+
+| Package | NuGet Stable | Downloads |
+| ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
+| [Farfetch.LoadShedding.Prometheus](https://www.nuget.org/packages/Farfetch.LoadShedding.Prometheus/) | [![Farfetch.LoadShedding.Prometheus](https://img.shields.io/nuget/v/Farfetch.LoadShedding.Prometheus.svg)](https://www.nuget.org/packages/Farfetch.LoadShedding.Prometheus/) | [![Farfetch.LoadShedding.Prometheus](https://img.shields.io/nuget/dt/Farfetch.LoadShedding.Prometheus.svg)](https://www.nuget.org/packages/Farfetch.LoadShedding.Prometheus/) |
+
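+Each package can be installed with the .NET CLI; for example, the commands used in the quickstart are:
+
+```bash
+dotnet add package Farfetch.LoadShedding.AspNetCore
+dotnet add package Farfetch.LoadShedding.Prometheus
+```
+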
diff --git a/website/docs/getting-started/quickstart.md b/website/docs/getting-started/quickstart.md
index 970a089..773c7a7 100644
--- a/website/docs/getting-started/quickstart.md
+++ b/website/docs/getting-started/quickstart.md
@@ -3,4 +3,108 @@ sidebar_position: 2
sidebar_label: Quickstart
---
-# Quickstart
+# Quickstart: Create your first application with LoadShedding
+
+In this guide, you will use C# and the .NET CLI to create a WebApi that uses LoadShedding with its default configuration.
+
+By the end of the guide, you will know how to use LoadShedding in your application.
+
+## Prerequisites
+
+- One of the following .NET versions:
+ - .NET Core 2.0 or above.
+
+## Overview
+
+You will create a WebApi using LoadShedding.
+
+## Steps
+### 1. Create a folder for your application
+
+Create a new folder with the name _LoadSheddingQuickstart_.
+
+### 2. Create WebApi Project
+
+Run the following command to create a WebApi Project named _LoadSheddingQuickstart_.
+
+```bash
+dotnet new webapi -controllers -n LoadSheddingQuickstart
+```
+
+### 3. Install LoadShedding package
+
+Inside the _LoadSheddingQuickstart_ project directory, run the following command to install the required package:
+
+```bash
+dotnet add package Farfetch.LoadShedding.AspNetCore
+```
+
+### 4. Add Metrics
+
+Inside the _LoadSheddingQuickstart_ project directory, run the following commands to install the Prometheus packages:
+
+```bash
+dotnet add package prometheus-net.AspNetCore
+dotnet add package Farfetch.LoadShedding.Prometheus
+```
+
+:::info
+This step is optional, but it lets you confirm that LoadShedding is configured in the [Run!](#7-run) step.
+:::
+
+### 5. Add LoadShedding on the WebApi
+
+Add the LoadShedding services by calling the `AddLoadShedding` extension:
+
+```csharp
+services.AddLoadShedding();
+```
+
+Optionally, you can configure `SubscribeEvents()` so that you can confirm that LoadShedding is configured in the [Run!](#7-run) step:
+
+```csharp
+services.AddLoadShedding((provider, options) =>
+{
+ options.SubscribeEvents(events =>
+ {
+ events.ItemEnqueued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
+ events.ItemDequeued.Subscribe(args => Console.WriteLine($"QueueLimit: {args.QueueLimit}, QueueCount: {args.QueueCount}"));
+ events.ItemProcessing.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
+ events.ItemProcessed.Subscribe(args => Console.WriteLine($"ConcurrencyLimit: {args.ConcurrencyLimit}, ConcurrencyItems: {args.ConcurrencyCount}"));
+ events.Rejected.Subscribe(args => Console.Error.WriteLine($"Item rejected with Priority: {args.Priority}"));
+ });
+});
+```
+
+:::note
+For more information about the default configurations and possible customizations, see the [configuration guide](../guides/adaptative-concurreny-limiter/configuration.md#options-configuration).
+:::
+
+### 6. Use LoadShedding on the WebApi
+
+Call the `UseLoadShedding` extension method on the `IApplicationBuilder`:
+
+```csharp
+app.UseLoadShedding();
+```
+
+### 7. Run!
+
+From the `LoadSheddingQuickstart` directory, run the project:
+
+```bash
+dotnet run --project LoadSheddingQuickstart/LoadSheddingQuickstart.csproj
+```
+
+You should now be able to call the available endpoints through the Swagger web interface: https://localhost:7231/swagger/index.html
+
+You can confirm that LoadShedding is correctly configured in your WebApi by calling the metrics endpoint (for example, with curl, as shown below): https://localhost:7231/metrics
+
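+A quick check from a terminal (assuming the ASP.NET Core HTTPS development certificate, hence `--insecure`, and the port used throughout this guide):
+
+```bash
+# fetch the Prometheus metrics and keep only the LoadShedding ones
+curl --silent --insecure https://localhost:7231/metrics | grep "http_requests_"
+```
+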
+You should see the LoadShedding metrics referenced in the [configuration guide](../guides/adaptative-concurreny-limiter/configuration.md#reference-documentation).
+
+Additionally, you can confirm that LoadShedding is correctly configured by checking for the following console output:
+
+```
+ConcurrencyLimit: 5, ConcurrencyItems: 1
+ConcurrencyLimit: 5, ConcurrencyItems: 0
+```
diff --git a/website/docs/getting-started/samples.md b/website/docs/getting-started/samples.md
new file mode 100644
index 0000000..c943a05
--- /dev/null
+++ b/website/docs/getting-started/samples.md
@@ -0,0 +1,29 @@
+---
+sidebar_position: 4
+---
+
+# Samples
+
+We know that working code is a valuable learning tool for many, so here you can find a list of samples built to demonstrate LoadShedding capabilities.
+
+## Web API
+
+This is a simple sample that shows how LoadShedding is integrated into a Web API.
+
+You can find the code here: [/samples/Samples.WebApi](https://github.com/Farfetch/loadshedding/tree/master/samples/Samples.WebApi)
+
+To run this sample in a Docker environment, follow these steps:
+
+### 1. Run docker compose command
+
+Inside the _loadshedding_ project directory, run the following command:
+
+```bash
+docker compose -f docker-compose.sample.yaml up --build -d
+```
+
+### 2. LoadShedding metrics with Grafana
+
+To view the Grafana dashboard, navigate to http://localhost:3000 and log in with the admin credentials.
+
+After that, you can access the dashboard here: http://localhost:3000/d/http_loadshedding/http-loadshedding?orgId=1&refresh=1m
diff --git a/website/docs/guides/_category_.json b/website/docs/guides/_category_.json
index 9dfeedb..f389501 100644
--- a/website/docs/guides/_category_.json
+++ b/website/docs/guides/_category_.json
@@ -1,7 +1,7 @@
{
- "label": "Guides",
- "position": 3,
- "link": {
- "type": "generated-index"
- }
- }
\ No newline at end of file
+ "label": "Guides",
+ "position": 3,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/guides/adaptative-concurreny-limiter/_category_.json b/website/docs/guides/adaptative-concurreny-limiter/_category_.json
new file mode 100644
index 0000000..7edfc02
--- /dev/null
+++ b/website/docs/guides/adaptative-concurreny-limiter/_category_.json
@@ -0,0 +1,7 @@
+{
+ "label": "Adaptative Concurrency Limiter",
+ "position": 3,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter.md b/website/docs/guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter.md
new file mode 100644
index 0000000..a157b02
--- /dev/null
+++ b/website/docs/guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter.md
@@ -0,0 +1,26 @@
+---
+sidebar_position: 1
+---
+
+# Adaptative Concurrency Limiter
+
+In this section, we will learn how the **adaptative concurrency limiter** works in LoadShedding.
+
+The adaptative concurrency limiter provides the capacity to auto-adjust the accepted traffic based on runtime performance, ensuring that latencies remain low.
+
+![Adaptative Concurrency Limiter](concurrency_limiter_graph.png)
+
+As can be seen in the previous image (adapted from [Performance Under Load Article](https://netflixtechblog.medium.com/performance-under-load-3e6fa9a60581)):
+
+* Requests are processed as long as the combined capacity (concurrency slots plus queue slots) is not exhausted.
+* As soon as the maximum concurrency limit is reached (configurable), incoming requests enter a queue.
+* Requests waiting in the queue are released in FIFO (First In, First Out) order.
+* As soon as the maximum queue size is reached, the system automatically rejects subsequent requests, returning a 503 - Service Unavailable error.
+* Latency is kept low independently of the number of requests.
+* The capacity/concurrency limit is automatically recalculated by algorithms that take service performance degradation into account.
+
+## How it works
+
+The following animation shows requests being processed with LoadShedding:
+
+![adaptative-concurrency-limiter-animation](adaptative_concurrency_limiter_animation.gif)
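+
+In code form, the admission flow described above can be sketched roughly as follows. This is a simplified, hypothetical illustration (the type and member names are made up and the limits are fixed); it is not the library's actual implementation:
+
+```csharp
+using System.Collections.Generic;
+using System.Threading.Tasks;
+
+// Hypothetical sketch: run while capacity is free, wait in a bounded FIFO
+// queue otherwise, and reject (503 - Service Unavailable) when the queue is full.
+public sealed class AdmissionSketch
+{
+    private readonly object _sync = new();
+    private readonly Queue<TaskCompletionSource<bool>> _queue = new();
+    private int _running;
+
+    public int ConcurrencyLimit { get; set; } = 5;  // in LoadShedding this is adjusted at runtime
+    public int QueueLimit { get; set; } = 20;
+
+    // Returns false when the request must be shed.
+    public Task<bool> TryEnterAsync()
+    {
+        lock (_sync)
+        {
+            if (_running < ConcurrencyLimit)
+            {
+                _running++;
+                return Task.FromResult(true);   // free slot: process immediately
+            }
+
+            if (_queue.Count >= QueueLimit)
+            {
+                return Task.FromResult(false);  // queue full: reject the request
+            }
+
+            var waiter = new TaskCompletionSource<bool>();
+            _queue.Enqueue(waiter);             // wait in FIFO order
+            return waiter.Task;
+        }
+    }
+
+    public void Exit()
+    {
+        TaskCompletionSource<bool> next = null;
+        lock (_sync)
+        {
+            if (_queue.Count > 0)
+            {
+                next = _queue.Dequeue();        // hand the freed slot to the oldest waiter
+            }
+            else
+            {
+                _running--;
+            }
+        }
+
+        next?.SetResult(true);
+    }
+}
+```
+
+In the real limiter, the concurrency and queue limits are not fixed: they are continuously recalculated from the observed latency, which is what makes the limiter adaptative.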
diff --git a/website/docs/guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter_animation.gif b/website/docs/guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter_animation.gif
new file mode 100644
index 0000000..fb0fb1a
Binary files /dev/null and b/website/docs/guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter_animation.gif differ
diff --git a/docs/resources/concurrency_limiter_graph.png b/website/docs/guides/adaptative-concurreny-limiter/concurrency_limiter_graph.png
similarity index 100%
rename from docs/resources/concurrency_limiter_graph.png
rename to website/docs/guides/adaptative-concurreny-limiter/concurrency_limiter_graph.png
diff --git a/website/docs/guides/register_concurrency_adaptative_limiter.md b/website/docs/guides/adaptative-concurreny-limiter/configuration.md
similarity index 80%
rename from website/docs/guides/register_concurrency_adaptative_limiter.md
rename to website/docs/guides/adaptative-concurreny-limiter/configuration.md
index d747c6e..e396f1d 100644
--- a/website/docs/guides/register_concurrency_adaptative_limiter.md
+++ b/website/docs/guides/adaptative-concurreny-limiter/configuration.md
@@ -1,39 +1,29 @@
---
-sidebar_position: 1
+sidebar_position: 2
---
-# Register Concurrency Adaptative Limiter
+# Configuration
-The concurrency adaptative limiter provides a capacity to auto-adjust the accepted traffic based on the runtime performance, ensuring that latencies remain low.
+In this section, we will introduce how configuration is done in LoadShedding.
-![Concurrency Adaptative Limiter](concurrency_limiter_graph.png)
+LoadShedding is a highly configurable framework. You can customize it through a fluent builder.
-As can be seen in the previous image (adapted from [Performance Under Load Article](https://netflixtechblog.medium.com/performance-under-load-3e6fa9a60581)):
+There are a few options to configure LoadShedding:
+- [Options Configuration](#options-configuration)
+- [Events Listener Configuration](#events-listener-configuration)
+- [Custom Queue Size Calculator Configuration](#custom-queue-size-calculator-configuration)
+  - [1 - Implement the IQueueSizeCalculator interface](#1---implement-the-iqueuesizecalculator-interface)
+  - [2 - Use a custom QueueSizeCalculator](#2---use-a-custom-queuesizecalculator)
+- [Request Prioritization Configuration](#request-prioritization-configuration)
+  - [Http Header Priority Resolver](#http-header-priority-resolver)
+  - [Endpoint Priority Resolver](#endpoint-priority-resolver)
+- [Metrics](#metrics)
+  - [Install Package](#install-package)
+  - [Configure](#configure)
+  - [Reference Documentation](#reference-documentation)
-* the requests are processed since the number of capacity + the number of queue slots is not reached.
-* as soon as the maximum concurrency limit is reached (possible to configure), the requests will enter a queue.
-* the requests that are waiting in the queue, will be released by a FIFO (First In, First Out) methodology.
-* as soon as the maximum queue size is reached, the system will automatically reject the following requests, returning a 503 - Service Unavailable error.
-* the latency will be kept low independent of the number of requests.
-* the capacity/concurrency limit will be automatically calculated through some algorithms taking into account the service performance degradation.
-
-## How to use it
-
-### Base Configuration
-
-Install all the needed services by calling `IServiceCollection.AddLoadShedding`.
-
-```csharp
-app.AddLoadShedding();
-```
-
-Extend the `IApplicationBuilder` using the `UseLoadShedding` extension method.
-
-```csharp
-app.UseLoadShedding();
-```
-
-### Options Configuration
+## Options Configuration
It is possible to have access to additional configurations when registering the services.
@@ -65,15 +55,15 @@ By default, the following `ConcurrencyOptions` values will be used:
| InitialQueueSize | The starting number of requests in the queue | 20 |
| QueueTimeoutInMs | The queue waiting timeout, when the timeout is reached the task will be canceled and will throw an OperationCanceledException. | Infinite |
-**Note:** These default values were defined based on:
-
-* investigation of the [Netflix Concurrency Limit](https://github.com/Netflix/concurrency-limits) tool.
-* having a huge margin of tolerance: accepting 500 requests simultaneously (and 50 more going to the queue - initially).
+:::note
+These default values were defined based on:
+* Investigation of the [Netflix Concurrency Limit](https://github.com/Netflix/concurrency-limits) tool.
+* Having a huge margin of tolerance: accepting 500 requests simultaneously (and 50 more going to the queue - initially).
+:::
On the other hand, if needed, these settings can be completely overridden by using the `ConcurrencyOptions` property:
```csharp
-
services.AddLoadShedding((provider, options) =>
{
options.AdaptativeLimiter.ConcurrencyOptions.MinConcurrencyLimit = 5;
@@ -92,7 +82,7 @@ When defining the options values, the following criteria need to be accomplished
* InitialConcurrencyLimit >= MinConcurrencyLimit && MaxConcurrencyLimit >= InitialConcurrencyLimit
* InitialQueueSize >= MinQueueSize
-### Events Listener Configuration
+## Events Listener Configuration
It is possible to monitor the service performance by subscribing internal events:
@@ -118,7 +108,7 @@ services.AddLoadShedding((provider, options) =>
});
```
-### Custom Queue Size Calculator Configuration
+## Custom Queue Size Calculator Configuration
Calculating the queue size has the main goal to find the maximum value of requests allowed to be in the queue.
@@ -126,7 +116,7 @@ The default queue size calculator is based on the square root of the concurrency
Optionally, the strategy can be overridden by:
-#### 1 - Implement the IQueueSizeCalculator interface
+### 1 - Implement the IQueueSizeCalculator interface
```csharp
public class CustomQueueSizeCalculator : IQueueSizeCalculator
@@ -140,7 +130,7 @@ Optionally, the strategy can be overridden by:
}
```
-#### 2 - Use a custom QueueSizeCalculator
+### 2 - Use a custom QueueSizeCalculator
```csharp
services.AddLoadShedding((provider, options) =>
@@ -149,13 +139,13 @@ services.AddLoadShedding((provider, options) =>
});
```
-### Request Prioritization Configuration
+## Request Prioritization Configuration
It is possible to configure the settings to establish priority resolvers for requests.
At present, only one strategy is supported, which means that solely the most recently configured strategy will be implemented.
-#### Http Header Priority Resolver
+### Http Header Priority Resolver
With the extension `UseHeaderPriorityResolver` it will automatically convert the value of the HTTP Header `X-Priority` to the request priority.
@@ -168,7 +158,7 @@ services.AddLoadShedding((provider, options) =>
});
```
-#### Endpoint Priority Resolver
+### Endpoint Priority Resolver
With the extension `UseEndpointPriorityResolver` it will automatically load the Priority defined for the endpoint from the `EndpointPriorityAttribute`.
@@ -199,17 +189,17 @@ public async Task GetPeopleAsync()
}
```
-### Including Metrics
+## Metrics
The library has the option to export adaptative limiter metrics to Prometheus.
-#### Install Package
+### Install Package
```bash
dotnet add package Farfetch.LoadShedding.Prometheus
```
-#### Configure
+### Configure
Use the `LoadSheddingOptions` extension method `AddMetrics()`.
The metrics includes the label `method` that describes the HTTP method. For this value to be correctly parsed, the `HTTPContextAccessor` should be included otherwise the `method` label will output the value `UNKNOWN`.
@@ -234,7 +224,7 @@ options.AddMetrics(options =>
});
```
-#### Reference Documentation
+### Reference Documentation
| Metric Name | Metric Description | Metric Type | Labels |
| ----------- | ------------------ | ----------- | ------ |
diff --git a/website/docs/guides/concurrency_limiter_graph.png b/website/docs/guides/concurrency_limiter_graph.png
deleted file mode 100644
index 1f2374c..0000000
Binary files a/website/docs/guides/concurrency_limiter_graph.png and /dev/null differ
diff --git a/website/docs/introduction.md b/website/docs/introduction.md
index a60fd92..6df0f66 100644
--- a/website/docs/introduction.md
+++ b/website/docs/introduction.md
@@ -6,13 +6,27 @@ slug: /
# Introduction to LoadShedding
-...
+⚡️ LoadShedding was designed to effectively manage concurrency in the .NET ecosystem.
+
+🏗 Built upon the foundational principles and concepts pioneered by [Netflix](https://netflixtechblog.medium.com/performance-under-load-3e6fa9a60581).
+
+🎁 Offers an out-of-the-box Adaptative Concurrency Limiter implementation.
+
+🎁 Offers additional features like metrics, request prioritization, and configuration flexibility.
Get started by installing [LoadShedding](getting-started/installation) or following our [Quickstart](getting-started/quickstart).
## Features {#features}
-...
+Our goal is to empower you to build .NET applications that preserve an acceptable quality of service for the maximum number of requests possible when a service is facing high demand or extreme load.
+
+To do that, LoadShedding gives you access to features like:
+
+- [Adaptative concurrency limiter](guides/adaptative-concurreny-limiter/adaptative_concurrency_limiter.md)
+- [Event listeners](guides/adaptative-concurreny-limiter/configuration.md#events-listener-configuration)
+- [Custom queue size calculator](guides/adaptative-concurreny-limiter/configuration.md#custom-queue-size-calculator-configuration)
+- [Request prioritization](guides/adaptative-concurreny-limiter/configuration.md#request-prioritization-configuration)
+- [Adaptative concurrency limiter metrics](guides/adaptative-concurreny-limiter/configuration.md#metrics)
## Join the community {#join-the-community}
diff --git a/website/docs/reference/Farfetch.LoadShedding.AspNetCore/_category_.json b/website/docs/reference/Farfetch.LoadShedding.AspNetCore/_category_.json
new file mode 100644
index 0000000..517e619
--- /dev/null
+++ b/website/docs/reference/Farfetch.LoadShedding.AspNetCore/_category_.json
@@ -0,0 +1,7 @@
+{
+ "label": "Farfetch.LoadShedding.AspNetCore",
+ "position": 2,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/reference/Farfetch.LoadShedding.Prometheus/_category_.json b/website/docs/reference/Farfetch.LoadShedding.Prometheus/_category_.json
new file mode 100644
index 0000000..d4bde26
--- /dev/null
+++ b/website/docs/reference/Farfetch.LoadShedding.Prometheus/_category_.json
@@ -0,0 +1,7 @@
+{
+ "label": "Farfetch.LoadShedding.Prometheus",
+ "position": 3,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/reference/Farfetch.LoadShedding/_category_.json b/website/docs/reference/Farfetch.LoadShedding/_category_.json
new file mode 100644
index 0000000..d442f26
--- /dev/null
+++ b/website/docs/reference/Farfetch.LoadShedding/_category_.json
@@ -0,0 +1,7 @@
+{
+ "label": "Farfetch.LoadShedding",
+ "position": 1,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docs/reference/_category_.json b/website/docs/reference/_category_.json
new file mode 100644
index 0000000..22e0368
--- /dev/null
+++ b/website/docs/reference/_category_.json
@@ -0,0 +1,7 @@
+{
+ "label": "Reference",
+ "position": 4,
+ "link": {
+ "type": "generated-index"
+ }
+}
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
index 07a8c17..298c02d 100644
--- a/website/docusaurus.config.js
+++ b/website/docusaurus.config.js
@@ -16,7 +16,7 @@ const config = {
url: 'https://farfetch.github.io/',
// Set the /<baseUrl>/ pathname under which your site is served
// For GitHub pages deployment, it is often '/<projectName>/'
- baseUrl: '/',
+ baseUrl: '/loadshedding/',
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
@@ -40,14 +40,9 @@ const config = {
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
- routeBasePath: '/', // Serve the docs at the site's root
- sidebarPath: './sidebars.js',
- // Please change this to your repo.
- // Remove this to remove the "edit this page" links.
- editUrl:
- 'https://github.com/farfetch/loadhsedding/tree/main/website/',
+ sidebarPath: require.resolve('./sidebars.js'),
+ editUrl: 'https://github.com/farfetch/loadhsedding/tree/main/website/',
},
- blog: false,
theme: {
customCss: './src/css/custom.css',
},
@@ -66,8 +61,17 @@ const config = {
logo: {
alt: 'LoadShedding',
src: 'img/logo.svg',
+ href: 'https://farfetch.github.io/loadshedding',
+ target: '_self',
+ height: 32,
},
items: [
+ {
+ type: 'doc',
+ docId: 'introduction',
+ position: 'right',
+ label: 'Docs',
+ },
{
href: 'https://github.com/farfetch/loadshedding',
label: 'GitHub',
@@ -83,15 +87,15 @@ const config = {
items: [
{
label: 'Introduction',
- to: '/',
+ to: '/docs',
},
{
label: 'Getting Started',
- to: '/category/getting-started',
+ to: '/docs/category/getting-started',
},
{
label: 'Guides',
- to: '/category/guides',
+ to: '/docs/category/guides',
}
]
},
diff --git a/website/src/pages/index.js b/website/src/pages/index.js
new file mode 100644
index 0000000..34e3e5c
--- /dev/null
+++ b/website/src/pages/index.js
@@ -0,0 +1,43 @@
+import React from 'react';
+import clsx from 'clsx';
+import Link from '@docusaurus/Link';
+import useBaseUrl, {useBaseUrlUtils} from '@docusaurus/useBaseUrl';
+import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
+import Layout from '@theme/Layout';
+
+import styles from './index.module.css';
+
+
+// Hero banner with the site tagline and a "Get Started" link into the docs.
+function HomepageHeader() {
+  const {siteConfig} = useDocusaurusContext();
+  return (
+    <header className={clsx('hero hero--primary', styles.heroBanner)}>
+      <div className="container">
+        <p className="hero__subtitle">{siteConfig.tagline}</p>
+        <div className={styles.buttons}>
+          <Link
+            className="button button--secondary button--lg"
+            to="/docs">
+            Get Started
+          </Link>
+        </div>
+      </div>
+    </header>
+  );
+}
+
+export default function Home() {
+  const {siteConfig} = useDocusaurusContext();
+  return (
+    <Layout title={siteConfig.title} description={siteConfig.tagline}>
+      <HomepageHeader />
+    </Layout>
+  );
+}
diff --git a/website/src/pages/index.module.css b/website/src/pages/index.module.css
new file mode 100644
index 0000000..120d310
--- /dev/null
+++ b/website/src/pages/index.module.css
@@ -0,0 +1,25 @@
+/**
+ * CSS files with the .module.css suffix will be treated as CSS modules
+ * and scoped locally.
+ */
+
+.heroBanner {
+  padding: 4rem 0;
+  text-align: center;
+  position: relative;
+  overflow: hidden;
+}
+
+@media screen and (max-width: 996px) {
+  .heroBanner {
+    padding: 2rem;
+  }
+}
+
+.buttons {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+}
\ No newline at end of file