Skip to content

Commit

Permalink
update README & add doc of stream::queue (#185)
Browse files Browse the repository at this point in the history
  • Loading branch information
kanarus authored Jun 17, 2024
1 parent 87ad61d commit 2d2a54d
Show file tree
Hide file tree
Showing 3 changed files with 144 additions and 1 deletion.
34 changes: 33 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ async fn create_user(body: CreateUserRequest<'_>) -> Created<User> {
}

/* Shorthand for Payload + Serialize */
#[Payload(JSON / S)]
#[Payload(JSON/S)]
struct SearchResult {
title: String,
}
Expand All @@ -158,6 +158,35 @@ async fn search(condition: SearchQuery<'_>) -> Vec<SearchResult> {

<br>

### Payload validation

`where <validation expression>` in `#[Payload()]` runs the validation when responding with the payload or when parsing a request into it.

`<validation expression>` is an expression with `self: &Self` that returns `Result<(), impl Display>`.

```rust
use ohkami::prelude::*;
use ohkami::{typed::Payload, builtin::payload::JSON};

#[Payload(JSON/D where self.valid())]
struct Hello<'req> {
name: &'req str,
repeat: usize,
}

impl Hello<'_> {
fn valid(&self) -> Result<(), String> {
(self.name.len() > 0).then_some(())
.ok_or_else(|| format!("`name` must not be empty"))?;
(self.repeat > 0).then_some(())
.ok_or_else(|| format!("`repeat` must be positive"))?;
Ok(())
}
}
```

<br>

### Use middlewares

Ohkami's request handling system is called "**fang**s", and middlewares are implemented on this :
Expand All @@ -167,6 +196,8 @@ use ohkami::prelude::*;
#[derive(Clone)]
struct GreetingFang;
/* utility trait for auto impl `Fang` */
impl FangAction for GreetingFang {
async fn fore<'a>(&'a self, req: &'a mut Request) -> Result<(), Response> {
        println!("Welcome request!: {req:?}");
Expand Down Expand Up @@ -302,6 +333,7 @@ async fn test_my_ohkami() {
- [ ] HTTP/2
- [ ] HTTP/3
- [ ] HTTPS
- [x] Server-Sent Events
- [ ] WebSocket

## MSRV (Minimum Supported Rust Version)
Expand Down
1 change: 1 addition & 0 deletions ohkami/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -452,6 +452,7 @@ pub mod serde {
pub use ::ohkami_macros::{Serialize, Deserialize};
pub use ::serde::ser::{self, Serialize, Serializer};
pub use ::serde::de::{self, Deserialize, Deserializer};
pub use ::serde_json as json;
}

// #[cfg(feature="websocket")]
Expand Down
110 changes: 110 additions & 0 deletions ohkami_lib/src/stream.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,116 @@
pub use ::futures_core::{Stream, ready};


/// # Stream of an async process with a queue
///
/// `queue(|mut q| async move { 〜 })` makes a queue for `T` values
/// and an async process that pushes items to the queue; together they work as
/// a stream yielding all the items asynchronously.
///
/// <br>
///
/// _**note**_ : It's recommended to just `use ohkami::utils::stream` and
/// call it as **`stream::queue()`**, rather than importing `queue()` directly.
///
/// <br>
///
/// ---
/// *example.rs*
/// ```no_run
/// use ohkami::prelude::*;
/// use ohkami::typed::DataStream;
/// use ohkami::utils::{StreamExt, stream};
/// use tokio::time::sleep;
///
/// #[tokio::main]
/// async fn main() {
/// let qs = stream::queue(|mut q| async move {
/// for i in 1..=5 {
/// sleep(std::time::Duration::from_secs(1)).await;
/// q.push(format!("Hello, I'm message#{i}!"))
/// }
///
/// sleep(std::time::Duration::from_secs(1)).await;
///
/// q.push("done".to_string())
/// });
/// }
/// ```
///
/// <br>
///
/// ---
/// *openai.rs*
/// ```ignore
/// use ohkami::prelude::*;
/// use ohkami::Memory;
/// use ohkami::typed::DataStream;
/// use ohkami::utils::{StreamExt, stream};
///
/// pub async fn relay_chat_completion(
/// api_key: Memory<'_, &'static str>,
/// UserMessage(message): UserMessage,
/// ) -> Result<DataStream<String, Error>, Error> {
/// let mut gpt_response = reqwest::Client::new()
/// .post("https://api.openai.com/v1/chat/completions")
/// .bearer_auth(*api_key)
/// .json(&ChatCompletions {
/// model: "gpt-4o",
/// stream: true,
/// messages: vec![
/// ChatMessage {
/// role: Role::user,
/// content: message,
/// }
/// ],
/// })
/// .send().await?
/// .bytes_stream();
///
/// Ok(DataStream::from_stream(stream::queue(|mut q| async move {
///         let mut push_line = |mut line: String| {
///             if let Some(stripped) = line.strip_suffix("\n\n") {
///                 line = stripped.to_owned();
///             }
///
/// #[cfg(debug_assertions)] {
/// if line != "[DONE]" {
/// let chunk: models::ChatCompletionChunk
/// = serde_json::from_str(&line).unwrap();
/// let content = chunk
/// .choices[0]
/// .delta
///                     .content.as_deref().unwrap_or("");
/// print!("{content}");
/// std::io::Write::flush(&mut std::io::stdout()).ok();
/// } else {
/// println!()
/// }
/// }
///
/// q.push(Ok(line));
/// };
///
/// let mut remaining = String::new();
/// while let Some(Ok(raw_chunk)) = gpt_response.next().await {
/// for line in std::str::from_utf8(&raw_chunk).unwrap()
/// .split_inclusive("\n\n")
/// {
/// if let Some(data) = line.strip_prefix("data: ") {
/// if data.ends_with("\n\n") {
/// push_line(data.to_string())
/// } else {
/// remaining = data.into()
/// }
/// } else {
/// #[cfg(debug_assertions)] {
/// assert!(line.ends_with("\n\n"))
/// }
/// push_line(std::mem::take(&mut remaining) + line)
/// }
/// }
/// }
/// })))
/// }
/// ```
pub fn queue<T, F, Fut>(f: F) -> stream::QueueStream<F, T, Fut>
where
F: FnOnce(stream::Queue<T>) -> Fut,
Expand Down

0 comments on commit 2d2a54d

Please sign in to comment.