chore(examples): minor formatting changes (#307)
stainless-app[bot] committed Aug 19, 2024
1 parent 9cdca40 · commit a3251b2
Showing 4 changed files with 14 additions and 14 deletions.
tests/api_resources/commits/test_test_results.py (4 changes: 2 additions & 2 deletions)
@@ -31,7 +31,7 @@ def test_method_list_with_all_params(self, client: Openlayer) -> None:
            include_archived=True,
            page=1,
            per_page=1,
            status="passing",
            status="running",
            type="integrity",
        )
        assert_matches_type(TestResultListResponse, test_result, path=["response"])
@@ -85,7 +85,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncOpenlayer) -> None:
            include_archived=True,
            page=1,
            per_page=1,
            status="passing",
            status="running",
            type="integrity",
        )
        assert_matches_type(TestResultListResponse, test_result, path=["response"])
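For context, the listing call these tests exercise looks roughly like the sketch below. The resource path (client.commits.test_results.list) and the project_version_id parameter name are assumptions inferred from the test file's location under tests/api_resources/commits/ and are not visible in the hunks; the remaining arguments come straight from the diff.

from openlayer import Openlayer

client = Openlayer()  # assumes OPENLAYER_API_KEY is set in the environment

test_results = client.commits.test_results.list(  # assumed method path
    project_version_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",  # assumed parameter name
    include_archived=True,
    page=1,
    per_page=1,
    status="running",  # the diff shows both "passing" and "running" as status filters
    type="integrity",
)
print(test_results)
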
tests/api_resources/inference_pipelines/test_data.py (12 changes: 6 additions & 6 deletions)
@@ -39,19 +39,19 @@ def test_method_stream_with_all_params(self, client: Openlayer) -> None:
        data = client.inference_pipelines.data.stream(
            inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            config={
                "num_of_token_column_name": "tokens",
                "output_column_name": "output",
                "context_column_name": "context",
                "cost_column_name": "cost",
                "ground_truth_column_name": "ground_truth",
                "inference_id_column_name": "id",
                "input_variable_names": ["user_query"],
                "latency_column_name": "latency",
                "metadata": {},
                "output_column_name": "output",
                "num_of_token_column_name": "tokens",
                "prompt": [
                    {
                        "role": "user",
                        "content": "{{ user_query }}",
                        "role": "user",
                    }
                ],
                "question_column_name": "question",
@@ -156,19 +156,19 @@ async def test_method_stream_with_all_params(self, async_client: AsyncOpenlayer) -> None:
        data = await async_client.inference_pipelines.data.stream(
            inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            config={
                "num_of_token_column_name": "tokens",
                "output_column_name": "output",
                "context_column_name": "context",
                "cost_column_name": "cost",
                "ground_truth_column_name": "ground_truth",
                "inference_id_column_name": "id",
                "input_variable_names": ["user_query"],
                "latency_column_name": "latency",
                "metadata": {},
                "output_column_name": "output",
                "num_of_token_column_name": "tokens",
                "prompt": [
                    {
                        "role": "user",
                        "content": "{{ user_query }}",
                        "role": "user",
                    }
                ],
                "question_column_name": "question",
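The reordered keys above all belong to the config argument of the data-stream call. A minimal sketch of that call, under stated assumptions, is below; the rows argument is not part of the visible hunks, and its shape (one dict per inference, keyed by the column names declared in config) is an assumption. The config keys themselves are taken directly from the diff.

from openlayer import Openlayer

client = Openlayer()  # assumes OPENLAYER_API_KEY is set in the environment

response = client.inference_pipelines.data.stream(
    inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    config={
        "input_variable_names": ["user_query"],
        "output_column_name": "output",
        "cost_column_name": "cost",
        "latency_column_name": "latency",
        "prompt": [{"role": "user", "content": "{{ user_query }}"}],
    },
    rows=[  # assumed shape: one record per inference, keyed by the columns above
        {
            "user_query": "How do I stream data to Openlayer?",
            "output": "Use inference_pipelines.data.stream.",
            "cost": 0.0002,
            "latency": 120,
        }
    ],
)
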
tests/api_resources/inference_pipelines/test_rows.py (8 changes: 4 additions & 4 deletions)
@@ -33,11 +33,11 @@ def test_method_update_with_all_params(self, client: Openlayer) -> None:
            inference_id="inferenceId",
            row={},
            config={
                "ground_truth_column_name": "ground_truth",
                "human_feedback_column_name": "human_feedback",
                "inference_id_column_name": "id",
                "latency_column_name": "latency",
                "timestamp_column_name": "timestamp",
                "ground_truth_column_name": "ground_truth",
                "human_feedback_column_name": "human_feedback",
            },
        )
        assert_matches_type(RowUpdateResponse, row, path=["response"])
@@ -99,11 +99,11 @@ async def test_method_update_with_all_params(self, async_client: AsyncOpenlayer) -> None:
            inference_id="inferenceId",
            row={},
            config={
                "ground_truth_column_name": "ground_truth",
                "human_feedback_column_name": "human_feedback",
                "inference_id_column_name": "id",
                "latency_column_name": "latency",
                "timestamp_column_name": "timestamp",
                "ground_truth_column_name": "ground_truth",
                "human_feedback_column_name": "human_feedback",
            },
        )
        assert_matches_type(RowUpdateResponse, row, path=["response"])
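These hunks only touch the config dict of the row-update call. A sketch of the full call follows; the method path is inferred from the test name and file location, inference_pipeline_id is not visible in the hunks and is assumed to be the same UUID used elsewhere in these tests, and the row payload here is illustrative (the test passes an empty dict).

from openlayer import Openlayer

client = Openlayer()  # assumes OPENLAYER_API_KEY is set in the environment

row = client.inference_pipelines.rows.update(  # inferred method path
    inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",  # assumed, not in the hunk
    inference_id="inferenceId",  # id of the inference (row) to update
    row={"human_feedback": 1},   # illustrative payload; the test uses an empty dict
    config={
        "inference_id_column_name": "id",
        "ground_truth_column_name": "ground_truth",
        "human_feedback_column_name": "human_feedback",
        "latency_column_name": "latency",
        "timestamp_column_name": "timestamp",
    },
)
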
tests/api_resources/inference_pipelines/test_test_results.py (4 changes: 2 additions & 2 deletions)
@@ -30,7 +30,7 @@ def test_method_list_with_all_params(self, client: Openlayer) -> None:
            inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            page=1,
            per_page=1,
            status="passing",
            status="running",
            type="integrity",
        )
        assert_matches_type(TestResultListResponse, test_result, path=["response"])
@@ -83,7 +83,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncOpenlayer) -> None:
            inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
            page=1,
            per_page=1,
            status="passing",
            status="running",
            type="integrity",
        )
        assert_matches_type(TestResultListResponse, test_result, path=["response"])
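This last file is the inference-pipeline counterpart of the commits listing shown earlier. A matching sketch, with the method path assumed from the test file's location; inference_pipeline_id and the other arguments are taken from the hunks above.

from openlayer import Openlayer

client = Openlayer()  # assumes OPENLAYER_API_KEY is set in the environment

test_results = client.inference_pipelines.test_results.list(  # assumed method path
    inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    page=1,
    per_page=1,
    status="running",  # the diff shows both "passing" and "running" as status filters
    type="integrity",
)
print(test_results)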
