Skip to content

Commit

Permalink
Fix test_blue_onyx so it works against CPAI
Browse files Browse the repository at this point in the history
  • Loading branch information
xnorpx committed Jan 5, 2025
1 parent eede306 commit 516aed5
Show file tree
Hide file tree
Showing 4 changed files with 66 additions and 40 deletions.
14 changes: 7 additions & 7 deletions src/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ pub struct VisionDetectionRequest {

#[allow(non_snake_case)]
#[derive(Serialize, Deserialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(rename_all = "camelCase", default)]
pub struct VisionDetectionResponse {
/// True if successful.
pub success: bool,
Expand All @@ -28,17 +28,17 @@ pub struct VisionDetectionResponse {
/// The command that was sent as part of this request. Can be detect, list, status.
pub command: String,
/// The Id of the module that processed this request.
pub module_id: String,
pub moduleId: String,
/// The name of the device or package handling the inference. eg CPU, GPU
pub execution_provider: String,
pub executionProvider: String,
/// True if this module can use the current GPU if one is present.
pub can_useGPU: bool,
pub canUseGPU: bool,
// The time (ms) to perform the AI inference.
pub inference_ms: i32,
pub inferenceMs: i32,
// The time (ms) to process the image (includes inference and image manipulation operations).
pub process_ms: i32,
pub processMs: i32,
// The time (ms) for the round trip to the analysis module and back.
pub analysis_round_trip_ms: i32,
pub analysisRoundTripMs: i32,
}

#[derive(Serialize, Deserialize, Clone, PartialEq)]
Expand Down
34 changes: 30 additions & 4 deletions src/bin/test_blue_onyx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,12 +78,13 @@ async fn main() -> anyhow::Result<()> {
results.into_iter().for_each(|result| {
if let Ok(Ok(result)) = result {
vision_detection_response = result.0;
inference_times.push(vision_detection_response.inference_ms);
processing_times.push(vision_detection_response.process_ms);
inference_times.push(vision_detection_response.inferenceMs);
processing_times.push(vision_detection_response.processMs);
request_times.push(result.1);
}
});

assert!(inference_times.len() == args.number_of_requests as usize);
println!("{:#?}", vision_detection_response);

println!("Runtime duration: {:?}", runtime_duration);
Expand Down Expand Up @@ -151,8 +152,33 @@ async fn send_vision_detection_request(
.part("image", image_part);

let request_start_time = Instant::now();
let response = client.post(url).multipart(form).send().await?;
let response = response.json::<VisionDetectionResponse>().await?;
let response = match client.post(url).multipart(form).send().await {
Ok(resp) => resp,
Err(e) => {
eprintln!("Request send error: {}", e);
return Err(anyhow::anyhow!(e));
}
};
if !response.status().is_success() {
let status = response.status();
let body = match response.text().await {
Ok(text) => text,
Err(e) => {
eprintln!("Failed to read response body: {}", e);
return Err(anyhow::anyhow!(e));
}
};
eprintln!("Error: Status: {}, Body: {}", status, body);
return Err(anyhow::anyhow!("Request failed with status {}", status));
}
let response = match response.json::<VisionDetectionResponse>().await {
Ok(json) => json,
Err(e) => {

eprintln!("Failed to parse JSON: {}", e);
return Err(anyhow::anyhow!(e));
}
};

Ok((response, Instant::now().duration_since(request_start_time)))
}
34 changes: 17 additions & 17 deletions src/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ async fn v1_vision_detection(
return Err(BlueOnyxError::from(anyhow::anyhow!("Operation timed out")));
}
};
vision_response.analysis_round_trip_ms = request_start_time.elapsed().as_millis() as i32;
vision_response.analysisRoundTripMs = request_start_time.elapsed().as_millis() as i32;

{
let mut metrics = server_state.metrics.lock().await;
Expand Down Expand Up @@ -356,23 +356,23 @@ impl Metrics {
self.number_of_requests = self.number_of_requests.wrapping_add(1);
self.total_inference_ms = self
.total_inference_ms
.wrapping_add(response.inference_ms as u128);
self.min_inference_ms = self.min_inference_ms.min(response.inference_ms);
self.max_inference_ms = self.max_inference_ms.max(response.inference_ms);
.wrapping_add(response.inferenceMs as u128);
self.min_inference_ms = self.min_inference_ms.min(response.inferenceMs);
self.max_inference_ms = self.max_inference_ms.max(response.inferenceMs);
self.total_processing_ms = self
.total_processing_ms
.wrapping_add(response.process_ms as u128);
self.min_processing_ms = self.min_processing_ms.min(response.process_ms);
self.max_processing_ms = self.max_processing_ms.max(response.process_ms);
.wrapping_add(response.processMs as u128);
self.min_processing_ms = self.min_processing_ms.min(response.processMs);
self.max_processing_ms = self.max_processing_ms.max(response.processMs);
self.total_analysis_round_trip_ms = self
.total_analysis_round_trip_ms
.wrapping_add(response.analysis_round_trip_ms as u128);
.wrapping_add(response.analysisRoundTripMs as u128);
self.min_analysis_round_trip_ms = self
.min_analysis_round_trip_ms
.min(response.analysis_round_trip_ms);
.min(response.analysisRoundTripMs);
self.max_analysis_round_trip_ms = self
.max_analysis_round_trip_ms
.max(response.analysis_round_trip_ms);
.max(response.analysisRoundTripMs);
}

fn update_dropped_requests(&mut self) {
Expand Down Expand Up @@ -471,7 +471,7 @@ async fn handle_upload(
image_data: Some(&data_url),
};

vision_response.analysis_round_trip_ms =
vision_response.analysisRoundTripMs =
request_start_time.elapsed().as_millis() as i32;

{
Expand Down Expand Up @@ -500,12 +500,12 @@ impl IntoResponse for BlueOnyxError {
predictions: vec![],
count: 0,
command: "".into(),
module_id: "".into(),
execution_provider: "".into(),
can_useGPU: false,
inference_ms: 0_i32,
process_ms: 0_i32,
analysis_round_trip_ms: 0_i32,
moduleId: "".into(),
executionProvider: "".into(),
canUseGPU: false,
inferenceMs: 0_i32,
processMs: 0_i32,
analysisRoundTripMs: 0_i32,
}),
)
.into_response()
Expand Down
24 changes: 12 additions & 12 deletions src/worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -107,12 +107,12 @@ impl DetectorWorker {
predictions: detect_result.predictions.to_vec(),
count: detect_result.predictions.len() as i32,
command: "detect".into(),
module_id: self.detector.get_model_name().clone(),
execution_provider: detect_result.endpoint_provider.to_string(),
can_useGPU: detect_result.device_type == DeviceType::GPU,
inference_ms: detect_result.inference_time.as_millis() as i32,
process_ms: detect_result.processing_time.as_millis() as i32,
analysis_round_trip_ms: 0_i32,
moduleId: self.detector.get_model_name().clone(),
executionProvider: detect_result.endpoint_provider.to_string(),
canUseGPU: detect_result.device_type == DeviceType::GPU,
inferenceMs: detect_result.inference_time.as_millis() as i32,
processMs: detect_result.processing_time.as_millis() as i32,
analysisRoundTripMs: 0_i32,
},
Err(err) => VisionDetectionResponse {
success: false,
Expand All @@ -121,12 +121,12 @@ impl DetectorWorker {
predictions: vec![],
count: 0,
command: "detect".into(),
module_id: self.detector.get_model_name().clone(),
execution_provider: "CPU".into(),
can_useGPU: false,
inference_ms: 0_i32,
process_ms: 0_i32,
analysis_round_trip_ms: 0_i32,
moduleId: self.detector.get_model_name().clone(),
executionProvider: "CPU".into(),
canUseGPU: false,
inferenceMs: 0_i32,
processMs: 0_i32,
analysisRoundTripMs: 0_i32,
},
};

Expand Down

0 comments on commit 516aed5

Please sign in to comment.