-
Notifications
You must be signed in to change notification settings - Fork 252
Add /stats and /latest_frame endpoints #2118
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -231,6 +231,7 @@ | |
| InferencePipelineStatusResponse, | ||
| InitializeWebRTCPipelineResponse, | ||
| InitializeWebRTCResponse, | ||
| LatestFrameResponse, | ||
| ListPipelinesResponse, | ||
| ) | ||
| from inference.core.interfaces.stream_manager.api.stream_manager_client import ( | ||
|
|
@@ -1893,6 +1894,64 @@ async def consume( | |
| excluded_fields=request.excluded_fields, | ||
| ) | ||
|
|
||
@app.get(
    "/stats",
    summary="Aggregated pipeline statistics",
    description=(
        "Returns the average camera FPS and average inference FPS across "
        "all registered inference pipelines, plus the number of running "
        "pipelines. Averages are None when no samples were collected."
    ),
)
@with_route_exceptions_async
async def get_stats():
    """Aggregate camera FPS, inference FPS and stream count over all pipelines.

    Best-effort: if the stream manager is unavailable or a pipeline status
    query fails, the endpoint still responds with whatever was collected
    (possibly all-empty) rather than erroring out.

    Returns:
        dict with keys `camera_fps` and `inference_fps` (arithmetic means of
        the collected samples, or None when nothing was collected) and
        `stream_count` (number of registered pipelines).
    """

    def _mean(values):
        # Average of the collected samples, or None when none were collected.
        return sum(values) / len(values) if values else None

    stream_count = 0
    camera_fps_values = []
    inference_fps_values = []
    if self.stream_manager_client is not None:
        try:
            pipelines_resp = await self.stream_manager_client.list_pipelines()
            pipeline_ids = pipelines_resp.pipelines
            stream_count = len(pipeline_ids)
            for pid in pipeline_ids:
                status_resp = await self.stream_manager_client.get_status(pid)
                report = status_resp.report
                # Only positive throughput counts as a valid sample.
                throughput = report.get("inference_throughput", 0.0)
                if throughput and throughput > 0:
                    inference_fps_values.append(throughput)
                for src in report.get("sources_metadata", []):
                    # source_properties may be absent or explicitly None.
                    props = src.get("source_properties") or {}
                    fps = props.get("fps")
                    if fps and fps > 0:
                        camera_fps_values.append(fps)
        except Exception:
            # Best-effort endpoint: one failing pipeline must not break
            # /stats — but swallowing silently hides real errors, so log.
            import logging

            logging.getLogger(__name__).exception(
                "Failed to collect pipeline statistics"
            )
    return {
        "camera_fps": _mean(camera_fps_values),
        "inference_fps": _mean(inference_fps_values),
        "stream_count": stream_count,
    }
|
|
||
@app.get(
    "/inference_pipelines/{pipeline_id}/latest_frame",
    response_model=LatestFrameResponse,
    summary="[EXPERIMENTAL] Get latest frame from InferencePipeline",
    description=(
        "Returns the most recent frame buffered by the given pipeline, "
        "JPEG-encoded and base64-wrapped, together with its metadata "
        "(frame id, timestamp, source id). Fields are None when no frame "
        "is available yet."
    ),
)
@with_route_exceptions_async
async def latest_frame(
    pipeline_id: str,
) -> LatestFrameResponse:
    """Proxy a LATEST_FRAME request for `pipeline_id` to the stream manager.

    NOTE(review): unlike /stats, this endpoint does not guard against
    `self.stream_manager_client` being None — confirm the client is always
    initialised before this route can be hit.
    """
    return await self.stream_manager_client.get_latest_frame(
        pipeline_id=pipeline_id
    )
|
|
||
| class ModelInitState: | ||
| """Class to track model initialization state.""" | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,4 +1,5 @@ | ||
| import asyncio | ||
| import base64 | ||
| import json | ||
| import os | ||
| import signal | ||
|
|
@@ -148,6 +149,8 @@ def _handle_command(self, request_id: str, payload: dict) -> None: | |
| return self._get_pipeline_status(request_id=request_id) | ||
| if command_type is CommandType.CONSUME_RESULT: | ||
| return self._consume_results(request_id=request_id, payload=payload) | ||
| if command_type is CommandType.LATEST_FRAME: | ||
| return self._handle_latest_frame(request_id=request_id) | ||
| raise NotImplementedError( | ||
| f"Command type `{command_type}` cannot be handled" | ||
| ) | ||
|
|
@@ -636,6 +639,56 @@ def _consume_results(self, request_id: str, payload: dict) -> None: | |
| error_type=ErrorType.OPERATION_ERROR, | ||
| ) | ||
|
|
||
def _handle_latest_frame(self, request_id: str) -> None:
    """Respond to a LATEST_FRAME command with the newest buffered frame.

    Puts `(request_id, payload)` on `self._responses_queue`, where the
    payload carries the latest frame JPEG-encoded as base64 plus its
    metadata, or all-None fields when no frame is available yet. Never
    raises: unexpected errors are routed through `self._handle_error`.
    """

    def _no_frame_payload() -> dict:
        # Successful response signalling "no frame available yet".
        return {
            STATUS_KEY: OperationStatus.SUCCESS,
            "frame_data": None,
            "frame_id": None,
            "frame_timestamp": None,
            "source_id": None,
        }

    try:
        if self._buffer_sink is None or self._buffer_sink.empty():
            self._responses_queue.put((request_id, _no_frame_payload()))
            return None
        # Peek at the last item in the buffer (non-destructive).
        # NOTE(review): reaches into the sink's private `_buffer`; consider
        # exposing a public peek method on the sink instead.
        predictions, frames = self._buffer_sink._buffer[-1]
        # Find the last non-None VideoFrame in the batch.
        frame = next((f for f in reversed(frames) if f is not None), None)
        if frame is None:
            self._responses_queue.put((request_id, _no_frame_payload()))
            return None
        # Quality 70 is a fixed size/fidelity trade-off.
        # NOTE(review): could be parametrised via the request (e.g. an enum
        # of sensible values) if callers need control over it.
        _, jpeg_bytes = cv.imencode(
            ".jpg", frame.image, [cv.IMWRITE_JPEG_QUALITY, 70]
        )
        frame_b64 = base64.b64encode(jpeg_bytes.tobytes()).decode("ascii")
        response_payload = {
            STATUS_KEY: OperationStatus.SUCCESS,
            "frame_data": frame_b64,
            "frame_id": frame.frame_id,
            "frame_timestamp": frame.frame_timestamp.isoformat(),
            "source_id": frame.source_id,
        }
        self._responses_queue.put((request_id, response_payload))
    except Exception as error:
        self._handle_error(
            request_id=request_id,
            error=error,
            public_error_message="Unexpected error retrieving latest frame.",
            error_type=ErrorType.OPERATION_ERROR,
        )
|
|
||
| def _handle_error( | ||
| self, | ||
| request_id: str, | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
`description` + `response_model`, please ;)