OLD | NEW |
(Empty) | |
| 1 syntax = "proto2"; |
| 2 |
| 3 option optimize_for = LITE_RUNTIME; |
| 4 |
| 5 package mri; |
| 6 |
| 7 // The output of a Drishti media perception graph. Implicitly tied to the |
| 8 // MediaPerception dictionary defined in Chromium source at |
| 9 // src/extensions/common/api/media_perception_private.idl for the |
| 10 // Chromium mediaPerceptionPrivate API. A duplicate copy of this proto also |
| 11 // exists in Chromium codebase. |
| 12 // |
| 13 // The duplicate copy of this proto lives in Chromium source here: |
| 14 // src/chromeos/dbus/proto/media_perception.proto |
| 15 // If you change this file, you need to make sure to update this file in the |
| 16 // Chromium source tree as well. |
| 17 // |
| 18 // This message is packaged by the Drishti graph runner when a PerceptionSample |
| 19 // or array of PerceptionSamples comes out of the graph. |
// The top-level output of the media perception graph, packaged by the Drishti
// graph runner when a PerceptionSample or array of PerceptionSamples comes
// out of the graph.
message MediaPerception {
  // The timestamp attached when this data originated from the analysis
  // process, in milliseconds since the Unix epoch.
  optional uint64 timestamp = 1;

  // A single FramePerception message or array of perceptions (if reporting the
  // results from multiple frames).
  repeated FramePerception frame_perception = 2;
}
| 28 |
// Used to transmit a history of image frames and their associated annotations.
// This history is accumulated over time by the Drishti graph runner.
message Diagnostics {
  // Accumulated samples; each entry pairs a frame's perception metadata with
  // its raw image data (see PerceptionSample).
  repeated PerceptionSample perception_sample = 1;
}
| 34 |
// The state of the media analytics process, used both for reporting status
// and for requesting state changes (via SetState).
message State {
  // Lifecycle status of the media analytics process.
  enum Status {
    STATUS_UNSPECIFIED = 0;  // Unused required default value for Proto enums.
    TIMEOUT = 1;        // Unable to reach media analysis process.
    UNINITIALIZED = 2;  // Media analytics working on loading configuration.
    STARTED = 3;        // Analysis process running but not receiving frames.
    RUNNING = 4;        // Analysis process running and ingesting frames.
    SUSPENDED = 5;      // Media analytics process waiting to be started.
  }

  // Note: RUNNING and SUSPENDED are the only two states which should be sent to
  // SetState.
  optional Status status = 1;

  // Device context so that the media analytics process can better select the
  // right video device to open.
  optional string device_context = 2;
}
| 53 |
// This is the output of the MediaPerceptionSinkCalculator.
message PerceptionSample {
  // The computer vision metadata computed for one frame.
  optional FramePerception frame_perception = 1;
  // The image frame data associated with the frame perception.
  optional RawImageFrame raw_image_frame = 2;
}
| 60 |
// An uncompressed image frame.
// Note: this is a replica of image/content/flow/image/raw_image_data.proto
// because this proto needs to be self-contained (to be checked in to Chromium
// as well).
message RawImageFrame {
  // Frame width in pixels.
  optional int32 width = 1;
  // Frame height in pixels.
  optional int32 height = 2;
  // colorspace is defined in the same way as SimpleImage::ColorSpace.
  optional int32 colorspace = 3;
  // By default, 1 channel means Grayscale, 2 channels means Grayscale + Alpha,
  // 3 channels means RGB, and 4 channels means RGBA.
  optional int32 channels = 4;
  // The raw pixel data as a string of uint8.
  // The size of pixel_data is height*width*channels.
  // Byte order is RGBARGBARGBA.
  // TODO(lasoren): Replace with compressed image format.
  optional bytes pixel_data = 5;
}
| 78 |
// The set of computer vision metadata for an image frame.
message FramePerception {
  // Identifier for the frame this perception belongs to.
  optional uint64 frame_id = 1;

  // Dimensions of the analyzed frame, in pixels.
  optional uint32 frame_width_in_px = 2;
  optional uint32 frame_height_in_px = 3;

  // The timestamp associated with the frame (when it enters the Drishti
  // graph).
  optional uint64 timestamp = 4;

  // The list of entities detected for this frame.
  repeated Entity entity = 5;
}
| 93 |
// A single detected entity (e.g. a face or a person) within a frame.
message Entity {
  // A unique id associated with the detected entity, which can be used to track
  // the entity over time.
  optional uint32 id = 1;

  // The kind of entity that was detected.
  enum EntityType {
    UNSPECIFIED = 0;
    FACE = 1;
    PERSON = 2;
  }

  optional EntityType type = 2;

  // Minimum box, which captures entire detected entity.
  optional BoundingBox bounding_box = 3;

  // A value for the quality of this detection.
  optional float confidence = 4;
}
| 113 |
// An axis-aligned rectangle defined by its top-left and bottom-right corners.
message BoundingBox {
  // The points that define the corners of a bounding box.
  optional Point top_left = 1;
  optional Point bottom_right = 2;
  // Indicates whether or not these coordinates are normalized to values between
  // 0 and 1.
  optional bool normalized = 3 [default = false];
}
| 122 |
// A 2D point in image coordinates, with the origin at the top-left corner of
// the image. Coordinates may be normalized (see BoundingBox.normalized).
message Point {
  // x represents the horizontal distance from the top left corner of the image
  // to the point.
  optional float x = 1;
  // y represents the vertical distance from the top left corner of the image to
  // the point.
  optional float y = 2;
}
OLD | NEW |