Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 syntax = "proto2"; | |
| 2 | |
| 3 option optimize_for = LITE_RUNTIME; | |
| 4 | |
| 5 package mri; | |
| 6 | |
| 7 // The output of the media analytics process. Implicitly tied to the | |
| 8 // MediaPerception dictionary defined in Chromium source at | |
| 9 // src/extensions/common/api/media_perception_private.idl for the | |
| 10 // Chromium mediaPerceptionPrivate API. This proto has two copies - one being | |
| 11 // here (the other internal to Google) - that must be updated together. | |
|
tbarzic
2017/05/09 20:28:41
Maybe:
This file has to kept in sync with changes
rkc1
2017/05/09 20:38:49
"...that must be updated together."
This is not q
Luke Sorenson
2017/05/09 22:21:25
Done.
| |
| 12 // | |
| 13 // This message is packaged by the graph runner when a PerceptionSample | |
| 14 // or array of PerceptionSamples comes out of the graph. | |
message MediaPerception {
  // The timestamp attached when this data originated from the analysis
  // process.
  optional uint64 timestamp = 1;  // In milliseconds since Epoch.

  // A single FramePerception message or an array of perceptions (if reporting
  // the results from multiple frames).
  repeated FramePerception frame_perception = 2;
}
| 23 | |
| 24 // Used to transmit a history of image frames and their associated annotations. | |
| 25 // This is accumulated over time by the graph runner. | |
message Diagnostics {
  // The accumulated history of image frames with their associated
  // frame perceptions.
  repeated PerceptionSample perception_sample = 1;
}
| 29 | |
message State {
  enum Status {
    STATUS_UNSPECIFIED = 0;  // Unused required default value for Proto enums.
    TIMEOUT = 1;  // Unable to reach media analysis process.
    UNINITIALIZED = 2;  // Media analytics working on loading configuration.
    STARTED = 3;  // Analysis process running but not receiving frames.
    RUNNING = 4;  // Analysis process running and ingesting frames.
    SUSPENDED = 5;  // Media analytics process waiting to be started.
  }

  // Note: RUNNING and SUSPENDED are the only two states which should be sent
  // to SetState.
  optional Status status = 1;

  // Device context so that the media analytics process can better select the
  // right video device to open.
  optional string device_context = 2;
}
| 48 | |
| 49 // This is the output of the MediaPerceptionSinkCalculator. | |
message PerceptionSample {
  // The computer vision metadata produced for a single frame.
  optional FramePerception frame_perception = 1;
  // The image frame data associated with the frame perception.
  optional RawImageFrame raw_image_frame = 2;
}
| 55 | |
| 56 // Note: this is a replica of image/content/flow/image/raw_image_data.proto | |
| 57 // because this proto needs to be self-contained (to be checked in to Chromium | |
| 58 // as well). | |
message RawImageFrame {
  // Frame dimensions, in pixels.
  optional int32 width = 1;
  optional int32 height = 2;
  // colorspace is defined in the same way as SimpleImage::ColorSpace.
  optional int32 colorspace = 3;
  // By default, 1 channel means Grayscale, 2 channels means Grayscale + Alpha,
  // 3 channels means RGB, and 4 channels means RGBA.
  optional int32 channels = 4;
  // The raw pixel data as a string of uint8.
  // The size of pixel_data is height*width*channels.
  // Byte order is RGBARGBARGBA.
  // TODO(lasoren): Replace with compressed image format.
  optional bytes pixel_data = 5;
}
| 73 | |
| 74 // The set of computer vision metadata for an image frame. | |
message FramePerception {
  // Identifier for this frame. NOTE(review): presumably assigned by the graph
  // runner per incoming frame — confirm with the producer.
  optional uint64 frame_id = 1;

  // Frame dimensions, in pixels.
  optional uint32 frame_width_in_px = 2;
  optional uint32 frame_height_in_px = 3;

  // The timestamp associated with the frame (when it enters the graph).
  optional uint64 timestamp = 4;

  // The list of entities detected for this frame.
  repeated Entity entity = 5;
}
| 87 | |
message Entity {
  // A unique id associated with the detected entity, which can be used to
  // track the entity over time.
  optional uint32 id = 1;

  enum EntityType {
    UNSPECIFIED = 0;  // Required default value for Proto enums.
    FACE = 1;
    PERSON = 2;
  }

  // The type of the detected entity.
  optional EntityType type = 2;

  // Minimum box, which captures entire detected entity.
  optional BoundingBox bounding_box = 3;

  // A value for the quality of this detection.
  optional float confidence = 4;
}
| 107 | |
message BoundingBox {
  // The points that define the corners of a bounding box.
  optional Point top_left = 1;
  optional Point bottom_right = 2;
  // Indicates whether or not these coordinates are normalized to values
  // between 0 and 1.
  optional bool normalized = 3 [default = false];
}
| 116 | |
message Point {
  // x represents the horizontal distance from the top left corner of the
  // image to the point.
  optional float x = 1;
  // y represents the vertical distance from the top left corner of the image
  // to the point.
  optional float y = 2;
}
| OLD | NEW |