OLD | NEW |
---|---|
(Empty) | |
1 syntax = "proto2"; | |
2 | |
3 option optimize_for = LITE_RUNTIME; | |
4 | |
5 package mri; | |
6 | |
7 // The output of a Drishti media perception graph. Implicitly tied to the | |
8 // MediaPerception dictionary defined in Chromium source at | |
9 // src/extensions/common/api/media_perception_private.idl for the | |
10 // Chromium mediaPerceptionPrivate API. A duplicate copy of this proto also | |
11 // exists in Chromium for the mediaPerceptionPrivate API implementation to | |
tbarzic
2017/05/05 21:10:42
Is this still a valid statement?
I'd prefer to have
Luke Sorenson
2017/05/08 19:06:08
Edited the text a little. To be clear there is jus
tbarzic
2017/05/08 23:04:55
Oh.
Can you please reword this a bit further. Some
Luke Sorenson
2017/05/09 17:39:45
Done.
Also removed references to "Drishti" since
| |
12 // deserialize this proto. | |
13 // | |
14 // The duplicate copy of this proto lives in Chromium source here: | |
15 // src/chromeos/media_perception/media_perception.proto | |
16 // If you change this file, you need to make sure to update this file in the | |
17 // Chromium source tree as well. | |
18 // | |
19 // This message is packaged by the Drishti graph runner when a PerceptionSample | |
20 // or array of PerceptionSamples comes out of the graph. | |
message MediaPerception {
  // The timestamp attached when this data originated from the analysis
  // process.
  // NOTE(review): units (ms vs. us since epoch) are not specified here —
  // confirm against the producer before relying on a particular scale.
  optional uint64 timestamp = 1;

  // A single FramePerception message or array of perceptions (if reporting
  // the results from multiple frames).
  repeated FramePerception frame_perception = 2;
}
29 | |
30 // Used to transmit a history of image frames and their associated annotations. | |
31 // This is accumulated over time by the Drishti graph runner. | |
message Diagnostics {
  // History of image frames with their associated annotations, accumulated
  // over time by the graph runner.
  repeated PerceptionSample perception_sample = 1;
}
35 | |
message State {
  // Status of the media analytics process.
  // Note: TIMEOUT (value 0) is also the proto2 default for an unset field,
  // so an absent status reads back as TIMEOUT.
  enum Status {
    TIMEOUT = 0;        // Unable to reach media analysis process.
    UNINITIALIZED = 1;  // Media analytics working on loading configuration.
    STARTED = 2;        // Analysis process running but not receiving frames.
    RUNNING = 3;        // Analysis process running and ingesting frames.
    // Media analytics process waiting to be started.
    SUSPENDED = 4;
  }

  // Note: RUNNING and SUSPENDED are the only two states which should be sent
  // to SetState.
  optional Status status = 1;

  // Device context so that the media analytics process can better select the
  // right video device to open.
  optional string device_context = 2;
}
54 | |
55 // This is the output of the MediaPerceptionSinkCalculator. | |
message PerceptionSample {
  // Computer-vision annotations for one frame.
  optional FramePerception frame_perception = 1;
  // The image frame data associated with the frame perception.
  optional RawImageFrame raw_image_frame = 2;
}
61 | |
62 // Note: this is a replica of image/content/flow/image/raw_image_data.proto | |
63 // because this proto needs to be self-contained (to be checked in to Chromium | |
64 // as well). | |
message RawImageFrame {
  // Frame dimensions in pixels.
  optional int32 width = 1;
  optional int32 height = 2;
  // colorspace is defined in the same way as SimpleImage::ColorSpace.
  optional int32 colorspace = 3;
  // By default, 1 channel means Grayscale, 2 channels means Grayscale +
  // Alpha, 3 channels means RGB, and 4 channels means RGBA.
  optional int32 channels = 4;
  // The raw pixel data as a string of uint8.
  // The size of pixel_data is height*width*channels.
  // Byte order is RGBARGBARGBA.
  // TODO(lasoren): Replace with compressed image format.
  optional bytes pixel_data = 5;
}
79 | |
80 // The set of computer vision metadata for an image frame. | |
message FramePerception {
  // Identifier for the frame these perceptions belong to.
  optional uint64 frame_id = 1;

  // Frame dimensions in pixels.
  optional uint32 frame_width_in_px = 2;
  optional uint32 frame_height_in_px = 3;

  // The timestamp associated with the frame (when it enters the graph).
  // NOTE(review): units are not specified here — confirm with the producer.
  optional uint64 timestamp = 4;

  // The list of entities detected for this frame.
  repeated Entity entity = 5;
}
94 | |
message Entity {
  // A unique id associated with the detected entity, which can be used to
  // track the entity over time.
  optional uint32 id = 1;

  // Kind of entity detected. UNSPECIFIED (value 0) is the proto2 default
  // for an unset field.
  enum EntityType {
    UNSPECIFIED = 0;
    FACE = 1;
    PERSON = 2;
  }

  optional EntityType type = 2;

  // Minimum box, which captures entire detected entity.
  optional BoundingBox bounding_box = 3;

  // A value for the quality of this detection.
  // NOTE(review): range (e.g. [0, 1]) is not specified here — confirm with
  // the detector that produces it.
  optional float confidence = 4;
}
114 | |
message BoundingBox {
  // The points that define the corners of a bounding box.
  optional Point top_left = 1;
  optional Point bottom_right = 2;
  // Indicates whether or not these coordinates are normalized to values
  // between 0 and 1. When false, coordinates are presumably in pixels —
  // TODO confirm with the producer.
  optional bool normalized = 3 [default = false];
}
123 | |
message Point {
  // x represents the horizontal distance from the top left corner of the
  // image to the point.
  optional float x = 1;
  // y represents the vertical distance from the top left corner of the
  // image to the point.
  optional float y = 2;
}
OLD | NEW |