Chromium Code Reviews

Side by Side Diff: generated/googleapis/lib/language/v1.dart

Issue 3006323002: Api-Roll 54: 2017-09-11 (Closed)
Patch Set: use 2.0.0-dev.infinity sdk constraint in pubspecs (created 3 years, 3 months ago)
// This is a generated file (see the discoveryapis_generator project).

library googleapis.language.v1;

import 'dart:core' as core;
import 'dart:async' as async;
import 'dart:convert' as convert;

import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons;
import 'package:http/http.dart' as http;

export 'package:_discoveryapis_commons/_discoveryapis_commons.dart'
    show ApiRequestError, DetailedApiRequestError;

const core.String USER_AGENT = 'dart-api-client language/v1';

/// Provides natural language understanding technologies to developers.
/// Examples include sentiment analysis, entity recognition, entity sentiment
/// analysis, and text annotations.
class LanguageApi {
  /// Apply machine learning models to reveal the structure and meaning of text
  static const CloudLanguageScope =
      "https://www.googleapis.com/auth/cloud-language";

  /// View and manage your data across Google Cloud Platform services
  static const CloudPlatformScope =
      "https://www.googleapis.com/auth/cloud-platform";

  final commons.ApiRequester _requester;

  DocumentsResourceApi get documents => new DocumentsResourceApi(_requester);

  LanguageApi(http.Client client,
      {core.String rootUrl: "https://language.googleapis.com/",
      core.String servicePath: ""})
      : _requester =
            new commons.ApiRequester(client, rootUrl, servicePath, USER_AGENT);
}

class DocumentsResourceApi {
  final commons.ApiRequester _requester;

  DocumentsResourceApi(commons.ApiRequester client) : _requester = client;

  /// Finds named entities (currently proper names and common nouns) in the
  /// text
  /// along with entity types, salience, mentions for each entity, and
  /// other properties.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// Completes with a [AnalyzeEntitiesResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned
  /// an error.
  ///
  /// If the used [http.Client] completes with an error when making a REST
  /// call, this method will complete with the same error.
  async.Future<AnalyzeEntitiesResponse> analyzeEntities(
      AnalyzeEntitiesRequest request) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }

    _url = 'v1/documents:analyzeEntities';

    var _response = _requester.request(_url, "POST",
        body: _body,
        queryParams: _queryParams,
        uploadOptions: _uploadOptions,
        uploadMedia: _uploadMedia,
        downloadOptions: _downloadOptions);
    return _response.then((data) => new AnalyzeEntitiesResponse.fromJson(data));
  }

  /// Analyzes the sentiment of the provided text.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// Completes with a [AnalyzeSentimentResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned
  /// an error.
  ///
  /// If the used [http.Client] completes with an error when making a REST
  /// call, this method will complete with the same error.
  async.Future<AnalyzeSentimentResponse> analyzeSentiment(
      AnalyzeSentimentRequest request) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }

    _url = 'v1/documents:analyzeSentiment';

    var _response = _requester.request(_url, "POST",
        body: _body,
        queryParams: _queryParams,
        uploadOptions: _uploadOptions,
        uploadMedia: _uploadMedia,
        downloadOptions: _downloadOptions);
    return _response
        .then((data) => new AnalyzeSentimentResponse.fromJson(data));
  }

  /// Analyzes the syntax of the text and provides sentence boundaries and
  /// tokenization along with part of speech tags, dependency trees, and other
  /// properties.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// Completes with a [AnalyzeSyntaxResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned
  /// an error.
  ///
  /// If the used [http.Client] completes with an error when making a REST
  /// call, this method will complete with the same error.
  async.Future<AnalyzeSyntaxResponse> analyzeSyntax(
      AnalyzeSyntaxRequest request) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }

    _url = 'v1/documents:analyzeSyntax';

    var _response = _requester.request(_url, "POST",
        body: _body,
        queryParams: _queryParams,
        uploadOptions: _uploadOptions,
        uploadMedia: _uploadMedia,
        downloadOptions: _downloadOptions);
    return _response.then((data) => new AnalyzeSyntaxResponse.fromJson(data));
  }

  /// A convenience method that provides all the features that
  /// analyzeSentiment,
  /// analyzeEntities, and analyzeSyntax provide in one call.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// Completes with a [AnnotateTextResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned
  /// an error.
  ///
  /// If the used [http.Client] completes with an error when making a REST
  /// call, this method will complete with the same error.
  async.Future<AnnotateTextResponse> annotateText(AnnotateTextRequest request) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }

    _url = 'v1/documents:annotateText';

    var _response = _requester.request(_url, "POST",
        body: _body,
        queryParams: _queryParams,
        uploadOptions: _uploadOptions,
        uploadMedia: _uploadMedia,
        downloadOptions: _downloadOptions);
    return _response.then((data) => new AnnotateTextResponse.fromJson(data));
  }
}

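// Usage sketch (illustrative only, not part of the generated file). It assumes
// [client] already carries OAuth2 credentials for
// LanguageApi.CloudLanguageScope (for example obtained via
// package:googleapis_auth).
async.Future exampleAnalyzeSentiment(http.Client client) {
  final api = new LanguageApi(client);
  final request = new AnalyzeSentimentRequest()
    ..document = (new Document()
      ..type = "PLAIN_TEXT"
      ..content = "The weather was lovely and the food was excellent.")
    ..encodingType = "UTF8";
  return api.documents.analyzeSentiment(request).then((response) {
    // response.documentSentiment is the overall Sentiment of the document;
    // response.sentences carries the per-sentence results.
    print(response.language);
  });
}
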
/// The entity analysis request message.
class AnalyzeEntitiesRequest {
  /// Input document.
  Document document;

  /// The encoding type used by the API to calculate offsets.
  /// Possible string values are:
  /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
  /// information (such as
  /// `begin_offset`) will be set at `-1`.
  /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
  /// that use this encoding natively.
  /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-16 encoding of the input. Java and Javascript are examples of
  /// languages that use this encoding natively.
  /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-32 encoding of the input. Python is an example of a language
  /// that uses this encoding natively.
  core.String encodingType;

  AnalyzeEntitiesRequest();

  AnalyzeEntitiesRequest.fromJson(core.Map _json) {
    if (_json.containsKey("document")) {
      document = new Document.fromJson(_json["document"]);
    }
    if (_json.containsKey("encodingType")) {
      encodingType = _json["encodingType"];
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (document != null) {
      _json["document"] = (document).toJson();
    }
    if (encodingType != null) {
      _json["encodingType"] = encodingType;
    }
    return _json;
  }
}

/// The entity analysis response message.
class AnalyzeEntitiesResponse {
  /// The recognized entities in the input document.
  core.List<Entity> entities;

  /// The language of the text, which will be the same as the language
  /// specified
  /// in the request or, if not specified, the automatically-detected language.
  /// See Document.language field for more details.
  core.String language;

  AnalyzeEntitiesResponse();

  AnalyzeEntitiesResponse.fromJson(core.Map _json) {
    if (_json.containsKey("entities")) {
      entities =
          _json["entities"].map((value) => new Entity.fromJson(value)).toList();
    }
    if (_json.containsKey("language")) {
      language = _json["language"];
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (entities != null) {
      _json["entities"] = entities.map((value) => (value).toJson()).toList();
    }
    if (language != null) {
      _json["language"] = language;
    }
    return _json;
  }
}

/// The sentiment analysis request message.
class AnalyzeSentimentRequest {
  /// Input document.
  Document document;

  /// The encoding type used by the API to calculate sentence offsets.
  /// Possible string values are:
  /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
  /// information (such as
  /// `begin_offset`) will be set at `-1`.
  /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
  /// that use this encoding natively.
  /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-16 encoding of the input. Java and Javascript are examples of
  /// languages that use this encoding natively.
  /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-32 encoding of the input. Python is an example of a language
  /// that uses this encoding natively.
  core.String encodingType;

  AnalyzeSentimentRequest();

  AnalyzeSentimentRequest.fromJson(core.Map _json) {
    if (_json.containsKey("document")) {
      document = new Document.fromJson(_json["document"]);
    }
    if (_json.containsKey("encodingType")) {
      encodingType = _json["encodingType"];
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (document != null) {
      _json["document"] = (document).toJson();
    }
    if (encodingType != null) {
      _json["encodingType"] = encodingType;
    }
    return _json;
  }
}

/// The sentiment analysis response message.
class AnalyzeSentimentResponse {
  /// The overall sentiment of the input document.
  Sentiment documentSentiment;

  /// The language of the text, which will be the same as the language
  /// specified
  /// in the request or, if not specified, the automatically-detected language.
  /// See Document.language field for more details.
  core.String language;

  /// The sentiment for all the sentences in the document.
  core.List<Sentence> sentences;

  AnalyzeSentimentResponse();

  AnalyzeSentimentResponse.fromJson(core.Map _json) {
    if (_json.containsKey("documentSentiment")) {
      documentSentiment = new Sentiment.fromJson(_json["documentSentiment"]);
    }
    if (_json.containsKey("language")) {
      language = _json["language"];
    }
    if (_json.containsKey("sentences")) {
      sentences = _json["sentences"]
          .map((value) => new Sentence.fromJson(value))
          .toList();
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (documentSentiment != null) {
      _json["documentSentiment"] = (documentSentiment).toJson();
    }
    if (language != null) {
      _json["language"] = language;
    }
    if (sentences != null) {
      _json["sentences"] = sentences.map((value) => (value).toJson()).toList();
    }
    return _json;
  }
}

/// The syntax analysis request message.
class AnalyzeSyntaxRequest {
  /// Input document.
  Document document;

  /// The encoding type used by the API to calculate offsets.
  /// Possible string values are:
  /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
  /// information (such as
  /// `begin_offset`) will be set at `-1`.
  /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
  /// that use this encoding natively.
  /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-16 encoding of the input. Java and Javascript are examples of
  /// languages that use this encoding natively.
  /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-32 encoding of the input. Python is an example of a language
  /// that uses this encoding natively.
  core.String encodingType;

  AnalyzeSyntaxRequest();

  AnalyzeSyntaxRequest.fromJson(core.Map _json) {
    if (_json.containsKey("document")) {
      document = new Document.fromJson(_json["document"]);
    }
    if (_json.containsKey("encodingType")) {
      encodingType = _json["encodingType"];
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (document != null) {
      _json["document"] = (document).toJson();
    }
    if (encodingType != null) {
      _json["encodingType"] = encodingType;
    }
    return _json;
  }
}

/// The syntax analysis response message.
class AnalyzeSyntaxResponse {
  /// The language of the text, which will be the same as the language
  /// specified
  /// in the request or, if not specified, the automatically-detected language.
  /// See Document.language field for more details.
  core.String language;

  /// Sentences in the input document.
  core.List<Sentence> sentences;

  /// Tokens, along with their syntactic information, in the input document.
  core.List<Token> tokens;

  AnalyzeSyntaxResponse();

  AnalyzeSyntaxResponse.fromJson(core.Map _json) {
    if (_json.containsKey("language")) {
      language = _json["language"];
    }
    if (_json.containsKey("sentences")) {
      sentences = _json["sentences"]
          .map((value) => new Sentence.fromJson(value))
          .toList();
    }
    if (_json.containsKey("tokens")) {
      tokens =
          _json["tokens"].map((value) => new Token.fromJson(value)).toList();
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (language != null) {
      _json["language"] = language;
    }
    if (sentences != null) {
      _json["sentences"] = sentences.map((value) => (value).toJson()).toList();
    }
    if (tokens != null) {
      _json["tokens"] = tokens.map((value) => (value).toJson()).toList();
    }
    return _json;
  }
}

/// The request message for the text annotation API, which can perform multiple
/// analysis types (sentiment, entities, and syntax) in one call.
class AnnotateTextRequest {
  /// Input document.
  Document document;

  /// The encoding type used by the API to calculate offsets.
  /// Possible string values are:
  /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
  /// information (such as
  /// `begin_offset`) will be set at `-1`.
  /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
  /// that use this encoding natively.
  /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-16 encoding of the input. Java and Javascript are examples of
  /// languages that use this encoding natively.
  /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
  /// calculated based
  /// on the UTF-32 encoding of the input. Python is an example of a language
  /// that uses this encoding natively.
  core.String encodingType;

  /// The enabled features.
  Features features;

  AnnotateTextRequest();

  AnnotateTextRequest.fromJson(core.Map _json) {
    if (_json.containsKey("document")) {
      document = new Document.fromJson(_json["document"]);
    }
    if (_json.containsKey("encodingType")) {
      encodingType = _json["encodingType"];
    }
    if (_json.containsKey("features")) {
      features = new Features.fromJson(_json["features"]);
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (document != null) {
      _json["document"] = (document).toJson();
    }
    if (encodingType != null) {
      _json["encodingType"] = encodingType;
    }
    if (features != null) {
      _json["features"] = (features).toJson();
    }
    return _json;
  }
}

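// Illustrative sketch: combining all three analyses in a single annotateText
// call. The Features message referenced above is defined elsewhere in this
// file; the JSON field names used below (extractSyntax, extractEntities,
// extractDocumentSentiment) are assumed from the v1 REST API and do not
// appear in this excerpt.
AnnotateTextRequest buildFullAnnotationRequest(Document doc) {
  return new AnnotateTextRequest()
    ..document = doc
    ..encodingType = "UTF8"
    ..features = new Features.fromJson({
      "extractSyntax": true,
      "extractEntities": true,
      "extractDocumentSentiment": true
    });
}
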
/// The text annotations response message.
class AnnotateTextResponse {
  /// The overall sentiment for the document. Populated if the user enables
  /// AnnotateTextRequest.Features.extract_document_sentiment.
  Sentiment documentSentiment;

  /// Entities, along with their semantic information, in the input document.
  /// Populated if the user enables
  /// AnnotateTextRequest.Features.extract_entities.
  core.List<Entity> entities;

  /// The language of the text, which will be the same as the language
  /// specified
  /// in the request or, if not specified, the automatically-detected language.
  /// See Document.language field for more details.
  core.String language;

  /// Sentences in the input document. Populated if the user enables
  /// AnnotateTextRequest.Features.extract_syntax.
  core.List<Sentence> sentences;

  /// Tokens, along with their syntactic information, in the input document.
  /// Populated if the user enables
  /// AnnotateTextRequest.Features.extract_syntax.
  core.List<Token> tokens;

  AnnotateTextResponse();

  AnnotateTextResponse.fromJson(core.Map _json) {
    if (_json.containsKey("documentSentiment")) {
      documentSentiment = new Sentiment.fromJson(_json["documentSentiment"]);
    }
    if (_json.containsKey("entities")) {
      entities =
          _json["entities"].map((value) => new Entity.fromJson(value)).toList();
    }
    if (_json.containsKey("language")) {
      language = _json["language"];
    }
    if (_json.containsKey("sentences")) {
      sentences = _json["sentences"]
          .map((value) => new Sentence.fromJson(value))
          .toList();
    }
    if (_json.containsKey("tokens")) {
      tokens =
          _json["tokens"].map((value) => new Token.fromJson(value)).toList();
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (documentSentiment != null) {
      _json["documentSentiment"] = (documentSentiment).toJson();
    }
    if (entities != null) {
      _json["entities"] = entities.map((value) => (value).toJson()).toList();
    }
    if (language != null) {
      _json["language"] = language;
    }
    if (sentences != null) {
      _json["sentences"] = sentences.map((value) => (value).toJson()).toList();
    }
    if (tokens != null) {
      _json["tokens"] = tokens.map((value) => (value).toJson()).toList();
    }
    return _json;
  }
}

/// Represents dependency parse tree information for a token. (For more
/// information on dependency labels, see
/// http://www.aclweb.org/anthology/P13-2017
class DependencyEdge {
  /// Represents the head of this token in the dependency tree.
  /// This is the index of the token which has an arc going to this token.
  /// The index is the position of the token in the array of tokens returned
  /// by the API method. If this token is a root token, then the
  /// `head_token_index` is its own index.
  core.int headTokenIndex;

  /// The parse label for the token.
  /// Possible string values are:
  /// - "UNKNOWN" : Unknown
  /// - "ABBREV" : Abbreviation modifier
  /// - "ACOMP" : Adjectival complement
  /// - "ADVCL" : Adverbial clause modifier
  /// - "ADVMOD" : Adverbial modifier
  /// - "AMOD" : Adjectival modifier of an NP
  /// - "APPOS" : Appositional modifier of an NP
  /// - "ATTR" : Attribute dependent of a copular verb
  /// - "AUX" : Auxiliary (non-main) verb
  /// - "AUXPASS" : Passive auxiliary
  /// - "CC" : Coordinating conjunction
  /// - "CCOMP" : Clausal complement of a verb or adjective
  /// - "CONJ" : Conjunct
  /// - "CSUBJ" : Clausal subject
  /// - "CSUBJPASS" : Clausal passive subject
  /// - "DEP" : Dependency (unable to determine)
  /// - "DET" : Determiner
  /// - "DISCOURSE" : Discourse
  /// - "DOBJ" : Direct object
  /// - "EXPL" : Expletive
  /// - "GOESWITH" : Goes with (part of a word in a text not well edited)
  /// - "IOBJ" : Indirect object
  /// - "MARK" : Marker (word introducing a subordinate clause)
  /// - "MWE" : Multi-word expression
  /// - "MWV" : Multi-word verbal expression
  /// - "NEG" : Negation modifier
  /// - "NN" : Noun compound modifier
  /// - "NPADVMOD" : Noun phrase used as an adverbial modifier
  /// - "NSUBJ" : Nominal subject
  /// - "NSUBJPASS" : Passive nominal subject
  /// - "NUM" : Numeric modifier of a noun
  /// - "NUMBER" : Element of compound number
  /// - "P" : Punctuation mark
  /// - "PARATAXIS" : Parataxis relation
  /// - "PARTMOD" : Participial modifier
  /// - "PCOMP" : The complement of a preposition is a clause
  /// - "POBJ" : Object of a preposition
  /// - "POSS" : Possession modifier
  /// - "POSTNEG" : Postverbal negative particle
  /// - "PRECOMP" : Predicate complement
  /// - "PRECONJ" : Preconjunt
  /// - "PREDET" : Predeterminer
  /// - "PREF" : Prefix
  /// - "PREP" : Prepositional modifier
  /// - "PRONL" : The relationship between a verb and verbal morpheme
  /// - "PRT" : Particle
  /// - "PS" : Associative or possessive marker
  /// - "QUANTMOD" : Quantifier phrase modifier
  /// - "RCMOD" : Relative clause modifier
  /// - "RCMODREL" : Complementizer in relative clause
  /// - "RDROP" : Ellipsis without a preceding predicate
  /// - "REF" : Referent
  /// - "REMNANT" : Remnant
  /// - "REPARANDUM" : Reparandum
  /// - "ROOT" : Root
  /// - "SNUM" : Suffix specifying a unit of number
  /// - "SUFF" : Suffix
  /// - "TMOD" : Temporal modifier
  /// - "TOPIC" : Topic marker
  /// - "VMOD" : Clause headed by an infinite form of the verb that modifies a
  /// noun
  /// - "VOCATIVE" : Vocative
  /// - "XCOMP" : Open clausal complement
  /// - "SUFFIX" : Name suffix
  /// - "TITLE" : Name title
  /// - "ADVPHMOD" : Adverbial phrase modifier
  /// - "AUXCAUS" : Causative auxiliary
  /// - "AUXVV" : Helper auxiliary
  /// - "DTMOD" : Rentaishi (Prenominal modifier)
  /// - "FOREIGN" : Foreign words
  /// - "KW" : Keyword
  /// - "LIST" : List for chains of comparable items
  /// - "NOMC" : Nominalized clause
  /// - "NOMCSUBJ" : Nominalized clausal subject
  /// - "NOMCSUBJPASS" : Nominalized clausal passive
  /// - "NUMC" : Compound of numeric modifier
  /// - "COP" : Copula
  /// - "DISLOCATED" : Dislocated relation (for fronted/topicalized elements)
  core.String label;

  DependencyEdge();

  DependencyEdge.fromJson(core.Map _json) {
    if (_json.containsKey("headTokenIndex")) {
      headTokenIndex = _json["headTokenIndex"];
    }
    if (_json.containsKey("label")) {
      label = _json["label"];
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (headTokenIndex != null) {
      _json["headTokenIndex"] = headTokenIndex;
    }
    if (label != null) {
      _json["label"] = label;
    }
    return _json;
  }
}

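// Illustrative only: for the sentence "John eats apples" the tokens
// [John, eats, apples] would typically carry the dependency edges below.
// "eats" is the root, so its headTokenIndex is its own index (1).
final exampleDependencyEdges = [
  new DependencyEdge.fromJson({"headTokenIndex": 1, "label": "NSUBJ"}), // John
  new DependencyEdge.fromJson({"headTokenIndex": 1, "label": "ROOT"}), // eats
  new DependencyEdge.fromJson({"headTokenIndex": 1, "label": "DOBJ"}) // apples
];
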
/// ################################################################ #
///
/// Represents the input to API methods.
class Document {
  /// The content of the input in string format.
  core.String content;

  /// The Google Cloud Storage URI where the file content is located.
  /// This URI must be of the form: gs://bucket_name/object_name. For more
  /// details, see https://cloud.google.com/storage/docs/reference-uris.
  /// NOTE: Cloud Storage object versioning is not supported.
  core.String gcsContentUri;

  /// The language of the document (if not specified, the language is
  /// automatically detected). Both ISO and BCP-47 language codes are
  /// accepted.<br>
  /// [Language Support](/natural-language/docs/languages)
  /// lists currently supported languages for each API method.
  /// If the language (either specified by the caller or automatically
  /// detected)
  /// is not supported by the called API method, an `INVALID_ARGUMENT` error
  /// is returned.
  core.String language;

  /// Required. If the type is not set or is `TYPE_UNSPECIFIED`,
  /// returns an `INVALID_ARGUMENT` error.
  /// Possible string values are:
  /// - "TYPE_UNSPECIFIED" : The content type is not specified.
  /// - "PLAIN_TEXT" : Plain text
  /// - "HTML" : HTML
  core.String type;

  Document();

  Document.fromJson(core.Map _json) {
    if (_json.containsKey("content")) {
      content = _json["content"];
    }
    if (_json.containsKey("gcsContentUri")) {
      gcsContentUri = _json["gcsContentUri"];
    }
    if (_json.containsKey("language")) {
      language = _json["language"];
    }
    if (_json.containsKey("type")) {
      type = _json["type"];
    }
  }

  core.Map<core.String, core.Object> toJson() {
    final core.Map<core.String, core.Object> _json =
        new core.Map<core.String, core.Object>();
    if (content != null) {
      _json["content"] = content;
    }
    if (gcsContentUri != null) {
      _json["gcsContentUri"] = gcsContentUri;
    }
    if (language != null) {
      _json["language"] = language;
    }
    if (type != null) {
      _json["type"] = type;
    }
    return _json;
  }
}

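// Illustrative sketch: an entity analysis request for a document stored in
// Google Cloud Storage ("gs://my-bucket/article.txt" is a placeholder), and
// the JSON body that analyzeEntities would POST to
// v1/documents:analyzeEntities.
void printEntityRequestJson() {
  final request = new AnalyzeEntitiesRequest()
    ..document = (new Document()
      ..type = "PLAIN_TEXT"
      ..gcsContentUri = "gs://my-bucket/article.txt")
    ..encodingType = "UTF16";
  // toJson() emits only the fields that are non-null.
  print(convert.JSON.encode(request.toJson()));
}
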
797 /** 789 /// Represents a phrase in the text that is a known entity, such as
798 * Represents a phrase in the text that is a known entity, such as 790 /// a person, an organization, or location. The API associates information,
799 * a person, an organization, or location. The API associates information, such 791 /// such
800 * as salience and mentions, with entities. 792 /// as salience and mentions, with entities.
801 */
802 class Entity { 793 class Entity {
803 /** 794 /// The mentions of this entity in the input document. The API currently
804 * The mentions of this entity in the input document. The API currently 795 /// supports proper noun mentions.
805 * supports proper noun mentions.
806 */
807 core.List<EntityMention> mentions; 796 core.List<EntityMention> mentions;
808 /** 797
809 * Metadata associated with the entity. 798 /// Metadata associated with the entity.
810 * 799 ///
811 * Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if 800 /// Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
812 * available. The associated keys are "wikipedia_url" and "mid", respectively. 801 /// available. The associated keys are "wikipedia_url" and "mid",
813 */ 802 /// respectively.
814 core.Map<core.String, core.String> metadata; 803 core.Map<core.String, core.String> metadata;
815 /** The representative name for the entity. */ 804
805 /// The representative name for the entity.
816 core.String name; 806 core.String name;
817 /** 807
818 * The salience score associated with the entity in the [0, 1.0] range. 808 /// The salience score associated with the entity in the [0, 1.0] range.
819 * 809 ///
820 * The salience score for an entity provides information about the 810 /// The salience score for an entity provides information about the
821 * importance or centrality of that entity to the entire document text. 811 /// importance or centrality of that entity to the entire document text.
822 * Scores closer to 0 are less salient, while scores closer to 1.0 are highly 812 /// Scores closer to 0 are less salient, while scores closer to 1.0 are
823 * salient. 813 /// highly
824 */ 814 /// salient.
825 core.double salience; 815 core.double salience;
826 /** 816
827 * The entity type. 817 /// The entity type.
828 * Possible string values are: 818 /// Possible string values are:
829 * - "UNKNOWN" : Unknown 819 /// - "UNKNOWN" : Unknown
830 * - "PERSON" : Person 820 /// - "PERSON" : Person
831 * - "LOCATION" : Location 821 /// - "LOCATION" : Location
832 * - "ORGANIZATION" : Organization 822 /// - "ORGANIZATION" : Organization
833 * - "EVENT" : Event 823 /// - "EVENT" : Event
834 * - "WORK_OF_ART" : Work of art 824 /// - "WORK_OF_ART" : Work of art
835 * - "CONSUMER_GOOD" : Consumer goods 825 /// - "CONSUMER_GOOD" : Consumer goods
836 * - "OTHER" : Other types 826 /// - "OTHER" : Other types
837 */
838 core.String type; 827 core.String type;
839 828
840 Entity(); 829 Entity();
841 830
842 Entity.fromJson(core.Map _json) { 831 Entity.fromJson(core.Map _json) {
843 if (_json.containsKey("mentions")) { 832 if (_json.containsKey("mentions")) {
844 mentions = _json["mentions"].map((value) => new EntityMention.fromJson(value)).toList(); 833 mentions = _json["mentions"]
834 .map((value) => new EntityMention.fromJson(value))
835 .toList();
845 } 836 }
846 if (_json.containsKey("metadata")) { 837 if (_json.containsKey("metadata")) {
847 metadata = _json["metadata"]; 838 metadata = _json["metadata"];
848 } 839 }
849 if (_json.containsKey("name")) { 840 if (_json.containsKey("name")) {
850 name = _json["name"]; 841 name = _json["name"];
851 } 842 }
852 if (_json.containsKey("salience")) { 843 if (_json.containsKey("salience")) {
853 salience = _json["salience"]; 844 salience = _json["salience"];
854 } 845 }
855 if (_json.containsKey("type")) { 846 if (_json.containsKey("type")) {
856 type = _json["type"]; 847 type = _json["type"];
857 } 848 }
858 } 849 }
859 850
860 core.Map<core.String, core.Object> toJson() { 851 core.Map<core.String, core.Object> toJson() {
861 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 852 final core.Map<core.String, core.Object> _json =
853 new core.Map<core.String, core.Object>();
862 if (mentions != null) { 854 if (mentions != null) {
863 _json["mentions"] = mentions.map((value) => (value).toJson()).toList(); 855 _json["mentions"] = mentions.map((value) => (value).toJson()).toList();
864 } 856 }
865 if (metadata != null) { 857 if (metadata != null) {
866 _json["metadata"] = metadata; 858 _json["metadata"] = metadata;
867 } 859 }
868 if (name != null) { 860 if (name != null) {
869 _json["name"] = name; 861 _json["name"] = name;
870 } 862 }
871 if (salience != null) { 863 if (salience != null) {
872 _json["salience"] = salience; 864 _json["salience"] = salience;
873 } 865 }
874 if (type != null) { 866 if (type != null) {
875 _json["type"] = type; 867 _json["type"] = type;
876 } 868 }
877 return _json; 869 return _json;
878 } 870 }
879 } 871 }
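
As a rough illustration of how a response entity is decoded (all values below are made up), the fromJson constructor above can rebuild an Entity from the JSON map returned by analyzeEntities:

  final entity = new Entity.fromJson({
    "name": "Googleplex",
    "type": "LOCATION",
    "salience": 0.62,
    "metadata": {"mid": "/m/03bnb", "wikipedia_url": "https://en.wikipedia.org/wiki/Googleplex"},
  });
  print("${entity.name} (${entity.type}), salience ${entity.salience}");
  print(entity.metadata["wikipedia_url"]);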
880 872
881 /** 873 /// Represents a mention for an entity in the text. Currently, proper noun
882 * Represents a mention for an entity in the text. Currently, proper noun 874 /// mentions are supported.
883 * mentions are supported.
884 */
885 class EntityMention { 875 class EntityMention {
886 /** The mention text. */ 876 /// The mention text.
887 TextSpan text; 877 TextSpan text;
888 /** 878
889 * The type of the entity mention. 879 /// The type of the entity mention.
890 * Possible string values are: 880 /// Possible string values are:
891 * - "TYPE_UNKNOWN" : Unknown 881 /// - "TYPE_UNKNOWN" : Unknown
892 * - "PROPER" : Proper name 882 /// - "PROPER" : Proper name
893 * - "COMMON" : Common noun (or noun compound) 883 /// - "COMMON" : Common noun (or noun compound)
894 */
895 core.String type; 884 core.String type;
896 885
897 EntityMention(); 886 EntityMention();
898 887
899 EntityMention.fromJson(core.Map _json) { 888 EntityMention.fromJson(core.Map _json) {
900 if (_json.containsKey("text")) { 889 if (_json.containsKey("text")) {
901 text = new TextSpan.fromJson(_json["text"]); 890 text = new TextSpan.fromJson(_json["text"]);
902 } 891 }
903 if (_json.containsKey("type")) { 892 if (_json.containsKey("type")) {
904 type = _json["type"]; 893 type = _json["type"];
905 } 894 }
906 } 895 }
907 896
908 core.Map<core.String, core.Object> toJson() { 897 core.Map<core.String, core.Object> toJson() {
909 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 898 final core.Map<core.String, core.Object> _json =
899 new core.Map<core.String, core.Object>();
910 if (text != null) { 900 if (text != null) {
911 _json["text"] = (text).toJson(); 901 _json["text"] = (text).toJson();
912 } 902 }
913 if (type != null) { 903 if (type != null) {
914 _json["type"] = type; 904 _json["type"] = type;
915 } 905 }
916 return _json; 906 return _json;
917 } 907 }
918 } 908 }
919 909
920 /** 910 /// All available features for sentiment, syntax, and semantic analysis.
921 * All available features for sentiment, syntax, and semantic analysis. 911 /// Setting each one to true will enable that specific analysis for the input.
922 * Setting each one to true will enable that specific analysis for the input.
923 */
924 class Features { 912 class Features {
925 /** Extract document-level sentiment. */ 913 /// Extract document-level sentiment.
926 core.bool extractDocumentSentiment; 914 core.bool extractDocumentSentiment;
927 /** Extract entities. */ 915
916 /// Extract entities.
928 core.bool extractEntities; 917 core.bool extractEntities;
929 /** Extract syntax information. */ 918
919 /// Extract syntax information.
930 core.bool extractSyntax; 920 core.bool extractSyntax;
931 921
932 Features(); 922 Features();
933 923
934 Features.fromJson(core.Map _json) { 924 Features.fromJson(core.Map _json) {
935 if (_json.containsKey("extractDocumentSentiment")) { 925 if (_json.containsKey("extractDocumentSentiment")) {
936 extractDocumentSentiment = _json["extractDocumentSentiment"]; 926 extractDocumentSentiment = _json["extractDocumentSentiment"];
937 } 927 }
938 if (_json.containsKey("extractEntities")) { 928 if (_json.containsKey("extractEntities")) {
939 extractEntities = _json["extractEntities"]; 929 extractEntities = _json["extractEntities"];
940 } 930 }
941 if (_json.containsKey("extractSyntax")) { 931 if (_json.containsKey("extractSyntax")) {
942 extractSyntax = _json["extractSyntax"]; 932 extractSyntax = _json["extractSyntax"];
943 } 933 }
944 } 934 }
945 935
946 core.Map<core.String, core.Object> toJson() { 936 core.Map<core.String, core.Object> toJson() {
947 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 937 final core.Map<core.String, core.Object> _json =
938 new core.Map<core.String, core.Object>();
948 if (extractDocumentSentiment != null) { 939 if (extractDocumentSentiment != null) {
949 _json["extractDocumentSentiment"] = extractDocumentSentiment; 940 _json["extractDocumentSentiment"] = extractDocumentSentiment;
950 } 941 }
951 if (extractEntities != null) { 942 if (extractEntities != null) {
952 _json["extractEntities"] = extractEntities; 943 _json["extractEntities"] = extractEntities;
953 } 944 }
954 if (extractSyntax != null) { 945 if (extractSyntax != null) {
955 _json["extractSyntax"] = extractSyntax; 946 _json["extractSyntax"] = extractSyntax;
956 } 947 }
957 return _json; 948 return _json;
958 } 949 }
959 } 950 }
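
For instance, a single annotateText call that should return syntax, entities, and document sentiment at once could be configured like this (sketch only; flags left unset request no analysis of that kind):

  final features = new Features()
    ..extractSyntax = true
    ..extractEntities = true
    ..extractDocumentSentiment = true;
  // features.toJson() ==
  //   {extractDocumentSentiment: true, extractEntities: true, extractSyntax: true}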
960 951
961 /** 952 /// Represents part of speech information for a token. Parts of speech
962 * Represents part of speech information for a token. Parts of speech 953 /// are as defined in
963 * are as defined in 954 /// http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf
964 * http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf
965 */
966 class PartOfSpeech { 955 class PartOfSpeech {
967 /** 956 /// The grammatical aspect.
968 * The grammatical aspect. 957 /// Possible string values are:
969 * Possible string values are: 958 /// - "ASPECT_UNKNOWN" : Aspect is not applicable in the analyzed language or
970 * - "ASPECT_UNKNOWN" : Aspect is not applicable in the analyzed language or 959 /// is not predicted.
971 * is not predicted. 960 /// - "PERFECTIVE" : Perfective
972 * - "PERFECTIVE" : Perfective 961 /// - "IMPERFECTIVE" : Imperfective
973 * - "IMPERFECTIVE" : Imperfective 962 /// - "PROGRESSIVE" : Progressive
974 * - "PROGRESSIVE" : Progressive
975 */
976 core.String aspect; 963 core.String aspect;
977 /** 964
978 * The grammatical case. 965 /// The grammatical case.
979 * Possible string values are: 966 /// Possible string values are:
980 * - "CASE_UNKNOWN" : Case is not applicable in the analyzed language or is 967 /// - "CASE_UNKNOWN" : Case is not applicable in the analyzed language or is
981 * not predicted. 968 /// not predicted.
982 * - "ACCUSATIVE" : Accusative 969 /// - "ACCUSATIVE" : Accusative
983 * - "ADVERBIAL" : Adverbial 970 /// - "ADVERBIAL" : Adverbial
984 * - "COMPLEMENTIVE" : Complementive 971 /// - "COMPLEMENTIVE" : Complementive
985 * - "DATIVE" : Dative 972 /// - "DATIVE" : Dative
986 * - "GENITIVE" : Genitive 973 /// - "GENITIVE" : Genitive
987 * - "INSTRUMENTAL" : Instrumental 974 /// - "INSTRUMENTAL" : Instrumental
988 * - "LOCATIVE" : Locative 975 /// - "LOCATIVE" : Locative
989 * - "NOMINATIVE" : Nominative 976 /// - "NOMINATIVE" : Nominative
990 * - "OBLIQUE" : Oblique 977 /// - "OBLIQUE" : Oblique
991 * - "PARTITIVE" : Partitive 978 /// - "PARTITIVE" : Partitive
992 * - "PREPOSITIONAL" : Prepositional 979 /// - "PREPOSITIONAL" : Prepositional
993 * - "REFLEXIVE_CASE" : Reflexive 980 /// - "REFLEXIVE_CASE" : Reflexive
994 * - "RELATIVE_CASE" : Relative 981 /// - "RELATIVE_CASE" : Relative
995 * - "VOCATIVE" : Vocative 982 /// - "VOCATIVE" : Vocative
996 */
997 core.String case_; 983 core.String case_;
998 /** 984
999 * The grammatical form. 985 /// The grammatical form.
1000 * Possible string values are: 986 /// Possible string values are:
1001 * - "FORM_UNKNOWN" : Form is not applicable in the analyzed language or is 987 /// - "FORM_UNKNOWN" : Form is not applicable in the analyzed language or is
1002 * not predicted. 988 /// not predicted.
1003 * - "ADNOMIAL" : Adnomial 989 /// - "ADNOMIAL" : Adnomial
1004 * - "AUXILIARY" : Auxiliary 990 /// - "AUXILIARY" : Auxiliary
1005 * - "COMPLEMENTIZER" : Complementizer 991 /// - "COMPLEMENTIZER" : Complementizer
1006 * - "FINAL_ENDING" : Final ending 992 /// - "FINAL_ENDING" : Final ending
1007 * - "GERUND" : Gerund 993 /// - "GERUND" : Gerund
1008 * - "REALIS" : Realis 994 /// - "REALIS" : Realis
1009 * - "IRREALIS" : Irrealis 995 /// - "IRREALIS" : Irrealis
1010 * - "SHORT" : Short form 996 /// - "SHORT" : Short form
1011 * - "LONG" : Long form 997 /// - "LONG" : Long form
1012 * - "ORDER" : Order form 998 /// - "ORDER" : Order form
1013 * - "SPECIFIC" : Specific form 999 /// - "SPECIFIC" : Specific form
1014 */
1015 core.String form; 1000 core.String form;
1016 /** 1001
1017 * The grammatical gender. 1002 /// The grammatical gender.
1018 * Possible string values are: 1003 /// Possible string values are:
1019 * - "GENDER_UNKNOWN" : Gender is not applicable in the analyzed language or 1004 /// - "GENDER_UNKNOWN" : Gender is not applicable in the analyzed language or
1020 * is not predicted. 1005 /// is not predicted.
1021 * - "FEMININE" : Feminine 1006 /// - "FEMININE" : Feminine
1022 * - "MASCULINE" : Masculine 1007 /// - "MASCULINE" : Masculine
1023 * - "NEUTER" : Neuter 1008 /// - "NEUTER" : Neuter
1024 */
1025 core.String gender; 1009 core.String gender;
1026 /** 1010
1027 * The grammatical mood. 1011 /// The grammatical mood.
1028 * Possible string values are: 1012 /// Possible string values are:
1029 * - "MOOD_UNKNOWN" : Mood is not applicable in the analyzed language or is 1013 /// - "MOOD_UNKNOWN" : Mood is not applicable in the analyzed language or is
1030 * not predicted. 1014 /// not predicted.
1031 * - "CONDITIONAL_MOOD" : Conditional 1015 /// - "CONDITIONAL_MOOD" : Conditional
1032 * - "IMPERATIVE" : Imperative 1016 /// - "IMPERATIVE" : Imperative
1033 * - "INDICATIVE" : Indicative 1017 /// - "INDICATIVE" : Indicative
1034 * - "INTERROGATIVE" : Interrogative 1018 /// - "INTERROGATIVE" : Interrogative
1035 * - "JUSSIVE" : Jussive 1019 /// - "JUSSIVE" : Jussive
1036 * - "SUBJUNCTIVE" : Subjunctive 1020 /// - "SUBJUNCTIVE" : Subjunctive
1037 */
1038 core.String mood; 1021 core.String mood;
1039 /** 1022
1040 * The grammatical number. 1023 /// The grammatical number.
1041 * Possible string values are: 1024 /// Possible string values are:
1042 * - "NUMBER_UNKNOWN" : Number is not applicable in the analyzed language or 1025 /// - "NUMBER_UNKNOWN" : Number is not applicable in the analyzed language or
1043 * is not predicted. 1026 /// is not predicted.
1044 * - "SINGULAR" : Singular 1027 /// - "SINGULAR" : Singular
1045 * - "PLURAL" : Plural 1028 /// - "PLURAL" : Plural
1046 * - "DUAL" : Dual 1029 /// - "DUAL" : Dual
1047 */
1048 core.String number; 1030 core.String number;
1049 /** 1031
1050 * The grammatical person. 1032 /// The grammatical person.
1051 * Possible string values are: 1033 /// Possible string values are:
1052 * - "PERSON_UNKNOWN" : Person is not applicable in the analyzed language or 1034 /// - "PERSON_UNKNOWN" : Person is not applicable in the analyzed language or
1053 * is not predicted. 1035 /// is not predicted.
1054 * - "FIRST" : First 1036 /// - "FIRST" : First
1055 * - "SECOND" : Second 1037 /// - "SECOND" : Second
1056 * - "THIRD" : Third 1038 /// - "THIRD" : Third
1057 * - "REFLEXIVE_PERSON" : Reflexive 1039 /// - "REFLEXIVE_PERSON" : Reflexive
1058 */
1059 core.String person; 1040 core.String person;
1060 /** 1041
1061 * The grammatical properness. 1042 /// The grammatical properness.
1062 * Possible string values are: 1043 /// Possible string values are:
1063 * - "PROPER_UNKNOWN" : Proper is not applicable in the analyzed language or 1044 /// - "PROPER_UNKNOWN" : Proper is not applicable in the analyzed language or
1064 * is not predicted. 1045 /// is not predicted.
1065 * - "PROPER" : Proper 1046 /// - "PROPER" : Proper
1066 * - "NOT_PROPER" : Not proper 1047 /// - "NOT_PROPER" : Not proper
1067 */
1068 core.String proper; 1048 core.String proper;
1069 /** 1049
1070 * The grammatical reciprocity. 1050 /// The grammatical reciprocity.
1071 * Possible string values are: 1051 /// Possible string values are:
1072 * - "RECIPROCITY_UNKNOWN" : Reciprocity is not applicable in the analyzed 1052 /// - "RECIPROCITY_UNKNOWN" : Reciprocity is not applicable in the analyzed
1073 * language or is not 1053 /// language or is not
1074 * predicted. 1054 /// predicted.
1075 * - "RECIPROCAL" : Reciprocal 1055 /// - "RECIPROCAL" : Reciprocal
1076 * - "NON_RECIPROCAL" : Non-reciprocal 1056 /// - "NON_RECIPROCAL" : Non-reciprocal
1077 */
1078 core.String reciprocity; 1057 core.String reciprocity;
1079 /** 1058
1080 * The part of speech tag. 1059 /// The part of speech tag.
1081 * Possible string values are: 1060 /// Possible string values are:
1082 * - "UNKNOWN" : Unknown 1061 /// - "UNKNOWN" : Unknown
1083 * - "ADJ" : Adjective 1062 /// - "ADJ" : Adjective
1084 * - "ADP" : Adposition (preposition and postposition) 1063 /// - "ADP" : Adposition (preposition and postposition)
1085 * - "ADV" : Adverb 1064 /// - "ADV" : Adverb
1086 * - "CONJ" : Conjunction 1065 /// - "CONJ" : Conjunction
1087 * - "DET" : Determiner 1066 /// - "DET" : Determiner
1088 * - "NOUN" : Noun (common and proper) 1067 /// - "NOUN" : Noun (common and proper)
1089 * - "NUM" : Cardinal number 1068 /// - "NUM" : Cardinal number
1090 * - "PRON" : Pronoun 1069 /// - "PRON" : Pronoun
1091 * - "PRT" : Particle or other function word 1070 /// - "PRT" : Particle or other function word
1092 * - "PUNCT" : Punctuation 1071 /// - "PUNCT" : Punctuation
1093 * - "VERB" : Verb (all tenses and modes) 1072 /// - "VERB" : Verb (all tenses and modes)
1094 * - "X" : Other: foreign words, typos, abbreviations 1073 /// - "X" : Other: foreign words, typos, abbreviations
1095 * - "AFFIX" : Affix 1074 /// - "AFFIX" : Affix
1096 */
1097 core.String tag; 1075 core.String tag;
1098 /** 1076
1099 * The grammatical tense. 1077 /// The grammatical tense.
1100 * Possible string values are: 1078 /// Possible string values are:
1101 * - "TENSE_UNKNOWN" : Tense is not applicable in the analyzed language or is 1079 /// - "TENSE_UNKNOWN" : Tense is not applicable in the analyzed language or
1102 * not predicted. 1080 /// is not predicted.
1103 * - "CONDITIONAL_TENSE" : Conditional 1081 /// - "CONDITIONAL_TENSE" : Conditional
1104 * - "FUTURE" : Future 1082 /// - "FUTURE" : Future
1105 * - "PAST" : Past 1083 /// - "PAST" : Past
1106 * - "PRESENT" : Present 1084 /// - "PRESENT" : Present
1107 * - "IMPERFECT" : Imperfect 1085 /// - "IMPERFECT" : Imperfect
1108 * - "PLUPERFECT" : Pluperfect 1086 /// - "PLUPERFECT" : Pluperfect
1109 */
1110 core.String tense; 1087 core.String tense;
1111 /** 1088
1112 * The grammatical voice. 1089 /// The grammatical voice.
1113 * Possible string values are: 1090 /// Possible string values are:
1114 * - "VOICE_UNKNOWN" : Voice is not applicable in the analyzed language or is 1091 /// - "VOICE_UNKNOWN" : Voice is not applicable in the analyzed language or
1115 * not predicted. 1092 /// is not predicted.
1116 * - "ACTIVE" : Active 1093 /// - "ACTIVE" : Active
1117 * - "CAUSATIVE" : Causative 1094 /// - "CAUSATIVE" : Causative
1118 * - "PASSIVE" : Passive 1095 /// - "PASSIVE" : Passive
1119 */
1120 core.String voice; 1096 core.String voice;
1121 1097
1122 PartOfSpeech(); 1098 PartOfSpeech();
1123 1099
1124 PartOfSpeech.fromJson(core.Map _json) { 1100 PartOfSpeech.fromJson(core.Map _json) {
1125 if (_json.containsKey("aspect")) { 1101 if (_json.containsKey("aspect")) {
1126 aspect = _json["aspect"]; 1102 aspect = _json["aspect"];
1127 } 1103 }
1128 if (_json.containsKey("case")) { 1104 if (_json.containsKey("case")) {
1129 case_ = _json["case"]; 1105 case_ = _json["case"];
(...skipping 24 matching lines...)
1154 } 1130 }
1155 if (_json.containsKey("tense")) { 1131 if (_json.containsKey("tense")) {
1156 tense = _json["tense"]; 1132 tense = _json["tense"];
1157 } 1133 }
1158 if (_json.containsKey("voice")) { 1134 if (_json.containsKey("voice")) {
1159 voice = _json["voice"]; 1135 voice = _json["voice"];
1160 } 1136 }
1161 } 1137 }
1162 1138
1163 core.Map<core.String, core.Object> toJson() { 1139 core.Map<core.String, core.Object> toJson() {
1164 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1140 final core.Map<core.String, core.Object> _json =
1141 new core.Map<core.String, core.Object>();
1165 if (aspect != null) { 1142 if (aspect != null) {
1166 _json["aspect"] = aspect; 1143 _json["aspect"] = aspect;
1167 } 1144 }
1168 if (case_ != null) { 1145 if (case_ != null) {
1169 _json["case"] = case_; 1146 _json["case"] = case_;
1170 } 1147 }
1171 if (form != null) { 1148 if (form != null) {
1172 _json["form"] = form; 1149 _json["form"] = form;
1173 } 1150 }
1174 if (gender != null) { 1151 if (gender != null) {
(...skipping 20 matching lines...)
1195 if (tense != null) { 1172 if (tense != null) {
1196 _json["tense"] = tense; 1173 _json["tense"] = tense;
1197 } 1174 }
1198 if (voice != null) { 1175 if (voice != null) {
1199 _json["voice"] = voice; 1176 _json["voice"] = voice;
1200 } 1177 }
1201 return _json; 1178 return _json;
1202 } 1179 }
1203 } 1180 }
1204 1181
1205 /** Represents a sentence in the input document. */ 1182 /// Represents a sentence in the input document.
1206 class Sentence { 1183 class Sentence {
1207 /** 1184 /// For calls to AnalyzeSentiment or if
1208 * For calls to AnalyzeSentiment or if 1185 /// AnnotateTextRequest.Features.extract_document_sentiment is set to
1209 * AnnotateTextRequest.Features.extract_document_sentiment is set to 1186 /// true, this field will contain the sentiment for the sentence.
1210 * true, this field will contain the sentiment for the sentence.
1211 */
1212 Sentiment sentiment; 1187 Sentiment sentiment;
1213 /** The sentence text. */ 1188
1189 /// The sentence text.
1214 TextSpan text; 1190 TextSpan text;
1215 1191
1216 Sentence(); 1192 Sentence();
1217 1193
1218 Sentence.fromJson(core.Map _json) { 1194 Sentence.fromJson(core.Map _json) {
1219 if (_json.containsKey("sentiment")) { 1195 if (_json.containsKey("sentiment")) {
1220 sentiment = new Sentiment.fromJson(_json["sentiment"]); 1196 sentiment = new Sentiment.fromJson(_json["sentiment"]);
1221 } 1197 }
1222 if (_json.containsKey("text")) { 1198 if (_json.containsKey("text")) {
1223 text = new TextSpan.fromJson(_json["text"]); 1199 text = new TextSpan.fromJson(_json["text"]);
1224 } 1200 }
1225 } 1201 }
1226 1202
1227 core.Map<core.String, core.Object> toJson() { 1203 core.Map<core.String, core.Object> toJson() {
1228 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1204 final core.Map<core.String, core.Object> _json =
1205 new core.Map<core.String, core.Object>();
1229 if (sentiment != null) { 1206 if (sentiment != null) {
1230 _json["sentiment"] = (sentiment).toJson(); 1207 _json["sentiment"] = (sentiment).toJson();
1231 } 1208 }
1232 if (text != null) { 1209 if (text != null) {
1233 _json["text"] = (text).toJson(); 1210 _json["text"] = (text).toJson();
1234 } 1211 }
1235 return _json; 1212 return _json;
1236 } 1213 }
1237 } 1214 }
1238 1215
1239 /** 1216 /// Represents the feeling associated with the entire text or entities in
1240 * Represents the feeling associated with the entire text or entities in 1217 /// the text.
1241 * the text.
1242 */
1243 class Sentiment { 1218 class Sentiment {
1244 /** 1219 /// A non-negative number in the [0, +inf) range, which represents
1245 * A non-negative number in the [0, +inf) range, which represents 1220 /// the absolute magnitude of sentiment regardless of score (positive or
1246 * the absolute magnitude of sentiment regardless of score (positive or 1221 /// negative).
1247 * negative).
1248 */
1249 core.double magnitude; 1222 core.double magnitude;
1250 /** 1223
1251 * Sentiment score between -1.0 (negative sentiment) and 1.0 1224 /// Sentiment score between -1.0 (negative sentiment) and 1.0
1252 * (positive sentiment). 1225 /// (positive sentiment).
1253 */
1254 core.double score; 1226 core.double score;
1255 1227
1256 Sentiment(); 1228 Sentiment();
1257 1229
1258 Sentiment.fromJson(core.Map _json) { 1230 Sentiment.fromJson(core.Map _json) {
1259 if (_json.containsKey("magnitude")) { 1231 if (_json.containsKey("magnitude")) {
1260 magnitude = _json["magnitude"]; 1232 magnitude = _json["magnitude"];
1261 } 1233 }
1262 if (_json.containsKey("score")) { 1234 if (_json.containsKey("score")) {
1263 score = _json["score"]; 1235 score = _json["score"];
1264 } 1236 }
1265 } 1237 }
1266 1238
1267 core.Map<core.String, core.Object> toJson() { 1239 core.Map<core.String, core.Object> toJson() {
1268 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1240 final core.Map<core.String, core.Object> _json =
1241 new core.Map<core.String, core.Object>();
1269 if (magnitude != null) { 1242 if (magnitude != null) {
1270 _json["magnitude"] = magnitude; 1243 _json["magnitude"] = magnitude;
1271 } 1244 }
1272 if (score != null) { 1245 if (score != null) {
1273 _json["score"] = score; 1246 _json["score"] = score;
1274 } 1247 }
1275 return _json; 1248 return _json;
1276 } 1249 }
1277 } 1250 }
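
A sketch of how score and magnitude are usually read together; the +/-0.25 thresholds below are arbitrary illustration, not values defined by the API:

  String describeSentiment(Sentiment s) {
    // score gives the polarity; magnitude gives the overall emotional
    // strength accumulated over the analyzed text.
    if (s.score > 0.25) return "positive (strength ${s.magnitude})";
    if (s.score < -0.25) return "negative (strength ${s.magnitude})";
    return "neutral or mixed (strength ${s.magnitude})";
  }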
1278 1251
1279 /** 1252 /// The `Status` type defines a logical error model that is suitable for
1280 * The `Status` type defines a logical error model that is suitable for 1253 /// different
1281 * different 1254 /// programming environments, including REST APIs and RPC APIs. It is used by
1282 * programming environments, including REST APIs and RPC APIs. It is used by 1255 /// [gRPC](https://github.com/grpc). The error model is designed to be:
1283 * [gRPC](https://github.com/grpc). The error model is designed to be: 1256 ///
1284 * 1257 /// - Simple to use and understand for most users
1285 * - Simple to use and understand for most users 1258 /// - Flexible enough to meet unexpected needs
1286 * - Flexible enough to meet unexpected needs 1259 ///
1287 * 1260 /// # Overview
1288 * # Overview 1261 ///
1289 * 1262 /// The `Status` message contains three pieces of data: error code, error
1290 * The `Status` message contains three pieces of data: error code, error 1263 /// message,
1291 * message, 1264 /// and error details. The error code should be an enum value of
1292 * and error details. The error code should be an enum value of 1265 /// google.rpc.Code, but it may accept additional error codes if needed. The
1293 * google.rpc.Code, but it may accept additional error codes if needed. The 1266 /// error message should be a developer-facing English message that helps
1294 * error message should be a developer-facing English message that helps 1267 /// developers *understand* and *resolve* the error. If a localized user-facing
1295 * developers *understand* and *resolve* the error. If a localized user-facing 1268 /// error message is needed, put the localized message in the error details or
1296 * error message is needed, put the localized message in the error details or 1269 /// localize it in the client. The optional error details may contain arbitrary
1297 * localize it in the client. The optional error details may contain arbitrary 1270 /// information about the error. There is a predefined set of error detail
1298 * information about the error. There is a predefined set of error detail types 1271 /// types
1299 * in the package `google.rpc` that can be used for common error conditions. 1272 /// in the package `google.rpc` that can be used for common error conditions.
1300 * 1273 ///
1301 * # Language mapping 1274 /// # Language mapping
1302 * 1275 ///
1303 * The `Status` message is the logical representation of the error model, but it 1276 /// The `Status` message is the logical representation of the error model, but
1304 * is not necessarily the actual wire format. When the `Status` message is 1277 /// it
1305 * exposed in different client libraries and different wire protocols, it can be 1278 /// is not necessarily the actual wire format. When the `Status` message is
1306 * mapped differently. For example, it will likely be mapped to some exceptions 1279 /// exposed in different client libraries and different wire protocols, it can
1307 * in Java, but more likely mapped to some error codes in C. 1280 /// be
1308 * 1281 /// mapped differently. For example, it will likely be mapped to some
1309 * # Other uses 1282 /// exceptions
1310 * 1283 /// in Java, but more likely mapped to some error codes in C.
1311 * The error model and the `Status` message can be used in a variety of 1284 ///
1312 * environments, either with or without APIs, to provide a 1285 /// # Other uses
1313 * consistent developer experience across different environments. 1286 ///
1314 * 1287 /// The error model and the `Status` message can be used in a variety of
1315 * Example uses of this error model include: 1288 /// environments, either with or without APIs, to provide a
1316 * 1289 /// consistent developer experience across different environments.
1317 * - Partial errors. If a service needs to return partial errors to the client, 1290 ///
1318 * it may embed the `Status` in the normal response to indicate the partial 1291 /// Example uses of this error model include:
1319 * errors. 1292 ///
1320 * 1293 /// - Partial errors. If a service needs to return partial errors to the
1321 * - Workflow errors. A typical workflow has multiple steps. Each step may 1294 /// client,
1322 * have a `Status` message for error reporting. 1295 /// it may embed the `Status` in the normal response to indicate the partial
1323 * 1296 /// errors.
1324 * - Batch operations. If a client uses batch request and batch response, the 1297 ///
1325 * `Status` message should be used directly inside batch response, one for 1298 /// - Workflow errors. A typical workflow has multiple steps. Each step may
1326 * each error sub-response. 1299 /// have a `Status` message for error reporting.
1327 * 1300 ///
1328 * - Asynchronous operations. If an API call embeds asynchronous operation 1301 /// - Batch operations. If a client uses batch request and batch response, the
1329 * results in its response, the status of those operations should be 1302 /// `Status` message should be used directly inside batch response, one for
1330 * represented directly using the `Status` message. 1303 /// each error sub-response.
1331 * 1304 ///
1332 * - Logging. If some API errors are stored in logs, the message `Status` could 1305 /// - Asynchronous operations. If an API call embeds asynchronous operation
1333 * be used directly after any stripping needed for security/privacy reasons. 1306 /// results in its response, the status of those operations should be
1334 */ 1307 /// represented directly using the `Status` message.
1308 ///
1309 /// - Logging. If some API errors are stored in logs, the message `Status`
1310 /// could
1311 /// be used directly after any stripping needed for security/privacy reasons.
1335 class Status { 1312 class Status {
1336 /** The status code, which should be an enum value of google.rpc.Code. */ 1313 /// The status code, which should be an enum value of google.rpc.Code.
1337 core.int code; 1314 core.int code;
1338 /** 1315
1339 * A list of messages that carry the error details. There is a common set of 1316 /// A list of messages that carry the error details. There is a common set
1340 * message types for APIs to use. 1317 /// of
1341 * 1318 /// message types for APIs to use.
1342 * The values for Object must be JSON objects. It can consist of `num`, 1319 ///
1343 * `String`, `bool` and `null` as well as `Map` and `List` values. 1320 /// The values for Object must be JSON objects. It can consist of `num`,
1344 */ 1321 /// `String`, `bool` and `null` as well as `Map` and `List` values.
1345 core.List<core.Map<core.String, core.Object>> details; 1322 core.List<core.Map<core.String, core.Object>> details;
1346 /** 1323
1347 * A developer-facing error message, which should be in English. Any 1324 /// A developer-facing error message, which should be in English. Any
1348 * user-facing error message should be localized and sent in the 1325 /// user-facing error message should be localized and sent in the
1349 * google.rpc.Status.details field, or localized by the client. 1326 /// google.rpc.Status.details field, or localized by the client.
1350 */
1351 core.String message; 1327 core.String message;
1352 1328
1353 Status(); 1329 Status();
1354 1330
1355 Status.fromJson(core.Map _json) { 1331 Status.fromJson(core.Map _json) {
1356 if (_json.containsKey("code")) { 1332 if (_json.containsKey("code")) {
1357 code = _json["code"]; 1333 code = _json["code"];
1358 } 1334 }
1359 if (_json.containsKey("details")) { 1335 if (_json.containsKey("details")) {
1360 details = _json["details"]; 1336 details = _json["details"];
1361 } 1337 }
1362 if (_json.containsKey("message")) { 1338 if (_json.containsKey("message")) {
1363 message = _json["message"]; 1339 message = _json["message"];
1364 } 1340 }
1365 } 1341 }
1366 1342
1367 core.Map<core.String, core.Object> toJson() { 1343 core.Map<core.String, core.Object> toJson() {
1368 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1344 final core.Map<core.String, core.Object> _json =
1345 new core.Map<core.String, core.Object>();
1369 if (code != null) { 1346 if (code != null) {
1370 _json["code"] = code; 1347 _json["code"] = code;
1371 } 1348 }
1372 if (details != null) { 1349 if (details != null) {
1373 _json["details"] = details; 1350 _json["details"] = details;
1374 } 1351 }
1375 if (message != null) { 1352 if (message != null) {
1376 _json["message"] = message; 1353 _json["message"] = message;
1377 } 1354 }
1378 return _json; 1355 return _json;
1379 } 1356 }
1380 } 1357 }
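
A short sketch of inspecting a Status received in an error payload; the code value 3 corresponds to google.rpc.Code INVALID_ARGUMENT, and the message text is made up:

  final status = new Status.fromJson({
    "code": 3,
    "message": "The document is empty.",
  });
  print("RPC failed with code ${status.code}: ${status.message}");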
1381 1358
1382 /** Represents an output piece of text. */ 1359 /// Represents an output piece of text.
1383 class TextSpan { 1360 class TextSpan {
1384 /** 1361 /// The API calculates the beginning offset of the content in the original
1385 * The API calculates the beginning offset of the content in the original 1362 /// document according to the EncodingType specified in the API request.
1386 * document according to the EncodingType specified in the API request.
1387 */
1388 core.int beginOffset; 1363 core.int beginOffset;
1389 /** The content of the output text. */ 1364
1365 /// The content of the output text.
1390 core.String content; 1366 core.String content;
1391 1367
1392 TextSpan(); 1368 TextSpan();
1393 1369
1394 TextSpan.fromJson(core.Map _json) { 1370 TextSpan.fromJson(core.Map _json) {
1395 if (_json.containsKey("beginOffset")) { 1371 if (_json.containsKey("beginOffset")) {
1396 beginOffset = _json["beginOffset"]; 1372 beginOffset = _json["beginOffset"];
1397 } 1373 }
1398 if (_json.containsKey("content")) { 1374 if (_json.containsKey("content")) {
1399 content = _json["content"]; 1375 content = _json["content"];
1400 } 1376 }
1401 } 1377 }
1402 1378
1403 core.Map<core.String, core.Object> toJson() { 1379 core.Map<core.String, core.Object> toJson() {
1404 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1380 final core.Map<core.String, core.Object> _json =
1381 new core.Map<core.String, core.Object>();
1405 if (beginOffset != null) { 1382 if (beginOffset != null) {
1406 _json["beginOffset"] = beginOffset; 1383 _json["beginOffset"] = beginOffset;
1407 } 1384 }
1408 if (content != null) { 1385 if (content != null) {
1409 _json["content"] = content; 1386 _json["content"] = content;
1410 } 1387 }
1411 return _json; 1388 return _json;
1412 } 1389 }
1413 } 1390 }
1414 1391
1415 /** Represents the smallest syntactic building block of the text. */ 1392 /// Represents the smallest syntactic building block of the text.
1416 class Token { 1393 class Token {
1417 /** Dependency tree parse for this token. */ 1394 /// Dependency tree parse for this token.
1418 DependencyEdge dependencyEdge; 1395 DependencyEdge dependencyEdge;
1419 /** 1396
1420 * [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token. 1397 /// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the
1421 */ 1398 /// token.
1422 core.String lemma; 1399 core.String lemma;
1423 /** Parts of speech tag for this token. */ 1400
1401 /// Parts of speech tag for this token.
1424 PartOfSpeech partOfSpeech; 1402 PartOfSpeech partOfSpeech;
1425 /** The token text. */ 1403
1404 /// The token text.
1426 TextSpan text; 1405 TextSpan text;
1427 1406
1428 Token(); 1407 Token();
1429 1408
1430 Token.fromJson(core.Map _json) { 1409 Token.fromJson(core.Map _json) {
1431 if (_json.containsKey("dependencyEdge")) { 1410 if (_json.containsKey("dependencyEdge")) {
1432 dependencyEdge = new DependencyEdge.fromJson(_json["dependencyEdge"]); 1411 dependencyEdge = new DependencyEdge.fromJson(_json["dependencyEdge"]);
1433 } 1412 }
1434 if (_json.containsKey("lemma")) { 1413 if (_json.containsKey("lemma")) {
1435 lemma = _json["lemma"]; 1414 lemma = _json["lemma"];
1436 } 1415 }
1437 if (_json.containsKey("partOfSpeech")) { 1416 if (_json.containsKey("partOfSpeech")) {
1438 partOfSpeech = new PartOfSpeech.fromJson(_json["partOfSpeech"]); 1417 partOfSpeech = new PartOfSpeech.fromJson(_json["partOfSpeech"]);
1439 } 1418 }
1440 if (_json.containsKey("text")) { 1419 if (_json.containsKey("text")) {
1441 text = new TextSpan.fromJson(_json["text"]); 1420 text = new TextSpan.fromJson(_json["text"]);
1442 } 1421 }
1443 } 1422 }
1444 1423
1445 core.Map<core.String, core.Object> toJson() { 1424 core.Map<core.String, core.Object> toJson() {
1446 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1425 final core.Map<core.String, core.Object> _json =
1426 new core.Map<core.String, core.Object>();
1447 if (dependencyEdge != null) { 1427 if (dependencyEdge != null) {
1448 _json["dependencyEdge"] = (dependencyEdge).toJson(); 1428 _json["dependencyEdge"] = (dependencyEdge).toJson();
1449 } 1429 }
1450 if (lemma != null) { 1430 if (lemma != null) {
1451 _json["lemma"] = lemma; 1431 _json["lemma"] = lemma;
1452 } 1432 }
1453 if (partOfSpeech != null) { 1433 if (partOfSpeech != null) {
1454 _json["partOfSpeech"] = (partOfSpeech).toJson(); 1434 _json["partOfSpeech"] = (partOfSpeech).toJson();
1455 } 1435 }
1456 if (text != null) { 1436 if (text != null) {
1457 _json["text"] = (text).toJson(); 1437 _json["text"] = (text).toJson();
1458 } 1438 }
1459 return _json; 1439 return _json;
1460 } 1440 }
1461 } 1441 }
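
Tying the syntax types together, a sketch of printing each token from an analyzeSyntax response with its lemma and part-of-speech tag (the tokens list is assumed to come from the response object):

  void printTokens(List<Token> tokens) {
    for (var token in tokens) {
      print("${token.text.content}  lemma=${token.lemma}  "
          "tag=${token.partOfSpeech.tag}");
    }
  }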