Chromium Code Reviews

Side by Side Diff: generated/googleapis_beta/lib/language/v1beta1.dart

Issue 3006323002: Api-Roll 54: 2017-09-11 (Closed)
Patch Set: use 2.0.0-dev.infinity sdk constraint in pubspecs Created 3 years, 3 months ago
OLD | NEW
1 // This is a generated file (see the discoveryapis_generator project). 1 // This is a generated file (see the discoveryapis_generator project).
2 2
3 library googleapis_beta.language.v1beta1; 3 library googleapis_beta.language.v1beta1;
4 4
5 import 'dart:core' as core; 5 import 'dart:core' as core;
6 import 'dart:async' as async; 6 import 'dart:async' as async;
7 import 'dart:convert' as convert; 7 import 'dart:convert' as convert;
8 8
9 import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; 9 import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons;
10 import 'package:http/http.dart' as http; 10 import 'package:http/http.dart' as http;
11 11
12 export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' show 12 export 'package:_discoveryapis_commons/_discoveryapis_commons.dart'
13 ApiRequestError, DetailedApiRequestError; 13 show ApiRequestError, DetailedApiRequestError;
14 14
15 const core.String USER_AGENT = 'dart-api-client language/v1beta1'; 15 const core.String USER_AGENT = 'dart-api-client language/v1beta1';
16 16
17 /** 17 /// Provides natural language understanding technologies to developers.
18 * Provides natural language understanding technologies to developers. Examples 18 /// Examples include sentiment analysis, entity recognition, entity sentiment
19 * include sentiment analysis, entity recognition, entity sentiment analysis, 19 /// analysis, and text annotations.
20 * and text annotations.
21 */
22 class LanguageApi { 20 class LanguageApi {
23 /** 21 /// Apply machine learning models to reveal the structure and meaning of text
24 * Apply machine learning models to reveal the structure and meaning of text 22 static const CloudLanguageScope =
25 */ 23 "https://www.googleapis.com/auth/cloud-language";
26 static const CloudLanguageScope = "https://www.googleapis.com/auth/cloud-language";
27 24
28 /** View and manage your data across Google Cloud Platform services */ 25 /// View and manage your data across Google Cloud Platform services
29 static const CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"; 26 static const CloudPlatformScope =
30 27 "https://www.googleapis.com/auth/cloud-platform";
31 28
32 final commons.ApiRequester _requester; 29 final commons.ApiRequester _requester;
33 30
34 DocumentsResourceApi get documents => new DocumentsResourceApi(_requester); 31 DocumentsResourceApi get documents => new DocumentsResourceApi(_requester);
35 32
36 LanguageApi(http.Client client, {core.String rootUrl: "https://language.googleapis.com/", core.String servicePath: ""}) : 33 LanguageApi(http.Client client,
37 _requester = new commons.ApiRequester(client, rootUrl, servicePath, USER_AGENT); 34 {core.String rootUrl: "https://language.googleapis.com/",
35 core.String servicePath: ""})
36 : _requester =
37 new commons.ApiRequester(client, rootUrl, servicePath, USER_AGENT);
38 } 38 }
39 39
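For reviewers who want to sanity-check the reformatted constructor above, here is a minimal wiring sketch. It only uses what this file declares; the note about an authorized client (for example via package:googleapis_auth and CloudLanguageScope) is an assumption about typical usage and is not part of this CL.

import 'package:http/http.dart' as http;
import 'package:googleapis_beta/language/v1beta1.dart';

main() async {
  // The constructor accepts any http.Client; real requests need a client that
  // attaches OAuth2 credentials for CloudLanguageScope (e.g. package:googleapis_auth,
  // outside this CL). A bare client is used here only to show the wiring.
  final client = new http.Client();
  final api = new LanguageApi(client); // rootUrl / servicePath keep their defaults
  print(api.documents);                // DocumentsResourceApi backed by the shared _requester
  client.close();
}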
40
41 class DocumentsResourceApi { 40 class DocumentsResourceApi {
42 final commons.ApiRequester _requester; 41 final commons.ApiRequester _requester;
43 42
44 DocumentsResourceApi(commons.ApiRequester client) : 43 DocumentsResourceApi(commons.ApiRequester client) : _requester = client;
45 _requester = client;
46 44
47 /** 45 /// Finds named entities (currently proper names and common nouns) in the
48 * Finds named entities (currently proper names and common nouns) in the text 46 /// text
49 * along with entity types, salience, mentions for each entity, and 47 /// along with entity types, salience, mentions for each entity, and
50 * other properties. 48 /// other properties.
51 * 49 ///
52 * [request] - The metadata request object. 50 /// [request] - The metadata request object.
53 * 51 ///
54 * Request parameters: 52 /// Request parameters:
55 * 53 ///
56 * Completes with a [AnalyzeEntitiesResponse]. 54 /// Completes with a [AnalyzeEntitiesResponse].
57 * 55 ///
58 * Completes with a [commons.ApiRequestError] if the API endpoint returned an 56 /// Completes with a [commons.ApiRequestError] if the API endpoint returned
59 * error. 57 /// an error.
60 * 58 ///
61 * If the used [http.Client] completes with an error when making a REST call, 59 /// If the used [http.Client] completes with an error when making a REST
62 * this method will complete with the same error. 60 /// call, this method will complete with the same error.
63 */ 61 async.Future<AnalyzeEntitiesResponse> analyzeEntities(
64 async.Future<AnalyzeEntitiesResponse> analyzeEntities(AnalyzeEntitiesRequest request) { 62 AnalyzeEntitiesRequest request) {
65 var _url = null; 63 var _url = null;
66 var _queryParams = new core.Map(); 64 var _queryParams = new core.Map();
67 var _uploadMedia = null; 65 var _uploadMedia = null;
68 var _uploadOptions = null; 66 var _uploadOptions = null;
69 var _downloadOptions = commons.DownloadOptions.Metadata; 67 var _downloadOptions = commons.DownloadOptions.Metadata;
70 var _body = null; 68 var _body = null;
71 69
72 if (request != null) { 70 if (request != null) {
73 _body = convert.JSON.encode((request).toJson()); 71 _body = convert.JSON.encode((request).toJson());
74 } 72 }
75 73
76 _url = 'v1beta1/documents:analyzeEntities'; 74 _url = 'v1beta1/documents:analyzeEntities';
77 75
78 var _response = _requester.request(_url, 76 var _response = _requester.request(_url, "POST",
79 "POST", 77 body: _body,
80 body: _body, 78 queryParams: _queryParams,
81 queryParams: _queryParams, 79 uploadOptions: _uploadOptions,
82 uploadOptions: _uploadOptions, 80 uploadMedia: _uploadMedia,
83 uploadMedia: _uploadMedia, 81 downloadOptions: _downloadOptions);
84 downloadOptions: _downloadOptions);
85 return _response.then((data) => new AnalyzeEntitiesResponse.fromJson(data)); 82 return _response.then((data) => new AnalyzeEntitiesResponse.fromJson(data));
86 } 83 }
87 84
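As a quick reference for how this method is driven, a hedged call-site sketch follows; the `api` object is assumed to be a LanguageApi wired to an authorized client as in the earlier constructor sketch, and the sample sentence is illustrative only.

import 'dart:async';
import 'package:googleapis_beta/language/v1beta1.dart';

Future<Null> printEntities(LanguageApi api) async {
  final request = new AnalyzeEntitiesRequest()
    ..document = (new Document()
      ..type = "PLAIN_TEXT"
      ..content = "Sundar Pichai was born in Chennai.")
    ..encodingType = "UTF16"; // offsets come back in UTF-16 code units, matching Dart String indices
  final response = await api.documents.analyzeEntities(request);
  for (final entity in response.entities) {
    print("${entity.name} (${entity.type}) salience=${entity.salience}");
  }
}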
88 /** 85 /// Analyzes the sentiment of the provided text.
89 * Analyzes the sentiment of the provided text. 86 ///
90 * 87 /// [request] - The metadata request object.
91 * [request] - The metadata request object. 88 ///
92 * 89 /// Request parameters:
93 * Request parameters: 90 ///
94 * 91 /// Completes with a [AnalyzeSentimentResponse].
95 * Completes with a [AnalyzeSentimentResponse]. 92 ///
96 * 93 /// Completes with a [commons.ApiRequestError] if the API endpoint returned
97 * Completes with a [commons.ApiRequestError] if the API endpoint returned an 94 /// an error.
98 * error. 95 ///
99 * 96 /// If the used [http.Client] completes with an error when making a REST
100 * If the used [http.Client] completes with an error when making a REST call, 97 /// call, this method will complete with the same error.
101 * this method will complete with the same error. 98 async.Future<AnalyzeSentimentResponse> analyzeSentiment(
102 */ 99 AnalyzeSentimentRequest request) {
103 async.Future<AnalyzeSentimentResponse> analyzeSentiment(AnalyzeSentimentRequest request) {
104 var _url = null; 100 var _url = null;
105 var _queryParams = new core.Map(); 101 var _queryParams = new core.Map();
106 var _uploadMedia = null; 102 var _uploadMedia = null;
107 var _uploadOptions = null; 103 var _uploadOptions = null;
108 var _downloadOptions = commons.DownloadOptions.Metadata; 104 var _downloadOptions = commons.DownloadOptions.Metadata;
109 var _body = null; 105 var _body = null;
110 106
111 if (request != null) { 107 if (request != null) {
112 _body = convert.JSON.encode((request).toJson()); 108 _body = convert.JSON.encode((request).toJson());
113 } 109 }
114 110
115 _url = 'v1beta1/documents:analyzeSentiment'; 111 _url = 'v1beta1/documents:analyzeSentiment';
116 112
117 var _response = _requester.request(_url, 113 var _response = _requester.request(_url, "POST",
118 "POST", 114 body: _body,
119 body: _body, 115 queryParams: _queryParams,
120 queryParams: _queryParams, 116 uploadOptions: _uploadOptions,
121 uploadOptions: _uploadOptions, 117 uploadMedia: _uploadMedia,
122 uploadMedia: _uploadMedia, 118 downloadOptions: _downloadOptions);
123 downloadOptions: _downloadOptions); 119 return _response
124 return _response.then((data) => new AnalyzeSentimentResponse.fromJson(data)); 120 .then((data) => new AnalyzeSentimentResponse.fromJson(data));
125 } 121 }
126 122
127 /** 123 /// Analyzes the syntax of the text and provides sentence boundaries and
128 * Analyzes the syntax of the text and provides sentence boundaries and 124 /// tokenization along with part of speech tags, dependency trees, and other
129 * tokenization along with part of speech tags, dependency trees, and other 125 /// properties.
130 * properties. 126 ///
131 * 127 /// [request] - The metadata request object.
132 * [request] - The metadata request object. 128 ///
133 * 129 /// Request parameters:
134 * Request parameters: 130 ///
135 * 131 /// Completes with a [AnalyzeSyntaxResponse].
136 * Completes with a [AnalyzeSyntaxResponse]. 132 ///
137 * 133 /// Completes with a [commons.ApiRequestError] if the API endpoint returned
138 * Completes with a [commons.ApiRequestError] if the API endpoint returned an 134 /// an error.
139 * error. 135 ///
140 * 136 /// If the used [http.Client] completes with an error when making a REST
141 * If the used [http.Client] completes with an error when making a REST call, 137 /// call, this method will complete with the same error.
142 * this method will complete with the same error. 138 async.Future<AnalyzeSyntaxResponse> analyzeSyntax(
143 */ 139 AnalyzeSyntaxRequest request) {
144 async.Future<AnalyzeSyntaxResponse> analyzeSyntax(AnalyzeSyntaxRequest request) {
145 var _url = null; 140 var _url = null;
146 var _queryParams = new core.Map(); 141 var _queryParams = new core.Map();
147 var _uploadMedia = null; 142 var _uploadMedia = null;
148 var _uploadOptions = null; 143 var _uploadOptions = null;
149 var _downloadOptions = commons.DownloadOptions.Metadata; 144 var _downloadOptions = commons.DownloadOptions.Metadata;
150 var _body = null; 145 var _body = null;
151 146
152 if (request != null) { 147 if (request != null) {
153 _body = convert.JSON.encode((request).toJson()); 148 _body = convert.JSON.encode((request).toJson());
154 } 149 }
155 150
156 _url = 'v1beta1/documents:analyzeSyntax'; 151 _url = 'v1beta1/documents:analyzeSyntax';
157 152
158 var _response = _requester.request(_url, 153 var _response = _requester.request(_url, "POST",
159 "POST", 154 body: _body,
160 body: _body, 155 queryParams: _queryParams,
161 queryParams: _queryParams, 156 uploadOptions: _uploadOptions,
162 uploadOptions: _uploadOptions, 157 uploadMedia: _uploadMedia,
163 uploadMedia: _uploadMedia, 158 downloadOptions: _downloadOptions);
164 downloadOptions: _downloadOptions);
165 return _response.then((data) => new AnalyzeSyntaxResponse.fromJson(data)); 159 return _response.then((data) => new AnalyzeSyntaxResponse.fromJson(data));
166 } 160 }
167 161
168 /** 162 /// A convenience method that provides all the features that
169 * A convenience method that provides all the features that analyzeSentiment, 163 /// analyzeSentiment,
170 * analyzeEntities, and analyzeSyntax provide in one call. 164 /// analyzeEntities, and analyzeSyntax provide in one call.
171 * 165 ///
172 * [request] - The metadata request object. 166 /// [request] - The metadata request object.
173 * 167 ///
174 * Request parameters: 168 /// Request parameters:
175 * 169 ///
176 * Completes with a [AnnotateTextResponse]. 170 /// Completes with a [AnnotateTextResponse].
177 * 171 ///
178 * Completes with a [commons.ApiRequestError] if the API endpoint returned an 172 /// Completes with a [commons.ApiRequestError] if the API endpoint returned
179 * error. 173 /// an error.
180 * 174 ///
181 * If the used [http.Client] completes with an error when making a REST call, 175 /// If the used [http.Client] completes with an error when making a REST
182 * this method will complete with the same error. 176 /// call, this method will complete with the same error.
183 */
184 async.Future<AnnotateTextResponse> annotateText(AnnotateTextRequest request) { 177 async.Future<AnnotateTextResponse> annotateText(AnnotateTextRequest request) {
185 var _url = null; 178 var _url = null;
186 var _queryParams = new core.Map(); 179 var _queryParams = new core.Map();
187 var _uploadMedia = null; 180 var _uploadMedia = null;
188 var _uploadOptions = null; 181 var _uploadOptions = null;
189 var _downloadOptions = commons.DownloadOptions.Metadata; 182 var _downloadOptions = commons.DownloadOptions.Metadata;
190 var _body = null; 183 var _body = null;
191 184
192 if (request != null) { 185 if (request != null) {
193 _body = convert.JSON.encode((request).toJson()); 186 _body = convert.JSON.encode((request).toJson());
194 } 187 }
195 188
196 _url = 'v1beta1/documents:annotateText'; 189 _url = 'v1beta1/documents:annotateText';
197 190
198 var _response = _requester.request(_url, 191 var _response = _requester.request(_url, "POST",
199 "POST", 192 body: _body,
200 body: _body, 193 queryParams: _queryParams,
201 queryParams: _queryParams, 194 uploadOptions: _uploadOptions,
202 uploadOptions: _uploadOptions, 195 uploadMedia: _uploadMedia,
203 uploadMedia: _uploadMedia, 196 downloadOptions: _downloadOptions);
204 downloadOptions: _downloadOptions);
205 return _response.then((data) => new AnnotateTextResponse.fromJson(data)); 197 return _response.then((data) => new AnnotateTextResponse.fromJson(data));
206 } 198 }
207
208 } 199 }
209 200
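All four methods above complete with the error types re-exported at the top of this file rather than throwing synchronously, so a call site typically guards the await as below. This is a sketch: it assumes the usual status/message fields on DetailedApiRequestError from the commons package and an `api` object set up as in the constructor sketch.

import 'dart:async';
import 'package:googleapis_beta/language/v1beta1.dart';

Future<AnalyzeSyntaxResponse> trySyntax(LanguageApi api, Document doc) async {
  final request = new AnalyzeSyntaxRequest()
    ..document = doc
    ..encodingType = "UTF16";
  try {
    return await api.documents.analyzeSyntax(request);
  } on DetailedApiRequestError catch (e) {
    // Surfaced when the endpoint returns an error, e.g. INVALID_ARGUMENT for an
    // unsupported language or an unset Document.type (see the docs below).
    print("Language API error ${e.status}: ${e.message}");
    rethrow;
  }
}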
201 /// The entity analysis request message.
202 class AnalyzeEntitiesRequest {
203 /// Input document.
204 Document document;
210 205
211 206 /// The encoding type used by the API to calculate offsets.
212 /** The entity analysis request message. */ 207 /// Possible string values are:
213 class AnalyzeEntitiesRequest { 208 /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
214 /** Input document. */ 209 /// information (such as
215 Document document; 210 /// `begin_offset`) will be set at `-1`.
216 /** 211 /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
217 * The encoding type used by the API to calculate offsets. 212 /// calculated based
218 * Possible string values are: 213 /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
219 * - "NONE" : If `EncodingType` is not specified, encoding-dependent 214 /// that use this encoding natively.
220 * information (such as 215 /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
221 * `begin_offset`) will be set at `-1`. 216 /// calculated based
222 * - "UTF8" : Encoding-dependent information (such as `begin_offset`) is 217 /// on the UTF-16 encoding of the input. Java and Javascript are examples of
223 * calculated based 218 /// languages that use this encoding natively.
224 * on the UTF-8 encoding of the input. C++ and Go are examples of languages 219 /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
225 * that use this encoding natively. 220 /// calculated based
226 * - "UTF16" : Encoding-dependent information (such as `begin_offset`) is 221 /// on the UTF-32 encoding of the input. Python is an example of a language
227 * calculated based 222 /// that uses this encoding natively.
228 * on the UTF-16 encoding of the input. Java and Javascript are examples of
229 * languages that use this encoding natively.
230 * - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
231 * calculated based
232 * on the UTF-32 encoding of the input. Python is an example of a language
233 * that uses this encoding natively.
234 */
235 core.String encodingType; 223 core.String encodingType;
236 224
237 AnalyzeEntitiesRequest(); 225 AnalyzeEntitiesRequest();
238 226
239 AnalyzeEntitiesRequest.fromJson(core.Map _json) { 227 AnalyzeEntitiesRequest.fromJson(core.Map _json) {
240 if (_json.containsKey("document")) { 228 if (_json.containsKey("document")) {
241 document = new Document.fromJson(_json["document"]); 229 document = new Document.fromJson(_json["document"]);
242 } 230 }
243 if (_json.containsKey("encodingType")) { 231 if (_json.containsKey("encodingType")) {
244 encodingType = _json["encodingType"]; 232 encodingType = _json["encodingType"];
245 } 233 }
246 } 234 }
247 235
248 core.Map<core.String, core.Object> toJson() { 236 core.Map<core.String, core.Object> toJson() {
249 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 237 final core.Map<core.String, core.Object> _json =
238 new core.Map<core.String, core.Object>();
250 if (document != null) { 239 if (document != null) {
251 _json["document"] = (document).toJson(); 240 _json["document"] = (document).toJson();
252 } 241 }
253 if (encodingType != null) { 242 if (encodingType != null) {
254 _json["encodingType"] = encodingType; 243 _json["encodingType"] = encodingType;
255 } 244 }
256 return _json; 245 return _json;
257 } 246 }
258 } 247 }
259 248
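Since this request type is plain data, round-tripping it through the fromJson/toJson hooks above is a compact way to see the wire shape; the literal below is only an illustration (toJson() skips null fields), and the "UTF16" choice is a suggestion because Dart strings index by UTF-16 code units, so the API's begin_offset values stay usable as String offsets.

import 'package:googleapis_beta/language/v1beta1.dart';

void main() {
  final request = new AnalyzeEntitiesRequest.fromJson({
    "document": {"type": "PLAIN_TEXT", "content": "Hello, world."},
    "encodingType": "UTF16",
  });
  // Prints: {document: {content: Hello, world., type: PLAIN_TEXT}, encodingType: UTF16}
  print(request.toJson());
}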
260 /** The entity analysis response message. */ 249 /// The entity analysis response message.
261 class AnalyzeEntitiesResponse { 250 class AnalyzeEntitiesResponse {
262 /** The recognized entities in the input document. */ 251 /// The recognized entities in the input document.
263 core.List<Entity> entities; 252 core.List<Entity> entities;
264 /** 253
265 * The language of the text, which will be the same as the language specified 254 /// The language of the text, which will be the same as the language
266 * in the request or, if not specified, the automatically-detected language. 255 /// specified
267 * See Document.language field for more details. 256 /// in the request or, if not specified, the automatically-detected language.
268 */ 257 /// See Document.language field for more details.
269 core.String language; 258 core.String language;
270 259
271 AnalyzeEntitiesResponse(); 260 AnalyzeEntitiesResponse();
272 261
273 AnalyzeEntitiesResponse.fromJson(core.Map _json) { 262 AnalyzeEntitiesResponse.fromJson(core.Map _json) {
274 if (_json.containsKey("entities")) { 263 if (_json.containsKey("entities")) {
275 entities = _json["entities"].map((value) => new Entity.fromJson(value)).toList(); 264 entities =
265 _json["entities"].map((value) => new Entity.fromJson(value)).toList();
276 } 266 }
277 if (_json.containsKey("language")) { 267 if (_json.containsKey("language")) {
278 language = _json["language"]; 268 language = _json["language"];
279 } 269 }
280 } 270 }
281 271
282 core.Map<core.String, core.Object> toJson() { 272 core.Map<core.String, core.Object> toJson() {
283 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 273 final core.Map<core.String, core.Object> _json =
274 new core.Map<core.String, core.Object>();
284 if (entities != null) { 275 if (entities != null) {
285 _json["entities"] = entities.map((value) => (value).toJson()).toList(); 276 _json["entities"] = entities.map((value) => (value).toJson()).toList();
286 } 277 }
287 if (language != null) { 278 if (language != null) {
288 _json["language"] = language; 279 _json["language"] = language;
289 } 280 }
290 return _json; 281 return _json;
291 } 282 }
292 } 283 }
293 284
294 /** The sentiment analysis request message. */ 285 /// The sentiment analysis request message.
295 class AnalyzeSentimentRequest { 286 class AnalyzeSentimentRequest {
296 /** Input document. */ 287 /// Input document.
297 Document document; 288 Document document;
298 /** 289
299 * The encoding type used by the API to calculate sentence offsets for the 290 /// The encoding type used by the API to calculate sentence offsets for the
300 * sentence sentiment. 291 /// sentence sentiment.
301 * Possible string values are: 292 /// Possible string values are:
302 * - "NONE" : If `EncodingType` is not specified, encoding-dependent 293 /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
303 * information (such as 294 /// information (such as
304 * `begin_offset`) will be set at `-1`. 295 /// `begin_offset`) will be set at `-1`.
305 * - "UTF8" : Encoding-dependent information (such as `begin_offset`) is 296 /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
306 * calculated based 297 /// calculated based
307 * on the UTF-8 encoding of the input. C++ and Go are examples of languages 298 /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
308 * that use this encoding natively. 299 /// that use this encoding natively.
309 * - "UTF16" : Encoding-dependent information (such as `begin_offset`) is 300 /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
310 * calculated based 301 /// calculated based
311 * on the UTF-16 encoding of the input. Java and Javascript are examples of 302 /// on the UTF-16 encoding of the input. Java and Javascript are examples of
312 * languages that use this encoding natively. 303 /// languages that use this encoding natively.
313 * - "UTF32" : Encoding-dependent information (such as `begin_offset`) is 304 /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
314 * calculated based 305 /// calculated based
315 * on the UTF-32 encoding of the input. Python is an example of a language 306 /// on the UTF-32 encoding of the input. Python is an example of a language
316 * that uses this encoding natively. 307 /// that uses this encoding natively.
317 */
318 core.String encodingType; 308 core.String encodingType;
319 309
320 AnalyzeSentimentRequest(); 310 AnalyzeSentimentRequest();
321 311
322 AnalyzeSentimentRequest.fromJson(core.Map _json) { 312 AnalyzeSentimentRequest.fromJson(core.Map _json) {
323 if (_json.containsKey("document")) { 313 if (_json.containsKey("document")) {
324 document = new Document.fromJson(_json["document"]); 314 document = new Document.fromJson(_json["document"]);
325 } 315 }
326 if (_json.containsKey("encodingType")) { 316 if (_json.containsKey("encodingType")) {
327 encodingType = _json["encodingType"]; 317 encodingType = _json["encodingType"];
328 } 318 }
329 } 319 }
330 320
331 core.Map<core.String, core.Object> toJson() { 321 core.Map<core.String, core.Object> toJson() {
332 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 322 final core.Map<core.String, core.Object> _json =
323 new core.Map<core.String, core.Object>();
333 if (document != null) { 324 if (document != null) {
334 _json["document"] = (document).toJson(); 325 _json["document"] = (document).toJson();
335 } 326 }
336 if (encodingType != null) { 327 if (encodingType != null) {
337 _json["encodingType"] = encodingType; 328 _json["encodingType"] = encodingType;
338 } 329 }
339 return _json; 330 return _json;
340 } 331 }
341 } 332 }
342 333
343 /** The sentiment analysis response message. */ 334 /// The sentiment analysis response message.
344 class AnalyzeSentimentResponse { 335 class AnalyzeSentimentResponse {
345 /** The overall sentiment of the input document. */ 336 /// The overall sentiment of the input document.
346 Sentiment documentSentiment; 337 Sentiment documentSentiment;
347 /** 338
348 * The language of the text, which will be the same as the language specified 339 /// The language of the text, which will be the same as the language
349 * in the request or, if not specified, the automatically-detected language. 340 /// specified
350 * See Document.language field for more details. 341 /// in the request or, if not specified, the automatically-detected language.
351 */ 342 /// See Document.language field for more details.
352 core.String language; 343 core.String language;
353 /** The sentiment for all the sentences in the document. */ 344
345 /// The sentiment for all the sentences in the document.
354 core.List<Sentence> sentences; 346 core.List<Sentence> sentences;
355 347
356 AnalyzeSentimentResponse(); 348 AnalyzeSentimentResponse();
357 349
358 AnalyzeSentimentResponse.fromJson(core.Map _json) { 350 AnalyzeSentimentResponse.fromJson(core.Map _json) {
359 if (_json.containsKey("documentSentiment")) { 351 if (_json.containsKey("documentSentiment")) {
360 documentSentiment = new Sentiment.fromJson(_json["documentSentiment"]); 352 documentSentiment = new Sentiment.fromJson(_json["documentSentiment"]);
361 } 353 }
362 if (_json.containsKey("language")) { 354 if (_json.containsKey("language")) {
363 language = _json["language"]; 355 language = _json["language"];
364 } 356 }
365 if (_json.containsKey("sentences")) { 357 if (_json.containsKey("sentences")) {
366 sentences = _json["sentences"].map((value) => new Sentence.fromJson(value)).toList(); 358 sentences = _json["sentences"]
359 .map((value) => new Sentence.fromJson(value))
360 .toList();
367 } 361 }
368 } 362 }
369 363
370 core.Map<core.String, core.Object> toJson() { 364 core.Map<core.String, core.Object> toJson() {
371 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 365 final core.Map<core.String, core.Object> _json =
366 new core.Map<core.String, core.Object>();
372 if (documentSentiment != null) { 367 if (documentSentiment != null) {
373 _json["documentSentiment"] = (documentSentiment).toJson(); 368 _json["documentSentiment"] = (documentSentiment).toJson();
374 } 369 }
375 if (language != null) { 370 if (language != null) {
376 _json["language"] = language; 371 _json["language"] = language;
377 } 372 }
378 if (sentences != null) { 373 if (sentences != null) {
379 _json["sentences"] = sentences.map((value) => (value).toJson()).toList(); 374 _json["sentences"] = sentences.map((value) => (value).toJson()).toList();
380 } 375 }
381 return _json; 376 return _json;
382 } 377 }
383 } 378 }
384 379
385 /** The syntax analysis request message. */ 380 /// The syntax analysis request message.
386 class AnalyzeSyntaxRequest { 381 class AnalyzeSyntaxRequest {
387 /** Input document. */ 382 /// Input document.
388 Document document; 383 Document document;
389 /** 384
390 * The encoding type used by the API to calculate offsets. 385 /// The encoding type used by the API to calculate offsets.
391 * Possible string values are: 386 /// Possible string values are:
392 * - "NONE" : If `EncodingType` is not specified, encoding-dependent 387 /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
393 * information (such as 388 /// information (such as
394 * `begin_offset`) will be set at `-1`. 389 /// `begin_offset`) will be set at `-1`.
395 * - "UTF8" : Encoding-dependent information (such as `begin_offset`) is 390 /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
396 * calculated based 391 /// calculated based
397 * on the UTF-8 encoding of the input. C++ and Go are examples of languages 392 /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
398 * that use this encoding natively. 393 /// that use this encoding natively.
399 * - "UTF16" : Encoding-dependent information (such as `begin_offset`) is 394 /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
400 * calculated based 395 /// calculated based
401 * on the UTF-16 encoding of the input. Java and Javascript are examples of 396 /// on the UTF-16 encoding of the input. Java and Javascript are examples of
402 * languages that use this encoding natively. 397 /// languages that use this encoding natively.
403 * - "UTF32" : Encoding-dependent information (such as `begin_offset`) is 398 /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
404 * calculated based 399 /// calculated based
405 * on the UTF-32 encoding of the input. Python is an example of a language 400 /// on the UTF-32 encoding of the input. Python is an example of a language
406 * that uses this encoding natively. 401 /// that uses this encoding natively.
407 */
408 core.String encodingType; 402 core.String encodingType;
409 403
410 AnalyzeSyntaxRequest(); 404 AnalyzeSyntaxRequest();
411 405
412 AnalyzeSyntaxRequest.fromJson(core.Map _json) { 406 AnalyzeSyntaxRequest.fromJson(core.Map _json) {
413 if (_json.containsKey("document")) { 407 if (_json.containsKey("document")) {
414 document = new Document.fromJson(_json["document"]); 408 document = new Document.fromJson(_json["document"]);
415 } 409 }
416 if (_json.containsKey("encodingType")) { 410 if (_json.containsKey("encodingType")) {
417 encodingType = _json["encodingType"]; 411 encodingType = _json["encodingType"];
418 } 412 }
419 } 413 }
420 414
421 core.Map<core.String, core.Object> toJson() { 415 core.Map<core.String, core.Object> toJson() {
422 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 416 final core.Map<core.String, core.Object> _json =
417 new core.Map<core.String, core.Object>();
423 if (document != null) { 418 if (document != null) {
424 _json["document"] = (document).toJson(); 419 _json["document"] = (document).toJson();
425 } 420 }
426 if (encodingType != null) { 421 if (encodingType != null) {
427 _json["encodingType"] = encodingType; 422 _json["encodingType"] = encodingType;
428 } 423 }
429 return _json; 424 return _json;
430 } 425 }
431 } 426 }
432 427
433 /** The syntax analysis response message. */ 428 /// The syntax analysis response message.
434 class AnalyzeSyntaxResponse { 429 class AnalyzeSyntaxResponse {
435 /** 430 /// The language of the text, which will be the same as the language
436 * The language of the text, which will be the same as the language specified 431 /// specified
437 * in the request or, if not specified, the automatically-detected language. 432 /// in the request or, if not specified, the automatically-detected language.
438 * See Document.language field for more details. 433 /// See Document.language field for more details.
439 */
440 core.String language; 434 core.String language;
441 /** Sentences in the input document. */ 435
436 /// Sentences in the input document.
442 core.List<Sentence> sentences; 437 core.List<Sentence> sentences;
443 /** Tokens, along with their syntactic information, in the input document. */ 438
439 /// Tokens, along with their syntactic information, in the input document.
444 core.List<Token> tokens; 440 core.List<Token> tokens;
445 441
446 AnalyzeSyntaxResponse(); 442 AnalyzeSyntaxResponse();
447 443
448 AnalyzeSyntaxResponse.fromJson(core.Map _json) { 444 AnalyzeSyntaxResponse.fromJson(core.Map _json) {
449 if (_json.containsKey("language")) { 445 if (_json.containsKey("language")) {
450 language = _json["language"]; 446 language = _json["language"];
451 } 447 }
452 if (_json.containsKey("sentences")) { 448 if (_json.containsKey("sentences")) {
453 sentences = _json["sentences"].map((value) => new Sentence.fromJson(value)).toList(); 449 sentences = _json["sentences"]
450 .map((value) => new Sentence.fromJson(value))
451 .toList();
454 } 452 }
455 if (_json.containsKey("tokens")) { 453 if (_json.containsKey("tokens")) {
456 tokens = _json["tokens"].map((value) => new Token.fromJson(value)).toList(); 454 tokens =
455 _json["tokens"].map((value) => new Token.fromJson(value)).toList();
457 } 456 }
458 } 457 }
459 458
460 core.Map<core.String, core.Object> toJson() { 459 core.Map<core.String, core.Object> toJson() {
461 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 460 final core.Map<core.String, core.Object> _json =
461 new core.Map<core.String, core.Object>();
462 if (language != null) { 462 if (language != null) {
463 _json["language"] = language; 463 _json["language"] = language;
464 } 464 }
465 if (sentences != null) { 465 if (sentences != null) {
466 _json["sentences"] = sentences.map((value) => (value).toJson()).toList(); 466 _json["sentences"] = sentences.map((value) => (value).toJson()).toList();
467 } 467 }
468 if (tokens != null) { 468 if (tokens != null) {
469 _json["tokens"] = tokens.map((value) => (value).toJson()).toList(); 469 _json["tokens"] = tokens.map((value) => (value).toJson()).toList();
470 } 470 }
471 return _json; 471 return _json;
472 } 472 }
473 } 473 }
474 474
475 /** 475 /// The request message for the text annotation API, which can perform multiple
476 * The request message for the text annotation API, which can perform multiple 476 /// analysis types (sentiment, entities, and syntax) in one call.
477 * analysis types (sentiment, entities, and syntax) in one call.
478 */
479 class AnnotateTextRequest { 477 class AnnotateTextRequest {
480 /** Input document. */ 478 /// Input document.
481 Document document; 479 Document document;
482 /** 480
483 * The encoding type used by the API to calculate offsets. 481 /// The encoding type used by the API to calculate offsets.
484 * Possible string values are: 482 /// Possible string values are:
485 * - "NONE" : If `EncodingType` is not specified, encoding-dependent 483 /// - "NONE" : If `EncodingType` is not specified, encoding-dependent
486 * information (such as 484 /// information (such as
487 * `begin_offset`) will be set at `-1`. 485 /// `begin_offset`) will be set at `-1`.
488 * - "UTF8" : Encoding-dependent information (such as `begin_offset`) is 486 /// - "UTF8" : Encoding-dependent information (such as `begin_offset`) is
489 * calculated based 487 /// calculated based
490 * on the UTF-8 encoding of the input. C++ and Go are examples of languages 488 /// on the UTF-8 encoding of the input. C++ and Go are examples of languages
491 * that use this encoding natively. 489 /// that use this encoding natively.
492 * - "UTF16" : Encoding-dependent information (such as `begin_offset`) is 490 /// - "UTF16" : Encoding-dependent information (such as `begin_offset`) is
493 * calculated based 491 /// calculated based
494 * on the UTF-16 encoding of the input. Java and Javascript are examples of 492 /// on the UTF-16 encoding of the input. Java and Javascript are examples of
495 * languages that use this encoding natively. 493 /// languages that use this encoding natively.
496 * - "UTF32" : Encoding-dependent information (such as `begin_offset`) is 494 /// - "UTF32" : Encoding-dependent information (such as `begin_offset`) is
497 * calculated based 495 /// calculated based
498 * on the UTF-32 encoding of the input. Python is an example of a language 496 /// on the UTF-32 encoding of the input. Python is an example of a language
499 * that uses this encoding natively. 497 /// that uses this encoding natively.
500 */
501 core.String encodingType; 498 core.String encodingType;
502 /** The enabled features. */ 499
500 /// The enabled features.
503 Features features; 501 Features features;
504 502
505 AnnotateTextRequest(); 503 AnnotateTextRequest();
506 504
507 AnnotateTextRequest.fromJson(core.Map _json) { 505 AnnotateTextRequest.fromJson(core.Map _json) {
508 if (_json.containsKey("document")) { 506 if (_json.containsKey("document")) {
509 document = new Document.fromJson(_json["document"]); 507 document = new Document.fromJson(_json["document"]);
510 } 508 }
511 if (_json.containsKey("encodingType")) { 509 if (_json.containsKey("encodingType")) {
512 encodingType = _json["encodingType"]; 510 encodingType = _json["encodingType"];
513 } 511 }
514 if (_json.containsKey("features")) { 512 if (_json.containsKey("features")) {
515 features = new Features.fromJson(_json["features"]); 513 features = new Features.fromJson(_json["features"]);
516 } 514 }
517 } 515 }
518 516
519 core.Map<core.String, core.Object> toJson() { 517 core.Map<core.String, core.Object> toJson() {
520 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 518 final core.Map<core.String, core.Object> _json =
519 new core.Map<core.String, core.Object>();
521 if (document != null) { 520 if (document != null) {
522 _json["document"] = (document).toJson(); 521 _json["document"] = (document).toJson();
523 } 522 }
524 if (encodingType != null) { 523 if (encodingType != null) {
525 _json["encodingType"] = encodingType; 524 _json["encodingType"] = encodingType;
526 } 525 }
527 if (features != null) { 526 if (features != null) {
528 _json["features"] = (features).toJson(); 527 _json["features"] = (features).toJson();
529 } 528 }
530 return _json; 529 return _json;
531 } 530 }
532 } 531 }
533 532
534 /** The text annotations response message. */ 533 /// The text annotations response message.
535 class AnnotateTextResponse { 534 class AnnotateTextResponse {
536 /** 535 /// The overall sentiment for the document. Populated if the user enables
537 * The overall sentiment for the document. Populated if the user enables 536 /// AnnotateTextRequest.Features.extract_document_sentiment.
538 * AnnotateTextRequest.Features.extract_document_sentiment.
539 */
540 Sentiment documentSentiment; 537 Sentiment documentSentiment;
541 /** 538
542 * Entities, along with their semantic information, in the input document. 539 /// Entities, along with their semantic information, in the input document.
543 * Populated if the user enables 540 /// Populated if the user enables
544 * AnnotateTextRequest.Features.extract_entities. 541 /// AnnotateTextRequest.Features.extract_entities.
545 */
546 core.List<Entity> entities; 542 core.List<Entity> entities;
547 /** 543
548 * The language of the text, which will be the same as the language specified 544 /// The language of the text, which will be the same as the language
549 * in the request or, if not specified, the automatically-detected language. 545 /// specified
550 * See Document.language field for more details. 546 /// in the request or, if not specified, the automatically-detected language.
551 */ 547 /// See Document.language field for more details.
552 core.String language; 548 core.String language;
553 /** 549
554 * Sentences in the input document. Populated if the user enables 550 /// Sentences in the input document. Populated if the user enables
555 * AnnotateTextRequest.Features.extract_syntax. 551 /// AnnotateTextRequest.Features.extract_syntax.
556 */
557 core.List<Sentence> sentences; 552 core.List<Sentence> sentences;
558 /** 553
559 * Tokens, along with their syntactic information, in the input document. 554 /// Tokens, along with their syntactic information, in the input document.
560 * Populated if the user enables 555 /// Populated if the user enables
561 * AnnotateTextRequest.Features.extract_syntax. 556 /// AnnotateTextRequest.Features.extract_syntax.
562 */
563 core.List<Token> tokens; 557 core.List<Token> tokens;
564 558
565 AnnotateTextResponse(); 559 AnnotateTextResponse();
566 560
567 AnnotateTextResponse.fromJson(core.Map _json) { 561 AnnotateTextResponse.fromJson(core.Map _json) {
568 if (_json.containsKey("documentSentiment")) { 562 if (_json.containsKey("documentSentiment")) {
569 documentSentiment = new Sentiment.fromJson(_json["documentSentiment"]); 563 documentSentiment = new Sentiment.fromJson(_json["documentSentiment"]);
570 } 564 }
571 if (_json.containsKey("entities")) { 565 if (_json.containsKey("entities")) {
572 entities = _json["entities"].map((value) => new Entity.fromJson(value)).toList(); 566 entities =
567 _json["entities"].map((value) => new Entity.fromJson(value)).toList();
573 } 568 }
574 if (_json.containsKey("language")) { 569 if (_json.containsKey("language")) {
575 language = _json["language"]; 570 language = _json["language"];
576 } 571 }
577 if (_json.containsKey("sentences")) { 572 if (_json.containsKey("sentences")) {
578 sentences = _json["sentences"].map((value) => new Sentence.fromJson(value)).toList(); 573 sentences = _json["sentences"]
574 .map((value) => new Sentence.fromJson(value))
575 .toList();
579 } 576 }
580 if (_json.containsKey("tokens")) { 577 if (_json.containsKey("tokens")) {
581 tokens = _json["tokens"].map((value) => new Token.fromJson(value)).toList(); 578 tokens =
579 _json["tokens"].map((value) => new Token.fromJson(value)).toList();
582 } 580 }
583 } 581 }
584 582
585 core.Map<core.String, core.Object> toJson() { 583 core.Map<core.String, core.Object> toJson() {
586 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 584 final core.Map<core.String, core.Object> _json =
585 new core.Map<core.String, core.Object>();
587 if (documentSentiment != null) { 586 if (documentSentiment != null) {
588 _json["documentSentiment"] = (documentSentiment).toJson(); 587 _json["documentSentiment"] = (documentSentiment).toJson();
589 } 588 }
590 if (entities != null) { 589 if (entities != null) {
591 _json["entities"] = entities.map((value) => (value).toJson()).toList(); 590 _json["entities"] = entities.map((value) => (value).toJson()).toList();
592 } 591 }
593 if (language != null) { 592 if (language != null) {
594 _json["language"] = language; 593 _json["language"] = language;
595 } 594 }
596 if (sentences != null) { 595 if (sentences != null) {
597 _json["sentences"] = sentences.map((value) => (value).toJson()).toList(); 596 _json["sentences"] = sentences.map((value) => (value).toJson()).toList();
598 } 597 }
599 if (tokens != null) { 598 if (tokens != null) {
600 _json["tokens"] = tokens.map((value) => (value).toJson()).toList(); 599 _json["tokens"] = tokens.map((value) => (value).toJson()).toList();
601 } 600 }
602 return _json; 601 return _json;
603 } 602 }
604 } 603 }
605 604
606 /** Represents dependency parse tree information for a token. */ 605 /// Represents dependency parse tree information for a token.
607 class DependencyEdge { 606 class DependencyEdge {
608 /** 607 /// Represents the head of this token in the dependency tree.
609 * Represents the head of this token in the dependency tree. 608 /// This is the index of the token which has an arc going to this token.
610 * This is the index of the token which has an arc going to this token. 609 /// The index is the position of the token in the array of tokens returned
611 * The index is the position of the token in the array of tokens returned 610 /// by the API method. If this token is a root token, then the
612 * by the API method. If this token is a root token, then the 611 /// `head_token_index` is its own index.
613 * `head_token_index` is its own index.
614 */
615 core.int headTokenIndex; 612 core.int headTokenIndex;
616 /** 613
617 * The parse label for the token. 614 /// The parse label for the token.
618 * Possible string values are: 615 /// Possible string values are:
619 * - "UNKNOWN" : Unknown 616 /// - "UNKNOWN" : Unknown
620 * - "ABBREV" : Abbreviation modifier 617 /// - "ABBREV" : Abbreviation modifier
621 * - "ACOMP" : Adjectival complement 618 /// - "ACOMP" : Adjectival complement
622 * - "ADVCL" : Adverbial clause modifier 619 /// - "ADVCL" : Adverbial clause modifier
623 * - "ADVMOD" : Adverbial modifier 620 /// - "ADVMOD" : Adverbial modifier
624 * - "AMOD" : Adjectival modifier of an NP 621 /// - "AMOD" : Adjectival modifier of an NP
625 * - "APPOS" : Appositional modifier of an NP 622 /// - "APPOS" : Appositional modifier of an NP
626 * - "ATTR" : Attribute dependent of a copular verb 623 /// - "ATTR" : Attribute dependent of a copular verb
627 * - "AUX" : Auxiliary (non-main) verb 624 /// - "AUX" : Auxiliary (non-main) verb
628 * - "AUXPASS" : Passive auxiliary 625 /// - "AUXPASS" : Passive auxiliary
629 * - "CC" : Coordinating conjunction 626 /// - "CC" : Coordinating conjunction
630 * - "CCOMP" : Clausal complement of a verb or adjective 627 /// - "CCOMP" : Clausal complement of a verb or adjective
631 * - "CONJ" : Conjunct 628 /// - "CONJ" : Conjunct
632 * - "CSUBJ" : Clausal subject 629 /// - "CSUBJ" : Clausal subject
633 * - "CSUBJPASS" : Clausal passive subject 630 /// - "CSUBJPASS" : Clausal passive subject
634 * - "DEP" : Dependency (unable to determine) 631 /// - "DEP" : Dependency (unable to determine)
635 * - "DET" : Determiner 632 /// - "DET" : Determiner
636 * - "DISCOURSE" : Discourse 633 /// - "DISCOURSE" : Discourse
637 * - "DOBJ" : Direct object 634 /// - "DOBJ" : Direct object
638 * - "EXPL" : Expletive 635 /// - "EXPL" : Expletive
639 * - "GOESWITH" : Goes with (part of a word in a text not well edited) 636 /// - "GOESWITH" : Goes with (part of a word in a text not well edited)
640 * - "IOBJ" : Indirect object 637 /// - "IOBJ" : Indirect object
641 * - "MARK" : Marker (word introducing a subordinate clause) 638 /// - "MARK" : Marker (word introducing a subordinate clause)
642 * - "MWE" : Multi-word expression 639 /// - "MWE" : Multi-word expression
643 * - "MWV" : Multi-word verbal expression 640 /// - "MWV" : Multi-word verbal expression
644 * - "NEG" : Negation modifier 641 /// - "NEG" : Negation modifier
645 * - "NN" : Noun compound modifier 642 /// - "NN" : Noun compound modifier
646 * - "NPADVMOD" : Noun phrase used as an adverbial modifier 643 /// - "NPADVMOD" : Noun phrase used as an adverbial modifier
647 * - "NSUBJ" : Nominal subject 644 /// - "NSUBJ" : Nominal subject
648 * - "NSUBJPASS" : Passive nominal subject 645 /// - "NSUBJPASS" : Passive nominal subject
649 * - "NUM" : Numeric modifier of a noun 646 /// - "NUM" : Numeric modifier of a noun
650 * - "NUMBER" : Element of compound number 647 /// - "NUMBER" : Element of compound number
651 * - "P" : Punctuation mark 648 /// - "P" : Punctuation mark
652 * - "PARATAXIS" : Parataxis relation 649 /// - "PARATAXIS" : Parataxis relation
653 * - "PARTMOD" : Participial modifier 650 /// - "PARTMOD" : Participial modifier
654 * - "PCOMP" : The complement of a preposition is a clause 651 /// - "PCOMP" : The complement of a preposition is a clause
655 * - "POBJ" : Object of a preposition 652 /// - "POBJ" : Object of a preposition
656 * - "POSS" : Possession modifier 653 /// - "POSS" : Possession modifier
657 * - "POSTNEG" : Postverbal negative particle 654 /// - "POSTNEG" : Postverbal negative particle
658 * - "PRECOMP" : Predicate complement 655 /// - "PRECOMP" : Predicate complement
659 * - "PRECONJ" : Preconjunt 656 /// - "PRECONJ" : Preconjunt
660 * - "PREDET" : Predeterminer 657 /// - "PREDET" : Predeterminer
661 * - "PREF" : Prefix 658 /// - "PREF" : Prefix
662 * - "PREP" : Prepositional modifier 659 /// - "PREP" : Prepositional modifier
663 * - "PRONL" : The relationship between a verb and verbal morpheme 660 /// - "PRONL" : The relationship between a verb and verbal morpheme
664 * - "PRT" : Particle 661 /// - "PRT" : Particle
665 * - "PS" : Associative or possessive marker 662 /// - "PS" : Associative or possessive marker
666 * - "QUANTMOD" : Quantifier phrase modifier 663 /// - "QUANTMOD" : Quantifier phrase modifier
667 * - "RCMOD" : Relative clause modifier 664 /// - "RCMOD" : Relative clause modifier
668 * - "RCMODREL" : Complementizer in relative clause 665 /// - "RCMODREL" : Complementizer in relative clause
669 * - "RDROP" : Ellipsis without a preceding predicate 666 /// - "RDROP" : Ellipsis without a preceding predicate
670 * - "REF" : Referent 667 /// - "REF" : Referent
671 * - "REMNANT" : Remnant 668 /// - "REMNANT" : Remnant
672 * - "REPARANDUM" : Reparandum 669 /// - "REPARANDUM" : Reparandum
673 * - "ROOT" : Root 670 /// - "ROOT" : Root
674 * - "SNUM" : Suffix specifying a unit of number 671 /// - "SNUM" : Suffix specifying a unit of number
675 * - "SUFF" : Suffix 672 /// - "SUFF" : Suffix
676 * - "TMOD" : Temporal modifier 673 /// - "TMOD" : Temporal modifier
677 * - "TOPIC" : Topic marker 674 /// - "TOPIC" : Topic marker
678 * - "VMOD" : Clause headed by an infinite form of the verb that modifies a 675 /// - "VMOD" : Clause headed by an infinite form of the verb that modifies a
679 * noun 676 /// noun
680 * - "VOCATIVE" : Vocative 677 /// - "VOCATIVE" : Vocative
681 * - "XCOMP" : Open clausal complement 678 /// - "XCOMP" : Open clausal complement
682 * - "SUFFIX" : Name suffix 679 /// - "SUFFIX" : Name suffix
683 * - "TITLE" : Name title 680 /// - "TITLE" : Name title
684 * - "ADVPHMOD" : Adverbial phrase modifier 681 /// - "ADVPHMOD" : Adverbial phrase modifier
685 * - "AUXCAUS" : Causative auxiliary 682 /// - "AUXCAUS" : Causative auxiliary
686 * - "AUXVV" : Helper auxiliary 683 /// - "AUXVV" : Helper auxiliary
687 * - "DTMOD" : Rentaishi (Prenominal modifier) 684 /// - "DTMOD" : Rentaishi (Prenominal modifier)
688 * - "FOREIGN" : Foreign words 685 /// - "FOREIGN" : Foreign words
689 * - "KW" : Keyword 686 /// - "KW" : Keyword
690 * - "LIST" : List for chains of comparable items 687 /// - "LIST" : List for chains of comparable items
691 * - "NOMC" : Nominalized clause 688 /// - "NOMC" : Nominalized clause
692 * - "NOMCSUBJ" : Nominalized clausal subject 689 /// - "NOMCSUBJ" : Nominalized clausal subject
693 * - "NOMCSUBJPASS" : Nominalized clausal passive 690 /// - "NOMCSUBJPASS" : Nominalized clausal passive
694 * - "NUMC" : Compound of numeric modifier 691 /// - "NUMC" : Compound of numeric modifier
695 * - "COP" : Copula 692 /// - "COP" : Copula
696 * - "DISLOCATED" : Dislocated relation (for fronted/topicalized elements) 693 /// - "DISLOCATED" : Dislocated relation (for fronted/topicalized elements)
697 */
698 core.String label; 694 core.String label;
699 695
700 DependencyEdge(); 696 DependencyEdge();
701 697
702 DependencyEdge.fromJson(core.Map _json) { 698 DependencyEdge.fromJson(core.Map _json) {
703 if (_json.containsKey("headTokenIndex")) { 699 if (_json.containsKey("headTokenIndex")) {
704 headTokenIndex = _json["headTokenIndex"]; 700 headTokenIndex = _json["headTokenIndex"];
705 } 701 }
706 if (_json.containsKey("label")) { 702 if (_json.containsKey("label")) {
707 label = _json["label"]; 703 label = _json["label"];
708 } 704 }
709 } 705 }
710 706
711 core.Map<core.String, core.Object> toJson() { 707 core.Map<core.String, core.Object> toJson() {
712 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 708 final core.Map<core.String, core.Object> _json =
709 new core.Map<core.String, core.Object>();
713 if (headTokenIndex != null) { 710 if (headTokenIndex != null) {
714 _json["headTokenIndex"] = headTokenIndex; 711 _json["headTokenIndex"] = headTokenIndex;
715 } 712 }
716 if (label != null) { 713 if (label != null) {
717 _json["label"] = label; 714 _json["label"] = label;
718 } 715 }
719 return _json; 716 return _json;
720 } 717 }
721 } 718 }
722 719
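The headTokenIndex contract documented above (root tokens point at themselves) makes the token list from analyzeSyntax easy to walk. The sketch below assumes the Token model defined further down in this file exposes dependencyEdge and text (a TextSpan with a content string) mirroring the REST resource; that part is not visible in this diff chunk.

import 'package:googleapis_beta/language/v1beta1.dart';

/// Prints each token with its syntactic head, using the head_token_index
/// convention documented on DependencyEdge.
void printHeads(AnalyzeSyntaxResponse response) {
  final tokens = response.tokens;
  for (var i = 0; i < tokens.length; i++) {
    final edge = tokens[i].dependencyEdge;   // assumed Token field (not in this chunk)
    final isRoot = edge.headTokenIndex == i; // root tokens point at their own index
    final head = tokens[edge.headTokenIndex];
    print("${tokens[i].text.content} --${edge.label}--> "
        "${isRoot ? 'ROOT' : head.text.content}");
  }
}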
723 /** 720 /// ################################################################ #
724 * ################################################################ # 721 ///
725 * 722 /// Represents the input to API methods.
726 * Represents the input to API methods.
727 */
728 class Document { 723 class Document {
729 /** The content of the input in string format. */ 724 /// The content of the input in string format.
730 core.String content; 725 core.String content;
731 /** 726
732 * The Google Cloud Storage URI where the file content is located. 727 /// The Google Cloud Storage URI where the file content is located.
733 * This URI must be of the form: gs://bucket_name/object_name. For more 728 /// This URI must be of the form: gs://bucket_name/object_name. For more
734 * details, see https://cloud.google.com/storage/docs/reference-uris. 729 /// details, see https://cloud.google.com/storage/docs/reference-uris.
735 * NOTE: Cloud Storage object versioning is not supported. 730 /// NOTE: Cloud Storage object versioning is not supported.
736 */
737 core.String gcsContentUri; 731 core.String gcsContentUri;
738 /** 732
739 * The language of the document (if not specified, the language is 733 /// The language of the document (if not specified, the language is
740 * automatically detected). Both ISO and BCP-47 language codes are 734 /// automatically detected). Both ISO and BCP-47 language codes are
741 * accepted.<br> 735 /// accepted.<br>
742 * [Language Support](/natural-language/docs/languages) 736 /// [Language Support](/natural-language/docs/languages)
743 * lists currently supported languages for each API method. 737 /// lists currently supported languages for each API method.
744 * If the language (either specified by the caller or automatically detected) 738 /// If the language (either specified by the caller or automatically
745 * is not supported by the called API method, an `INVALID_ARGUMENT` error 739 /// detected)
746 * is returned. 740 /// is not supported by the called API method, an `INVALID_ARGUMENT` error
747 */ 741 /// is returned.
748 core.String language; 742 core.String language;
749 /** 743
750 * Required. If the type is not set or is `TYPE_UNSPECIFIED`, 744 /// Required. If the type is not set or is `TYPE_UNSPECIFIED`,
751 * returns an `INVALID_ARGUMENT` error. 745 /// returns an `INVALID_ARGUMENT` error.
752 * Possible string values are: 746 /// Possible string values are:
753 * - "TYPE_UNSPECIFIED" : The content type is not specified. 747 /// - "TYPE_UNSPECIFIED" : The content type is not specified.
754 * - "PLAIN_TEXT" : Plain text 748 /// - "PLAIN_TEXT" : Plain text
755 * - "HTML" : HTML 749 /// - "HTML" : HTML
756 */
757 core.String type; 750 core.String type;
758 751
759 Document(); 752 Document();
760 753
761 Document.fromJson(core.Map _json) { 754 Document.fromJson(core.Map _json) {
762 if (_json.containsKey("content")) { 755 if (_json.containsKey("content")) {
763 content = _json["content"]; 756 content = _json["content"];
764 } 757 }
765 if (_json.containsKey("gcsContentUri")) { 758 if (_json.containsKey("gcsContentUri")) {
766 gcsContentUri = _json["gcsContentUri"]; 759 gcsContentUri = _json["gcsContentUri"];
767 } 760 }
768 if (_json.containsKey("language")) { 761 if (_json.containsKey("language")) {
769 language = _json["language"]; 762 language = _json["language"];
770 } 763 }
771 if (_json.containsKey("type")) { 764 if (_json.containsKey("type")) {
772 type = _json["type"]; 765 type = _json["type"];
773 } 766 }
774 } 767 }
775 768
776 core.Map<core.String, core.Object> toJson() { 769 core.Map<core.String, core.Object> toJson() {
777 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 770 final core.Map<core.String, core.Object> _json =
771 new core.Map<core.String, core.Object>();
778 if (content != null) { 772 if (content != null) {
779 _json["content"] = content; 773 _json["content"] = content;
780 } 774 }
781 if (gcsContentUri != null) { 775 if (gcsContentUri != null) {
782 _json["gcsContentUri"] = gcsContentUri; 776 _json["gcsContentUri"] = gcsContentUri;
783 } 777 }
784 if (language != null) { 778 if (language != null) {
785 _json["language"] = language; 779 _json["language"] = language;
786 } 780 }
787 if (type != null) { 781 if (type != null) {
788 _json["type"] = type; 782 _json["type"] = type;
789 } 783 }
790 return _json; 784 return _json;
791 } 785 }
792 } 786 }
793 787
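For completeness, the two input sources described above look like this in practice. The bucket path is a placeholder; content and gcsContentUri are alternative sources for the same input, and type must be set to avoid the INVALID_ARGUMENT error noted in the docs.

import 'package:googleapis_beta/language/v1beta1.dart';

Document inlineDoc() => new Document()
  ..type = "PLAIN_TEXT"                 // required; TYPE_UNSPECIFIED is rejected
  ..content = "Inline text to analyze."
  ..language = "en";                    // optional; auto-detected when omitted

Document gcsDoc() => new Document()
  ..type = "HTML"
  ..gcsContentUri = "gs://my-bucket/page.html"; // placeholder bucket/object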
794 /** 788 /// Represents a phrase in the text that is a known entity, such as
795 * Represents a phrase in the text that is a known entity, such as 789 /// a person, an organization, or location. The API associates information,
796 * a person, an organization, or location. The API associates information, such 790 /// such
797 * as salience and mentions, with entities. 791 /// as salience and mentions, with entities.
798 */
799 class Entity { 792 class Entity {
800 /** 793 /// The mentions of this entity in the input document. The API currently
801 * The mentions of this entity in the input document. The API currently 794 /// supports proper noun mentions.
802 * supports proper noun mentions.
803 */
804 core.List<EntityMention> mentions; 795 core.List<EntityMention> mentions;
805 /** 796
806 * Metadata associated with the entity. 797 /// Metadata associated with the entity.
807 * 798 ///
808 * Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if 799 /// Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
809 * available. The associated keys are "wikipedia_url" and "mid", respectively. 800 /// available. The associated keys are "wikipedia_url" and "mid",
810 */ 801 /// respectively.
811 core.Map<core.String, core.String> metadata; 802 core.Map<core.String, core.String> metadata;
812 /** The representative name for the entity. */ 803
804 /// The representative name for the entity.
813 core.String name; 805 core.String name;
814 /** 806
815 * The salience score associated with the entity in the [0, 1.0] range. 807 /// The salience score associated with the entity in the [0, 1.0] range.
816 * 808 ///
817 * The salience score for an entity provides information about the 809 /// The salience score for an entity provides information about the
818 * importance or centrality of that entity to the entire document text. 810 /// importance or centrality of that entity to the entire document text.
819 * Scores closer to 0 are less salient, while scores closer to 1.0 are highly 811 /// Scores closer to 0 are less salient, while scores closer to 1.0 are
820 * salient. 812 /// highly
821 */ 813 /// salient.
822 core.double salience; 814 core.double salience;
823 /** 815
824 * The entity type. 816 /// The entity type.
825 * Possible string values are: 817 /// Possible string values are:
826 * - "UNKNOWN" : Unknown 818 /// - "UNKNOWN" : Unknown
827 * - "PERSON" : Person 819 /// - "PERSON" : Person
828 * - "LOCATION" : Location 820 /// - "LOCATION" : Location
829 * - "ORGANIZATION" : Organization 821 /// - "ORGANIZATION" : Organization
830 * - "EVENT" : Event 822 /// - "EVENT" : Event
831 * - "WORK_OF_ART" : Work of art 823 /// - "WORK_OF_ART" : Work of art
832 * - "CONSUMER_GOOD" : Consumer goods 824 /// - "CONSUMER_GOOD" : Consumer goods
833 * - "OTHER" : Other types 825 /// - "OTHER" : Other types
834 */
835 core.String type; 826 core.String type;
836 827
837 Entity(); 828 Entity();
838 829
839 Entity.fromJson(core.Map _json) { 830 Entity.fromJson(core.Map _json) {
840 if (_json.containsKey("mentions")) { 831 if (_json.containsKey("mentions")) {
841 mentions = _json["mentions"].map((value) => new EntityMention.fromJson(value)).toList(); 832 mentions = _json["mentions"]
833 .map((value) => new EntityMention.fromJson(value))
834 .toList();
842 } 835 }
843 if (_json.containsKey("metadata")) { 836 if (_json.containsKey("metadata")) {
844 metadata = _json["metadata"]; 837 metadata = _json["metadata"];
845 } 838 }
846 if (_json.containsKey("name")) { 839 if (_json.containsKey("name")) {
847 name = _json["name"]; 840 name = _json["name"];
848 } 841 }
849 if (_json.containsKey("salience")) { 842 if (_json.containsKey("salience")) {
850 salience = _json["salience"]; 843 salience = _json["salience"];
851 } 844 }
852 if (_json.containsKey("type")) { 845 if (_json.containsKey("type")) {
853 type = _json["type"]; 846 type = _json["type"];
854 } 847 }
855 } 848 }
856 849
857 core.Map<core.String, core.Object> toJson() { 850 core.Map<core.String, core.Object> toJson() {
858 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 851 final core.Map<core.String, core.Object> _json =
852 new core.Map<core.String, core.Object>();
859 if (mentions != null) { 853 if (mentions != null) {
860 _json["mentions"] = mentions.map((value) => (value).toJson()).toList(); 854 _json["mentions"] = mentions.map((value) => (value).toJson()).toList();
861 } 855 }
862 if (metadata != null) { 856 if (metadata != null) {
863 _json["metadata"] = metadata; 857 _json["metadata"] = metadata;
864 } 858 }
865 if (name != null) { 859 if (name != null) {
866 _json["name"] = name; 860 _json["name"] = name;
867 } 861 }
868 if (salience != null) { 862 if (salience != null) {
869 _json["salience"] = salience; 863 _json["salience"] = salience;
870 } 864 }
871 if (type != null) { 865 if (type != null) {
872 _json["type"] = type; 866 _json["type"] = type;
873 } 867 }
874 return _json; 868 return _json;
875 } 869 }
876 } 870 }
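For illustration, a minimal sketch of reading Entity values (and the nested EntityMention values) from an entity-analysis call. It assumes the AnalyzeEntitiesRequest/AnalyzeEntitiesResponse classes and the documents.analyzeEntities method defined earlier in this file, plus an already-authenticated LanguageApi instance:

import 'dart:async';
import 'package:googleapis_beta/language/v1beta1.dart';

Future printEntities(LanguageApi api, String text) async {
  final request = new AnalyzeEntitiesRequest()
    ..document = (new Document()
      ..type = "PLAIN_TEXT"
      ..content = text)
    ..encodingType = "UTF8";
  final response = await api.documents.analyzeEntities(request);
  for (final Entity entity in response.entities) {
    // salience lies in [0, 1.0]; values near 1.0 mark the most central entities.
    print("${entity.name} (${entity.type}) salience=${entity.salience}");
    // metadata may carry "wikipedia_url" and "mid" keys when available.
    final wikipediaUrl =
        entity.metadata == null ? null : entity.metadata["wikipedia_url"];
    if (wikipediaUrl != null) {
      print("  $wikipediaUrl");
    }
    if (entity.mentions != null) {
      for (final EntityMention mention in entity.mentions) {
        print("  mention '${mention.text.content}' at ${mention.text.beginOffset}");
      }
    }
  }
}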
877 871
878 /** 872 /// Represents a mention for an entity in the text. Currently, proper noun
879 * Represents a mention for an entity in the text. Currently, proper noun 873 /// mentions are supported.
880 * mentions are supported.
881 */
882 class EntityMention { 874 class EntityMention {
883 /** The mention text. */ 875 /// The mention text.
884 TextSpan text; 876 TextSpan text;
885 /** 877
886 * The type of the entity mention. 878 /// The type of the entity mention.
887 * Possible string values are: 879 /// Possible string values are:
888 * - "TYPE_UNKNOWN" : Unknown 880 /// - "TYPE_UNKNOWN" : Unknown
889 * - "PROPER" : Proper name 881 /// - "PROPER" : Proper name
890 * - "COMMON" : Common noun (or noun compound) 882 /// - "COMMON" : Common noun (or noun compound)
891 */
892 core.String type; 883 core.String type;
893 884
894 EntityMention(); 885 EntityMention();
895 886
896 EntityMention.fromJson(core.Map _json) { 887 EntityMention.fromJson(core.Map _json) {
897 if (_json.containsKey("text")) { 888 if (_json.containsKey("text")) {
898 text = new TextSpan.fromJson(_json["text"]); 889 text = new TextSpan.fromJson(_json["text"]);
899 } 890 }
900 if (_json.containsKey("type")) { 891 if (_json.containsKey("type")) {
901 type = _json["type"]; 892 type = _json["type"];
902 } 893 }
903 } 894 }
904 895
905 core.Map<core.String, core.Object> toJson() { 896 core.Map<core.String, core.Object> toJson() {
906 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 897 final core.Map<core.String, core.Object> _json =
898 new core.Map<core.String, core.Object>();
907 if (text != null) { 899 if (text != null) {
908 _json["text"] = (text).toJson(); 900 _json["text"] = (text).toJson();
909 } 901 }
910 if (type != null) { 902 if (type != null) {
911 _json["type"] = type; 903 _json["type"] = type;
912 } 904 }
913 return _json; 905 return _json;
914 } 906 }
915 } 907 }
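As a follow-on, a tiny hypothetical helper that keeps only the proper-noun mentions of an entity, using just the Entity, EntityMention, and TextSpan classes in this file:

import 'package:googleapis_beta/language/v1beta1.dart';

/// Returns the surface text of the mentions whose type is "PROPER".
List<String> properMentionTexts(Entity entity) {
  final texts = <String>[];
  if (entity.mentions == null) return texts;
  for (final EntityMention mention in entity.mentions) {
    if (mention.type == "PROPER") {
      texts.add(mention.text.content);
    }
  }
  return texts;
}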
916 908
917 /** 909 /// All available features for sentiment, syntax, and semantic analysis.
918 * All available features for sentiment, syntax, and semantic analysis. 910 /// Setting each one to true will enable that specific analysis for the input.
919 * Setting each one to true will enable that specific analysis for the input.
920 */
921 class Features { 911 class Features {
922 /** Extract document-level sentiment. */ 912 /// Extract document-level sentiment.
923 core.bool extractDocumentSentiment; 913 core.bool extractDocumentSentiment;
924 /** Extract entities. */ 914
915 /// Extract entities.
925 core.bool extractEntities; 916 core.bool extractEntities;
926 /** Extract syntax information. */ 917
918 /// Extract syntax information.
927 core.bool extractSyntax; 919 core.bool extractSyntax;
928 920
929 Features(); 921 Features();
930 922
931 Features.fromJson(core.Map _json) { 923 Features.fromJson(core.Map _json) {
932 if (_json.containsKey("extractDocumentSentiment")) { 924 if (_json.containsKey("extractDocumentSentiment")) {
933 extractDocumentSentiment = _json["extractDocumentSentiment"]; 925 extractDocumentSentiment = _json["extractDocumentSentiment"];
934 } 926 }
935 if (_json.containsKey("extractEntities")) { 927 if (_json.containsKey("extractEntities")) {
936 extractEntities = _json["extractEntities"]; 928 extractEntities = _json["extractEntities"];
937 } 929 }
938 if (_json.containsKey("extractSyntax")) { 930 if (_json.containsKey("extractSyntax")) {
939 extractSyntax = _json["extractSyntax"]; 931 extractSyntax = _json["extractSyntax"];
940 } 932 }
941 } 933 }
942 934
943 core.Map<core.String, core.Object> toJson() { 935 core.Map<core.String, core.Object> toJson() {
944 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 936 final core.Map<core.String, core.Object> _json =
937 new core.Map<core.String, core.Object>();
945 if (extractDocumentSentiment != null) { 938 if (extractDocumentSentiment != null) {
946 _json["extractDocumentSentiment"] = extractDocumentSentiment; 939 _json["extractDocumentSentiment"] = extractDocumentSentiment;
947 } 940 }
948 if (extractEntities != null) { 941 if (extractEntities != null) {
949 _json["extractEntities"] = extractEntities; 942 _json["extractEntities"] = extractEntities;
950 } 943 }
951 if (extractSyntax != null) { 944 if (extractSyntax != null) {
952 _json["extractSyntax"] = extractSyntax; 945 _json["extractSyntax"] = extractSyntax;
953 } 946 }
954 return _json; 947 return _json;
955 } 948 }
956 } 949 }
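A short sketch of selecting Features for an annotateText call; the AnnotateTextRequest and AnnotateTextResponse classes and the documents.annotateText method used here are defined earlier in this file (assumed, not shown in this hunk):

import 'dart:async';
import 'package:googleapis_beta/language/v1beta1.dart';

Future<AnnotateTextResponse> annotateEverything(LanguageApi api, Document document) {
  // Enable all three analyses; a flag left unset simply disables that analysis.
  final features = new Features()
    ..extractDocumentSentiment = true
    ..extractEntities = true
    ..extractSyntax = true;
  final request = new AnnotateTextRequest()
    ..document = document
    ..features = features
    ..encodingType = "UTF8";
  return api.documents.annotateText(request);
}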
957 950
958 /** Represents part of speech information for a token. */ 951 /// Represents part of speech information for a token.
959 class PartOfSpeech { 952 class PartOfSpeech {
960 /** 953 /// The grammatical aspect.
961 * The grammatical aspect. 954 /// Possible string values are:
962 * Possible string values are: 955 /// - "ASPECT_UNKNOWN" : Aspect is not applicable in the analyzed language or
963 * - "ASPECT_UNKNOWN" : Aspect is not applicable in the analyzed language or 956 /// is not predicted.
964 * is not predicted. 957 /// - "PERFECTIVE" : Perfective
965 * - "PERFECTIVE" : Perfective 958 /// - "IMPERFECTIVE" : Imperfective
966 * - "IMPERFECTIVE" : Imperfective 959 /// - "PROGRESSIVE" : Progressive
967 * - "PROGRESSIVE" : Progressive
968 */
969 core.String aspect; 960 core.String aspect;
970 /** 961
971 * The grammatical case. 962 /// The grammatical case.
972 * Possible string values are: 963 /// Possible string values are:
973 * - "CASE_UNKNOWN" : Case is not applicable in the analyzed language or is 964 /// - "CASE_UNKNOWN" : Case is not applicable in the analyzed language or is
974 * not predicted. 965 /// not predicted.
975 * - "ACCUSATIVE" : Accusative 966 /// - "ACCUSATIVE" : Accusative
976 * - "ADVERBIAL" : Adverbial 967 /// - "ADVERBIAL" : Adverbial
977 * - "COMPLEMENTIVE" : Complementive 968 /// - "COMPLEMENTIVE" : Complementive
978 * - "DATIVE" : Dative 969 /// - "DATIVE" : Dative
979 * - "GENITIVE" : Genitive 970 /// - "GENITIVE" : Genitive
980 * - "INSTRUMENTAL" : Instrumental 971 /// - "INSTRUMENTAL" : Instrumental
981 * - "LOCATIVE" : Locative 972 /// - "LOCATIVE" : Locative
982 * - "NOMINATIVE" : Nominative 973 /// - "NOMINATIVE" : Nominative
983 * - "OBLIQUE" : Oblique 974 /// - "OBLIQUE" : Oblique
984 * - "PARTITIVE" : Partitive 975 /// - "PARTITIVE" : Partitive
985 * - "PREPOSITIONAL" : Prepositional 976 /// - "PREPOSITIONAL" : Prepositional
986 * - "REFLEXIVE_CASE" : Reflexive 977 /// - "REFLEXIVE_CASE" : Reflexive
987 * - "RELATIVE_CASE" : Relative 978 /// - "RELATIVE_CASE" : Relative
988 * - "VOCATIVE" : Vocative 979 /// - "VOCATIVE" : Vocative
989 */
990 core.String case_; 980 core.String case_;
991 /** 981
992 * The grammatical form. 982 /// The grammatical form.
993 * Possible string values are: 983 /// Possible string values are:
994 * - "FORM_UNKNOWN" : Form is not applicable in the analyzed language or is 984 /// - "FORM_UNKNOWN" : Form is not applicable in the analyzed language or is
995 * not predicted. 985 /// not predicted.
996 * - "ADNOMIAL" : Adnomial 986 /// - "ADNOMIAL" : Adnomial
997 * - "AUXILIARY" : Auxiliary 987 /// - "AUXILIARY" : Auxiliary
998 * - "COMPLEMENTIZER" : Complementizer 988 /// - "COMPLEMENTIZER" : Complementizer
999 * - "FINAL_ENDING" : Final ending 989 /// - "FINAL_ENDING" : Final ending
1000 * - "GERUND" : Gerund 990 /// - "GERUND" : Gerund
1001 * - "REALIS" : Realis 991 /// - "REALIS" : Realis
1002 * - "IRREALIS" : Irrealis 992 /// - "IRREALIS" : Irrealis
1003 * - "SHORT" : Short form 993 /// - "SHORT" : Short form
1004 * - "LONG" : Long form 994 /// - "LONG" : Long form
1005 * - "ORDER" : Order form 995 /// - "ORDER" : Order form
1006 * - "SPECIFIC" : Specific form 996 /// - "SPECIFIC" : Specific form
1007 */
1008 core.String form; 997 core.String form;
1009 /** 998
1010 * The grammatical gender. 999 /// The grammatical gender.
1011 * Possible string values are: 1000 /// Possible string values are:
1012 * - "GENDER_UNKNOWN" : Gender is not applicable in the analyzed language or 1001 /// - "GENDER_UNKNOWN" : Gender is not applicable in the analyzed language or
1013 * is not predicted. 1002 /// is not predicted.
1014 * - "FEMININE" : Feminine 1003 /// - "FEMININE" : Feminine
1015 * - "MASCULINE" : Masculine 1004 /// - "MASCULINE" : Masculine
1016 * - "NEUTER" : Neuter 1005 /// - "NEUTER" : Neuter
1017 */
1018 core.String gender; 1006 core.String gender;
1019 /** 1007
1020 * The grammatical mood. 1008 /// The grammatical mood.
1021 * Possible string values are: 1009 /// Possible string values are:
1022 * - "MOOD_UNKNOWN" : Mood is not applicable in the analyzed language or is 1010 /// - "MOOD_UNKNOWN" : Mood is not applicable in the analyzed language or is
1023 * not predicted. 1011 /// not predicted.
1024 * - "CONDITIONAL_MOOD" : Conditional 1012 /// - "CONDITIONAL_MOOD" : Conditional
1025 * - "IMPERATIVE" : Imperative 1013 /// - "IMPERATIVE" : Imperative
1026 * - "INDICATIVE" : Indicative 1014 /// - "INDICATIVE" : Indicative
1027 * - "INTERROGATIVE" : Interrogative 1015 /// - "INTERROGATIVE" : Interrogative
1028 * - "JUSSIVE" : Jussive 1016 /// - "JUSSIVE" : Jussive
1029 * - "SUBJUNCTIVE" : Subjunctive 1017 /// - "SUBJUNCTIVE" : Subjunctive
1030 */
1031 core.String mood; 1018 core.String mood;
1032 /** 1019
1033 * The grammatical number. 1020 /// The grammatical number.
1034 * Possible string values are: 1021 /// Possible string values are:
1035 * - "NUMBER_UNKNOWN" : Number is not applicable in the analyzed language or 1022 /// - "NUMBER_UNKNOWN" : Number is not applicable in the analyzed language or
1036 * is not predicted. 1023 /// is not predicted.
1037 * - "SINGULAR" : Singular 1024 /// - "SINGULAR" : Singular
1038 * - "PLURAL" : Plural 1025 /// - "PLURAL" : Plural
1039 * - "DUAL" : Dual 1026 /// - "DUAL" : Dual
1040 */
1041 core.String number; 1027 core.String number;
1042 /** 1028
1043 * The grammatical person. 1029 /// The grammatical person.
1044 * Possible string values are: 1030 /// Possible string values are:
1045 * - "PERSON_UNKNOWN" : Person is not applicable in the analyzed language or 1031 /// - "PERSON_UNKNOWN" : Person is not applicable in the analyzed language or
1046 * is not predicted. 1032 /// is not predicted.
1047 * - "FIRST" : First 1033 /// - "FIRST" : First
1048 * - "SECOND" : Second 1034 /// - "SECOND" : Second
1049 * - "THIRD" : Third 1035 /// - "THIRD" : Third
1050 * - "REFLEXIVE_PERSON" : Reflexive 1036 /// - "REFLEXIVE_PERSON" : Reflexive
1051 */
1052 core.String person; 1037 core.String person;
1053 /** 1038
1054 * The grammatical properness. 1039 /// The grammatical properness.
1055 * Possible string values are: 1040 /// Possible string values are:
1056 * - "PROPER_UNKNOWN" : Proper is not applicable in the analyzed language or 1041 /// - "PROPER_UNKNOWN" : Proper is not applicable in the analyzed language or
1057 * is not predicted. 1042 /// is not predicted.
1058 * - "PROPER" : Proper 1043 /// - "PROPER" : Proper
1059 * - "NOT_PROPER" : Not proper 1044 /// - "NOT_PROPER" : Not proper
1060 */
1061 core.String proper; 1045 core.String proper;
1062 /** 1046
1063 * The grammatical reciprocity. 1047 /// The grammatical reciprocity.
1064 * Possible string values are: 1048 /// Possible string values are:
1065 * - "RECIPROCITY_UNKNOWN" : Reciprocity is not applicable in the analyzed 1049 /// - "RECIPROCITY_UNKNOWN" : Reciprocity is not applicable in the analyzed
1066 * language or is not 1050 /// language or is not
1067 * predicted. 1051 /// predicted.
1068 * - "RECIPROCAL" : Reciprocal 1052 /// - "RECIPROCAL" : Reciprocal
1069 * - "NON_RECIPROCAL" : Non-reciprocal 1053 /// - "NON_RECIPROCAL" : Non-reciprocal
1070 */
1071 core.String reciprocity; 1054 core.String reciprocity;
1072 /** 1055
1073 * The part of speech tag. 1056 /// The part of speech tag.
1074 * Possible string values are: 1057 /// Possible string values are:
1075 * - "UNKNOWN" : Unknown 1058 /// - "UNKNOWN" : Unknown
1076 * - "ADJ" : Adjective 1059 /// - "ADJ" : Adjective
1077 * - "ADP" : Adposition (preposition and postposition) 1060 /// - "ADP" : Adposition (preposition and postposition)
1078 * - "ADV" : Adverb 1061 /// - "ADV" : Adverb
1079 * - "CONJ" : Conjunction 1062 /// - "CONJ" : Conjunction
1080 * - "DET" : Determiner 1063 /// - "DET" : Determiner
1081 * - "NOUN" : Noun (common and proper) 1064 /// - "NOUN" : Noun (common and proper)
1082 * - "NUM" : Cardinal number 1065 /// - "NUM" : Cardinal number
1083 * - "PRON" : Pronoun 1066 /// - "PRON" : Pronoun
1084 * - "PRT" : Particle or other function word 1067 /// - "PRT" : Particle or other function word
1085 * - "PUNCT" : Punctuation 1068 /// - "PUNCT" : Punctuation
1086 * - "VERB" : Verb (all tenses and modes) 1069 /// - "VERB" : Verb (all tenses and modes)
1087 * - "X" : Other: foreign words, typos, abbreviations 1070 /// - "X" : Other: foreign words, typos, abbreviations
1088 * - "AFFIX" : Affix 1071 /// - "AFFIX" : Affix
1089 */
1090 core.String tag; 1072 core.String tag;
1091 /** 1073
1092 * The grammatical tense. 1074 /// The grammatical tense.
1093 * Possible string values are: 1075 /// Possible string values are:
1094 * - "TENSE_UNKNOWN" : Tense is not applicable in the analyzed language or is 1076 /// - "TENSE_UNKNOWN" : Tense is not applicable in the analyzed language or
1095 * not predicted. 1077 /// is not predicted.
1096 * - "CONDITIONAL_TENSE" : Conditional 1078 /// - "CONDITIONAL_TENSE" : Conditional
1097 * - "FUTURE" : Future 1079 /// - "FUTURE" : Future
1098 * - "PAST" : Past 1080 /// - "PAST" : Past
1099 * - "PRESENT" : Present 1081 /// - "PRESENT" : Present
1100 * - "IMPERFECT" : Imperfect 1082 /// - "IMPERFECT" : Imperfect
1101 * - "PLUPERFECT" : Pluperfect 1083 /// - "PLUPERFECT" : Pluperfect
1102 */
1103 core.String tense; 1084 core.String tense;
1104 /** 1085
1105 * The grammatical voice. 1086 /// The grammatical voice.
1106 * Possible string values are: 1087 /// Possible string values are:
1107 * - "VOICE_UNKNOWN" : Voice is not applicable in the analyzed language or is 1088 /// - "VOICE_UNKNOWN" : Voice is not applicable in the analyzed language or
1108 * not predicted. 1089 /// is not predicted.
1109 * - "ACTIVE" : Active 1090 /// - "ACTIVE" : Active
1110 * - "CAUSATIVE" : Causative 1091 /// - "CAUSATIVE" : Causative
1111 * - "PASSIVE" : Passive 1092 /// - "PASSIVE" : Passive
1112 */
1113 core.String voice; 1093 core.String voice;
1114 1094
1115 PartOfSpeech(); 1095 PartOfSpeech();
1116 1096
1117 PartOfSpeech.fromJson(core.Map _json) { 1097 PartOfSpeech.fromJson(core.Map _json) {
1118 if (_json.containsKey("aspect")) { 1098 if (_json.containsKey("aspect")) {
1119 aspect = _json["aspect"]; 1099 aspect = _json["aspect"];
1120 } 1100 }
1121 if (_json.containsKey("case")) { 1101 if (_json.containsKey("case")) {
1122 case_ = _json["case"]; 1102 case_ = _json["case"];
(...skipping 24 matching lines...)
1147 } 1127 }
1148 if (_json.containsKey("tense")) { 1128 if (_json.containsKey("tense")) {
1149 tense = _json["tense"]; 1129 tense = _json["tense"];
1150 } 1130 }
1151 if (_json.containsKey("voice")) { 1131 if (_json.containsKey("voice")) {
1152 voice = _json["voice"]; 1132 voice = _json["voice"];
1153 } 1133 }
1154 } 1134 }
1155 1135
1156 core.Map<core.String, core.Object> toJson() { 1136 core.Map<core.String, core.Object> toJson() {
1157 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1137 final core.Map<core.String, core.Object> _json =
1138 new core.Map<core.String, core.Object>();
1158 if (aspect != null) { 1139 if (aspect != null) {
1159 _json["aspect"] = aspect; 1140 _json["aspect"] = aspect;
1160 } 1141 }
1161 if (case_ != null) { 1142 if (case_ != null) {
1162 _json["case"] = case_; 1143 _json["case"] = case_;
1163 } 1144 }
1164 if (form != null) { 1145 if (form != null) {
1165 _json["form"] = form; 1146 _json["form"] = form;
1166 } 1147 }
1167 if (gender != null) { 1148 if (gender != null) {
(...skipping 20 matching lines...)
1188 if (tense != null) { 1169 if (tense != null) {
1189 _json["tense"] = tense; 1170 _json["tense"] = tense;
1190 } 1171 }
1191 if (voice != null) { 1172 if (voice != null) {
1192 _json["voice"] = voice; 1173 _json["voice"] = voice;
1193 } 1174 }
1194 return _json; 1175 return _json;
1195 } 1176 }
1196 } 1177 }
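For example, a minimal sketch that tallies tokens by their part-of-speech tag, using only the Token and PartOfSpeech classes in this file (the tokens would typically come from an annotateText response with extractSyntax enabled):

import 'package:googleapis_beta/language/v1beta1.dart';

/// Counts how often each tag ("NOUN", "VERB", "PUNCT", ...) occurs.
Map<String, int> countPartOfSpeechTags(List<Token> tokens) {
  final counts = <String, int>{};
  for (final Token token in tokens) {
    final tag = token.partOfSpeech == null ? "UNKNOWN" : token.partOfSpeech.tag;
    counts[tag] = (counts[tag] ?? 0) + 1;
  }
  return counts;
}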
1197 1178
1198 /** Represents a sentence in the input document. */ 1179 /// Represents a sentence in the input document.
1199 class Sentence { 1180 class Sentence {
1200 /** 1181 /// For calls to AnalyzeSentiment or if
1201 * For calls to AnalyzeSentiment or if 1182 /// AnnotateTextRequest.Features.extract_document_sentiment is set to
1202 * AnnotateTextRequest.Features.extract_document_sentiment is set to 1183 /// true, this field will contain the sentiment for the sentence.
1203 * true, this field will contain the sentiment for the sentence.
1204 */
1205 Sentiment sentiment; 1184 Sentiment sentiment;
1206 /** The sentence text. */ 1185
1186 /// The sentence text.
1207 TextSpan text; 1187 TextSpan text;
1208 1188
1209 Sentence(); 1189 Sentence();
1210 1190
1211 Sentence.fromJson(core.Map _json) { 1191 Sentence.fromJson(core.Map _json) {
1212 if (_json.containsKey("sentiment")) { 1192 if (_json.containsKey("sentiment")) {
1213 sentiment = new Sentiment.fromJson(_json["sentiment"]); 1193 sentiment = new Sentiment.fromJson(_json["sentiment"]);
1214 } 1194 }
1215 if (_json.containsKey("text")) { 1195 if (_json.containsKey("text")) {
1216 text = new TextSpan.fromJson(_json["text"]); 1196 text = new TextSpan.fromJson(_json["text"]);
1217 } 1197 }
1218 } 1198 }
1219 1199
1220 core.Map<core.String, core.Object> toJson() { 1200 core.Map<core.String, core.Object> toJson() {
1221 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1201 final core.Map<core.String, core.Object> _json =
1202 new core.Map<core.String, core.Object>();
1222 if (sentiment != null) { 1203 if (sentiment != null) {
1223 _json["sentiment"] = (sentiment).toJson(); 1204 _json["sentiment"] = (sentiment).toJson();
1224 } 1205 }
1225 if (text != null) { 1206 if (text != null) {
1226 _json["text"] = (text).toJson(); 1207 _json["text"] = (text).toJson();
1227 } 1208 }
1228 return _json; 1209 return _json;
1229 } 1210 }
1230 } 1211 }
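A small sketch of printing per-sentence sentiment; it assumes the AnalyzeSentimentRequest class and the documents.analyzeSentiment method defined earlier in this file:

import 'dart:async';
import 'package:googleapis_beta/language/v1beta1.dart';

Future printSentenceSentiment(LanguageApi api, String text) async {
  final request = new AnalyzeSentimentRequest()
    ..document = (new Document()
      ..type = "PLAIN_TEXT"
      ..content = text);
  final response = await api.documents.analyzeSentiment(request);
  for (final Sentence sentence in response.sentences) {
    // sentiment is filled in for analyzeSentiment calls (and for annotateText
    // when extractDocumentSentiment is true).
    print("'${sentence.text.content}' -> ${sentence.sentiment.score}");
  }
}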
1231 1212
1232 /** 1213 /// Represents the feeling associated with the entire text or entities in
1233 * Represents the feeling associated with the entire text or entities in 1214 /// the text.
1234 * the text.
1235 */
1236 class Sentiment { 1215 class Sentiment {
1237 /** 1216 /// A non-negative number in the [0, +inf) range, which represents
1238 * A non-negative number in the [0, +inf) range, which represents 1217 /// the absolute magnitude of sentiment regardless of score (positive or
1239 * the absolute magnitude of sentiment regardless of score (positive or 1218 /// negative).
1240 * negative).
1241 */
1242 core.double magnitude; 1219 core.double magnitude;
1243 /** 1220
1244 * DEPRECATED FIELD - This field is being deprecated in 1221 /// DEPRECATED FIELD - This field is being deprecated in
1245 * favor of score. Please refer to our documentation at 1222 /// favor of score. Please refer to our documentation at
1246 * https://cloud.google.com/natural-language/docs for more information. 1223 /// https://cloud.google.com/natural-language/docs for more information.
1247 */
1248 core.double polarity; 1224 core.double polarity;
1249 /** 1225
1250 * Sentiment score between -1.0 (negative sentiment) and 1.0 1226 /// Sentiment score between -1.0 (negative sentiment) and 1.0
1251 * (positive sentiment). 1227 /// (positive sentiment).
1252 */
1253 core.double score; 1228 core.double score;
1254 1229
1255 Sentiment(); 1230 Sentiment();
1256 1231
1257 Sentiment.fromJson(core.Map _json) { 1232 Sentiment.fromJson(core.Map _json) {
1258 if (_json.containsKey("magnitude")) { 1233 if (_json.containsKey("magnitude")) {
1259 magnitude = _json["magnitude"]; 1234 magnitude = _json["magnitude"];
1260 } 1235 }
1261 if (_json.containsKey("polarity")) { 1236 if (_json.containsKey("polarity")) {
1262 polarity = _json["polarity"]; 1237 polarity = _json["polarity"];
1263 } 1238 }
1264 if (_json.containsKey("score")) { 1239 if (_json.containsKey("score")) {
1265 score = _json["score"]; 1240 score = _json["score"];
1266 } 1241 }
1267 } 1242 }
1268 1243
1269 core.Map<core.String, core.Object> toJson() { 1244 core.Map<core.String, core.Object> toJson() {
1270 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1245 final core.Map<core.String, core.Object> _json =
1246 new core.Map<core.String, core.Object>();
1271 if (magnitude != null) { 1247 if (magnitude != null) {
1272 _json["magnitude"] = magnitude; 1248 _json["magnitude"] = magnitude;
1273 } 1249 }
1274 if (polarity != null) { 1250 if (polarity != null) {
1275 _json["polarity"] = polarity; 1251 _json["polarity"] = polarity;
1276 } 1252 }
1277 if (score != null) { 1253 if (score != null) {
1278 _json["score"] = score; 1254 _json["score"] = score;
1279 } 1255 }
1280 return _json; 1256 return _json;
1281 } 1257 }
1282 } 1258 }
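As a sketch of how score and magnitude are usually read together; the 0.25 and 1.0 cut-offs below are arbitrary example thresholds, not part of the API:

import 'package:googleapis_beta/language/v1beta1.dart';

/// Turns a Sentiment into a rough human-readable label.
String describeSentiment(Sentiment sentiment) {
  final score = sentiment.score ?? 0.0;         // -1.0 (negative) .. 1.0 (positive)
  final magnitude = sentiment.magnitude ?? 0.0; // overall strength, always >= 0
  if (score > 0.25) return "positive (magnitude $magnitude)";
  if (score < -0.25) return "negative (magnitude $magnitude)";
  // A near-zero score with a high magnitude usually indicates mixed sentiment.
  return magnitude > 1.0 ? "mixed" : "neutral";
}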
1283 1259
1284 /** 1260 /// The `Status` type defines a logical error model that is suitable for
1285 * The `Status` type defines a logical error model that is suitable for 1261 /// different
1286 * different 1262 /// programming environments, including REST APIs and RPC APIs. It is used by
1287 * programming environments, including REST APIs and RPC APIs. It is used by 1263 /// [gRPC](https://github.com/grpc). The error model is designed to be:
1288 * [gRPC](https://github.com/grpc). The error model is designed to be: 1264 ///
1289 * 1265 /// - Simple to use and understand for most users
1290 * - Simple to use and understand for most users 1266 /// - Flexible enough to meet unexpected needs
1291 * - Flexible enough to meet unexpected needs 1267 ///
1292 * 1268 /// # Overview
1293 * # Overview 1269 ///
1294 * 1270 /// The `Status` message contains three pieces of data: error code, error
1295 * The `Status` message contains three pieces of data: error code, error 1271 /// message,
1296 * message, 1272 /// and error details. The error code should be an enum value of
1297 * and error details. The error code should be an enum value of 1273 /// google.rpc.Code, but it may accept additional error codes if needed. The
1298 * google.rpc.Code, but it may accept additional error codes if needed. The 1274 /// error message should be a developer-facing English message that helps
1299 * error message should be a developer-facing English message that helps 1275 /// developers *understand* and *resolve* the error. If a localized user-facing
1300 * developers *understand* and *resolve* the error. If a localized user-facing 1276 /// error message is needed, put the localized message in the error details or
1301 * error message is needed, put the localized message in the error details or 1277 /// localize it in the client. The optional error details may contain arbitrary
1302 * localize it in the client. The optional error details may contain arbitrary 1278 /// information about the error. There is a predefined set of error detail
1303 * information about the error. There is a predefined set of error detail types 1279 /// types
1304 * in the package `google.rpc` that can be used for common error conditions. 1280 /// in the package `google.rpc` that can be used for common error conditions.
1305 * 1281 ///
1306 * # Language mapping 1282 /// # Language mapping
1307 * 1283 ///
1308 * The `Status` message is the logical representation of the error model, but it 1284 /// The `Status` message is the logical representation of the error model, but
1309 * is not necessarily the actual wire format. When the `Status` message is 1285 /// it
1310 * exposed in different client libraries and different wire protocols, it can be 1286 /// is not necessarily the actual wire format. When the `Status` message is
1311 * mapped differently. For example, it will likely be mapped to some exceptions 1287 /// exposed in different client libraries and different wire protocols, it can
1312 * in Java, but more likely mapped to some error codes in C. 1288 /// be
1313 * 1289 /// mapped differently. For example, it will likely be mapped to some
1314 * # Other uses 1290 /// exceptions
1315 * 1291 /// in Java, but more likely mapped to some error codes in C.
1316 * The error model and the `Status` message can be used in a variety of 1292 ///
1317 * environments, either with or without APIs, to provide a 1293 /// # Other uses
1318 * consistent developer experience across different environments. 1294 ///
1319 * 1295 /// The error model and the `Status` message can be used in a variety of
1320 * Example uses of this error model include: 1296 /// environments, either with or without APIs, to provide a
1321 * 1297 /// consistent developer experience across different environments.
1322 * - Partial errors. If a service needs to return partial errors to the client, 1298 ///
1323 * it may embed the `Status` in the normal response to indicate the partial 1299 /// Example uses of this error model include:
1324 * errors. 1300 ///
1325 * 1301 /// - Partial errors. If a service needs to return partial errors to the
1326 * - Workflow errors. A typical workflow has multiple steps. Each step may 1302 /// client,
1327 * have a `Status` message for error reporting. 1303 /// it may embed the `Status` in the normal response to indicate the partial
1328 * 1304 /// errors.
1329 * - Batch operations. If a client uses batch request and batch response, the 1305 ///
1330 * `Status` message should be used directly inside batch response, one for 1306 /// - Workflow errors. A typical workflow has multiple steps. Each step may
1331 * each error sub-response. 1307 /// have a `Status` message for error reporting.
1332 * 1308 ///
1333 * - Asynchronous operations. If an API call embeds asynchronous operation 1309 /// - Batch operations. If a client uses batch request and batch response, the
1334 * results in its response, the status of those operations should be 1310 /// `Status` message should be used directly inside batch response, one for
1335 * represented directly using the `Status` message. 1311 /// each error sub-response.
1336 * 1312 ///
1337 * - Logging. If some API errors are stored in logs, the message `Status` could 1313 /// - Asynchronous operations. If an API call embeds asynchronous operation
1338 * be used directly after any stripping needed for security/privacy reasons. 1314 /// results in its response, the status of those operations should be
1339 */ 1315 /// represented directly using the `Status` message.
1316 ///
1317 /// - Logging. If some API errors are stored in logs, the message `Status`
1318 /// could
1319 /// be used directly after any stripping needed for security/privacy reasons.
1340 class Status { 1320 class Status {
1341 /** The status code, which should be an enum value of google.rpc.Code. */ 1321 /// The status code, which should be an enum value of google.rpc.Code.
1342 core.int code; 1322 core.int code;
1343 /** 1323
1344 * A list of messages that carry the error details. There is a common set of 1324 /// A list of messages that carry the error details. There is a common set
1345 * message types for APIs to use. 1325 /// of
1346 * 1326 /// message types for APIs to use.
1347 * The values for Object must be JSON objects. It can consist of `num`, 1327 ///
1348 * `String`, `bool` and `null` as well as `Map` and `List` values. 1328 /// The values for Object must be JSON objects. It can consist of `num`,
1349 */ 1329 /// `String`, `bool` and `null` as well as `Map` and `List` values.
1350 core.List<core.Map<core.String, core.Object>> details; 1330 core.List<core.Map<core.String, core.Object>> details;
1351 /** 1331
1352 * A developer-facing error message, which should be in English. Any 1332 /// A developer-facing error message, which should be in English. Any
1353 * user-facing error message should be localized and sent in the 1333 /// user-facing error message should be localized and sent in the
1354 * google.rpc.Status.details field, or localized by the client. 1334 /// google.rpc.Status.details field, or localized by the client.
1355 */
1356 core.String message; 1335 core.String message;
1357 1336
1358 Status(); 1337 Status();
1359 1338
1360 Status.fromJson(core.Map _json) { 1339 Status.fromJson(core.Map _json) {
1361 if (_json.containsKey("code")) { 1340 if (_json.containsKey("code")) {
1362 code = _json["code"]; 1341 code = _json["code"];
1363 } 1342 }
1364 if (_json.containsKey("details")) { 1343 if (_json.containsKey("details")) {
1365 details = _json["details"]; 1344 details = _json["details"];
1366 } 1345 }
1367 if (_json.containsKey("message")) { 1346 if (_json.containsKey("message")) {
1368 message = _json["message"]; 1347 message = _json["message"];
1369 } 1348 }
1370 } 1349 }
1371 1350
1372 core.Map<core.String, core.Object> toJson() { 1351 core.Map<core.String, core.Object> toJson() {
1373 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1352 final core.Map<core.String, core.Object> _json =
1353 new core.Map<core.String, core.Object>();
1374 if (code != null) { 1354 if (code != null) {
1375 _json["code"] = code; 1355 _json["code"] = code;
1376 } 1356 }
1377 if (details != null) { 1357 if (details != null) {
1378 _json["details"] = details; 1358 _json["details"] = details;
1379 } 1359 }
1380 if (message != null) { 1360 if (message != null) {
1381 _json["message"] = message; 1361 _json["message"] = message;
1382 } 1362 }
1383 return _json; 1363 return _json;
1384 } 1364 }
1385 } 1365 }
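In this client, RPC failures carrying such a status surface as the DetailedApiRequestError type re-exported by this library; a hedged sketch of handling one (only its status and message fields are relied on here, and the AnalyzeEntities request/response classes are again assumed from earlier in this file):

import 'dart:async';
import 'package:googleapis_beta/language/v1beta1.dart';

Future<AnalyzeEntitiesResponse> analyzeOrReport(
    LanguageApi api, AnalyzeEntitiesRequest request) async {
  try {
    return await api.documents.analyzeEntities(request);
  } on DetailedApiRequestError catch (e) {
    // Mirrors Status: a numeric code plus a developer-facing English message.
    print("analyzeEntities failed: ${e.status} ${e.message}");
    rethrow;
  }
}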
1386 1366
1387 /** Represents an output piece of text. */ 1367 /// Represents an output piece of text.
1388 class TextSpan { 1368 class TextSpan {
1389 /** 1369 /// The API calculates the beginning offset of the content in the original
1390 * The API calculates the beginning offset of the content in the original 1370 /// document according to the EncodingType specified in the API request.
1391 * document according to the EncodingType specified in the API request.
1392 */
1393 core.int beginOffset; 1371 core.int beginOffset;
1394 /** The content of the output text. */ 1372
1373 /// The content of the output text.
1395 core.String content; 1374 core.String content;
1396 1375
1397 TextSpan(); 1376 TextSpan();
1398 1377
1399 TextSpan.fromJson(core.Map _json) { 1378 TextSpan.fromJson(core.Map _json) {
1400 if (_json.containsKey("beginOffset")) { 1379 if (_json.containsKey("beginOffset")) {
1401 beginOffset = _json["beginOffset"]; 1380 beginOffset = _json["beginOffset"];
1402 } 1381 }
1403 if (_json.containsKey("content")) { 1382 if (_json.containsKey("content")) {
1404 content = _json["content"]; 1383 content = _json["content"];
1405 } 1384 }
1406 } 1385 }
1407 1386
1408 core.Map<core.String, core.Object> toJson() { 1387 core.Map<core.String, core.Object> toJson() {
1409 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1388 final core.Map<core.String, core.Object> _json =
1389 new core.Map<core.String, core.Object>();
1410 if (beginOffset != null) { 1390 if (beginOffset != null) {
1411 _json["beginOffset"] = beginOffset; 1391 _json["beginOffset"] = beginOffset;
1412 } 1392 }
1413 if (content != null) { 1393 if (content != null) {
1414 _json["content"] = content; 1394 _json["content"] = content;
1415 } 1395 }
1416 return _json; 1396 return _json;
1417 } 1397 }
1418 } 1398 }
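A small sketch of mapping a TextSpan back onto the original input string; this only lines up when the request used encodingType "UTF16", since Dart String indices count UTF-16 code units (an assumption of this helper):

import 'package:googleapis_beta/language/v1beta1.dart';

/// Re-extracts [span] from [original]; falls back to span.content when the
/// offset is absent (e.g. the request used EncodingType NONE).
String sliceSpan(String original, TextSpan span) {
  final start = span.beginOffset;
  if (start == null || start < 0) return span.content;
  return original.substring(start, start + span.content.length);
}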
1419 1399
1420 /** Represents the smallest syntactic building block of the text. */ 1400 /// Represents the smallest syntactic building block of the text.
1421 class Token { 1401 class Token {
1422 /** Dependency tree parse for this token. */ 1402 /// Dependency tree parse for this token.
1423 DependencyEdge dependencyEdge; 1403 DependencyEdge dependencyEdge;
1424 /** 1404
1425 * [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token. 1405 /// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the
1426 */ 1406 /// token.
1427 core.String lemma; 1407 core.String lemma;
1428 /** Parts of speech tag for this token. */ 1408
1409 /// Parts of speech tag for this token.
1429 PartOfSpeech partOfSpeech; 1410 PartOfSpeech partOfSpeech;
1430 /** The token text. */ 1411
1412 /// The token text.
1431 TextSpan text; 1413 TextSpan text;
1432 1414
1433 Token(); 1415 Token();
1434 1416
1435 Token.fromJson(core.Map _json) { 1417 Token.fromJson(core.Map _json) {
1436 if (_json.containsKey("dependencyEdge")) { 1418 if (_json.containsKey("dependencyEdge")) {
1437 dependencyEdge = new DependencyEdge.fromJson(_json["dependencyEdge"]); 1419 dependencyEdge = new DependencyEdge.fromJson(_json["dependencyEdge"]);
1438 } 1420 }
1439 if (_json.containsKey("lemma")) { 1421 if (_json.containsKey("lemma")) {
1440 lemma = _json["lemma"]; 1422 lemma = _json["lemma"];
1441 } 1423 }
1442 if (_json.containsKey("partOfSpeech")) { 1424 if (_json.containsKey("partOfSpeech")) {
1443 partOfSpeech = new PartOfSpeech.fromJson(_json["partOfSpeech"]); 1425 partOfSpeech = new PartOfSpeech.fromJson(_json["partOfSpeech"]);
1444 } 1426 }
1445 if (_json.containsKey("text")) { 1427 if (_json.containsKey("text")) {
1446 text = new TextSpan.fromJson(_json["text"]); 1428 text = new TextSpan.fromJson(_json["text"]);
1447 } 1429 }
1448 } 1430 }
1449 1431
1450 core.Map<core.String, core.Object> toJson() { 1432 core.Map<core.String, core.Object> toJson() {
1451 final core.Map<core.String, core.Object> _json = new core.Map<core.String, core.Object>(); 1433 final core.Map<core.String, core.Object> _json =
1434 new core.Map<core.String, core.Object>();
1452 if (dependencyEdge != null) { 1435 if (dependencyEdge != null) {
1453 _json["dependencyEdge"] = (dependencyEdge).toJson(); 1436 _json["dependencyEdge"] = (dependencyEdge).toJson();
1454 } 1437 }
1455 if (lemma != null) { 1438 if (lemma != null) {
1456 _json["lemma"] = lemma; 1439 _json["lemma"] = lemma;
1457 } 1440 }
1458 if (partOfSpeech != null) { 1441 if (partOfSpeech != null) {
1459 _json["partOfSpeech"] = (partOfSpeech).toJson(); 1442 _json["partOfSpeech"] = (partOfSpeech).toJson();
1460 } 1443 }
1461 if (text != null) { 1444 if (text != null) {
1462 _json["text"] = (text).toJson(); 1445 _json["text"] = (text).toJson();
1463 } 1446 }
1464 return _json; 1447 return _json;
1465 } 1448 }
1466 } 1449 }
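Finally, a sketch of walking each token to its syntactic head; the DependencyEdge class (with headTokenIndex and label fields) is defined earlier in this file and assumed here:

import 'package:googleapis_beta/language/v1beta1.dart';

/// Prints every token with its part-of-speech tag, dependency label, and head.
void printDependencyTree(List<Token> tokens) {
  for (final Token token in tokens) {
    final head = tokens[token.dependencyEdge.headTokenIndex];
    print("${token.text.content} (${token.partOfSpeech.tag}) "
        "--${token.dependencyEdge.label}--> ${head.text.content}");
  }
}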