
Side by Side Diff: generated/googleapis/lib/bigquery/v2.dart

Issue 1078053002: Roll of googleapis as of 4/7/2015. (Closed) Base URL: https://github.com/dart-lang/googleapis.git@master
Patch Set: Created 5 years, 8 months ago
+// This is a generated file (see the discoveryapis_generator project).
+
library googleapis.bigquery.v2;

-import "dart:core" as core;
-import "dart:collection" as collection;
-import "dart:async" as async;
-import "dart:convert" as convert;
+import 'dart:core' as core;
+import 'dart:collection' as collection;
+import 'dart:async' as async;
+import 'dart:convert' as convert;

-import "package:crypto/crypto.dart" as crypto;
+import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons;
+import 'package:crypto/crypto.dart' as crypto;
import 'package:http/http.dart' as http;
-import '../src/common_internal.dart' as common_internal;
-import '../common/common.dart' as common;

-export '../common/common.dart' show ApiRequestError;
-export '../common/common.dart' show DetailedApiRequestError;
+export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' show
+    ApiRequestError, DetailedApiRequestError, Media, UploadOptions,
+    ResumableUploadOptions, DownloadOptions, PartialDownloadOptions,
+    ByteRange;
+
+const core.String USER_AGENT = 'dart-api-client bigquery/v2';

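With this roll, the error and media types that callers previously imported from '../common/common.dart' are re-exported by the library itself (see the export block above), and every request now carries the USER_AGENT string. A minimal, illustrative sketch of catching the re-exported error type; it assumes the published package:googleapis import path and the status/message fields defined by package:_discoveryapis_commons:

import 'dart:async' as async;
import 'package:googleapis/bigquery/v2.dart' as bigquery;

async.Future printDatasetId(bigquery.BigqueryApi api, String projectId, String datasetId) {
  return api.datasets.get(projectId, datasetId)
      .then((dataset) => print(dataset.id))
      .catchError((e) {
    // DetailedApiRequestError is re-exported by this library, so calling code
    // no longer needs a separate import of the commons package.
    if (e is bigquery.DetailedApiRequestError) {
      print('BigQuery returned ${e.status}: ${e.message}');
    } else {
      throw e;
    }
  });
}
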
/** A data platform for customers to create, manage, share and query data. */
class BigqueryApi {
  /** View and manage your data in Google BigQuery */
  static const BigqueryScope = "https://www.googleapis.com/auth/bigquery";

  /** Insert data into Google BigQuery */
  static const BigqueryInsertdataScope = "https://www.googleapis.com/auth/bigquery.insertdata";

  /** View and manage your data across Google Cloud Platform services */
  static const CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform";

  /** Manage your data and permissions in Google Cloud Storage */
  static const DevstorageFullControlScope = "https://www.googleapis.com/auth/devstorage.full_control";

  /** View your data in Google Cloud Storage */
  static const DevstorageReadOnlyScope = "https://www.googleapis.com/auth/devstorage.read_only";

  /** Manage your data in Google Cloud Storage */
  static const DevstorageReadWriteScope = "https://www.googleapis.com/auth/devstorage.read_write";


-  final common_internal.ApiRequester _requester;
+  final commons.ApiRequester _requester;

  DatasetsResourceApi get datasets => new DatasetsResourceApi(_requester);
  JobsResourceApi get jobs => new JobsResourceApi(_requester);
  ProjectsResourceApi get projects => new ProjectsResourceApi(_requester);
  TabledataResourceApi get tabledata => new TabledataResourceApi(_requester);
  TablesResourceApi get tables => new TablesResourceApi(_requester);

  BigqueryApi(http.Client client, {core.String rootUrl: "https://www.googleapis.com/", core.String servicePath: "bigquery/v2/"}) :
-      _requester = new common_internal.ApiRequester(client, rootUrl, servicePath);
+      _requester = new commons.ApiRequester(client, rootUrl, servicePath, USER_AGENT);
}

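For context on how this generated class is used, here is a minimal construction sketch. It assumes an already-authorized http.Client (for example one obtained via package:googleapis_auth) and the published package:googleapis import path; only the constructor and scope constants shown in the diff above are taken from this file.

import 'package:googleapis/bigquery/v2.dart' as bigquery;
import 'package:http/http.dart' as http;

// `authorizedClient` is assumed to carry OAuth2 credentials covering
// BigqueryApi.BigqueryScope (one of the scope constants defined above).
bigquery.BigqueryApi makeBigqueryApi(http.Client authorizedClient) {
  // rootUrl and servicePath keep the defaults from the constructor above.
  return new bigquery.BigqueryApi(authorizedClient);
}
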

-/** Not documented yet. */
class DatasetsResourceApi {
-  final common_internal.ApiRequester _requester;
+  final commons.ApiRequester _requester;

-  DatasetsResourceApi(common_internal.ApiRequester client) :
+  DatasetsResourceApi(commons.ApiRequester client) :
      _requester = client;

  /**
   * Deletes the dataset specified by the datasetId value. Before you can delete
   * a dataset, you must delete all its tables, either manually or by specifying
   * deleteContents. Immediately after deletion, you can create another dataset
   * with the same name.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the dataset being deleted
   *
   * [datasetId] - Dataset ID of dataset being deleted
   *
   * [deleteContents] - If True, delete all the tables in the dataset. If False
   * and the dataset contains tables, the request will fail. Default is False
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future delete(core.String projectId, core.String datasetId, {core.bool deleteContents}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (datasetId == null) {
      throw new core.ArgumentError("Parameter datasetId is required.");
    }
    if (deleteContents != null) {
      _queryParams["deleteContents"] = ["${deleteContents}"];
    }

    _downloadOptions = null;

-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId');
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId');

    var _response = _requester.request(_url,
                                       "DELETE",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => null);
  }

  /**
   * Returns the dataset specified by datasetID.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the requested dataset
   *
   * [datasetId] - Dataset ID of the requested dataset
   *
   * Completes with a [Dataset].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<Dataset> get(core.String projectId, core.String datasetId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (datasetId == null) {
      throw new core.ArgumentError("Parameter datasetId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId');
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId');

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new Dataset.fromJson(data));
  }

  /**
   * Creates a new empty dataset.
   *
   * [request] - The metadata request object.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the new dataset
   *
   * Completes with a [Dataset].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<Dataset> insert(Dataset request, core.String projectId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }
    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets';
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets';

    var _response = _requester.request(_url,
                                       "POST",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new Dataset.fromJson(data));
  }

  /**
   * Lists all the datasets in the specified project to which the caller has
   * read access; however, a project owner can list (but not necessarily get)
   * all datasets in his project.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the datasets to be listed
   *
   * [all] - Whether to list all datasets, including hidden ones
   *
   * [maxResults] - The maximum number of results to return
   *
   * [pageToken] - Page token, returned by a previous call, to request the next
   * page of results
   *
   * Completes with a [DatasetList].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<DatasetList> list(core.String projectId, {core.bool all, core.int maxResults, core.String pageToken}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (all != null) {
      _queryParams["all"] = ["${all}"];
    }
    if (maxResults != null) {
      _queryParams["maxResults"] = ["${maxResults}"];
    }
    if (pageToken != null) {
      _queryParams["pageToken"] = [pageToken];
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets';
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets';

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new DatasetList.fromJson(data));
  }

  /**
   * Updates information in an existing dataset. The update method replaces the
   * entire dataset resource, whereas the patch method only replaces fields that
   * are provided in the submitted dataset resource. This method supports patch
   * semantics.
   *
   * [request] - The metadata request object.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the dataset being updated
   *
   * [datasetId] - Dataset ID of the dataset being updated
   *
   * Completes with a [Dataset].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<Dataset> patch(Dataset request, core.String projectId, core.String datasetId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }
    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (datasetId == null) {
      throw new core.ArgumentError("Parameter datasetId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId');
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId');

    var _response = _requester.request(_url,
                                       "PATCH",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new Dataset.fromJson(data));
  }

  /**
   * Updates information in an existing dataset. The update method replaces the
   * entire dataset resource, whereas the patch method only replaces fields that
   * are provided in the submitted dataset resource.
   *
   * [request] - The metadata request object.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the dataset being updated
   *
   * [datasetId] - Dataset ID of the dataset being updated
   *
   * Completes with a [Dataset].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<Dataset> update(Dataset request, core.String projectId, core.String datasetId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }
    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (datasetId == null) {
      throw new core.ArgumentError("Parameter datasetId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId');
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId');

    var _response = _requester.request(_url,
                                       "PUT",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new Dataset.fromJson(data));
  }

}

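Several of the list methods in this file take maxResults and pageToken, so a paging sketch may help. It is illustrative only: the DatasetList.datasets, DatasetList.nextPageToken and DatasetReference.datasetId fields follow the BigQuery v2 discovery document and are assumed to be generated further down in this file.

import 'dart:async';
import 'package:googleapis/bigquery/v2.dart' as bigquery;

Future<List<String>> listAllDatasetIds(
    bigquery.BigqueryApi api, String projectId) async {
  var ids = <String>[];
  String pageToken;
  do {
    // Each page holds at most maxResults datasets; nextPageToken is null once
    // the last page has been returned.
    var page = await api.datasets.list(projectId,
        maxResults: 100, pageToken: pageToken);
    if (page.datasets != null) {
      for (var ds in page.datasets) {
        ids.add(ds.datasetReference.datasetId);
      }
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null);
  return ids;
}
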

-/** Not documented yet. */
class JobsResourceApi {
-  final common_internal.ApiRequester _requester;
+  final commons.ApiRequester _requester;

-  JobsResourceApi(common_internal.ApiRequester client) :
+  JobsResourceApi(commons.ApiRequester client) :
      _requester = client;

  /**
   * Retrieves the specified job by ID.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the requested job
   *
   * [jobId] - Job ID of the requested job
   *
   * Completes with a [Job].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<Job> get(core.String projectId, core.String jobId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (jobId == null) {
      throw new core.ArgumentError("Parameter jobId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/jobs/' + common_internal.Escaper.ecapeVariable('$jobId');
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs/' + commons.Escaper.ecapeVariable('$jobId');

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new Job.fromJson(data));
  }

(...skipping 13 matching lines...)

   * page of results
   *
   * [startIndex] - Zero-based index of the starting row
   *
   * [timeoutMs] - How long to wait for the query to complete, in milliseconds,
   * before returning. Default is to return immediately. If the timeout passes
   * before the job completes, the request will fail with a TIMEOUT error
   *
   * Completes with a [GetQueryResultsResponse].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<GetQueryResultsResponse> getQueryResults(core.String projectId, core.String jobId, {core.int maxResults, core.String pageToken, core.String startIndex, core.int timeoutMs}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (jobId == null) {
      throw new core.ArgumentError("Parameter jobId is required.");
    }
    if (maxResults != null) {
      _queryParams["maxResults"] = ["${maxResults}"];
    }
    if (pageToken != null) {
      _queryParams["pageToken"] = [pageToken];
    }
    if (startIndex != null) {
      _queryParams["startIndex"] = [startIndex];
    }
    if (timeoutMs != null) {
      _queryParams["timeoutMs"] = ["${timeoutMs}"];
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/queries/' + common_internal.Escaper.ecapeVariable('$jobId');
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/queries/' + commons.Escaper.ecapeVariable('$jobId');

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new GetQueryResultsResponse.fromJson(data));
  }

  /**
   * Starts a new asynchronous job.
   *
   * [request] - The metadata request object.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the project that will be billed for the job
   *
   * [uploadMedia] - The media to upload.
   *
   * [uploadOptions] - Options for the media upload. Streaming Media without the
   * length being known ahead of time is only supported via resumable uploads.
   *
   * Completes with a [Job].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
-  async.Future<Job> insert(Job request, core.String projectId, {common.UploadOptions uploadOptions : common.UploadOptions.Default, common.Media uploadMedia}) {
+  async.Future<Job> insert(Job request, core.String projectId, {commons.UploadOptions uploadOptions : commons.UploadOptions.Default, commons.Media uploadMedia}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }
    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }

    _uploadMedia = uploadMedia;
    _uploadOptions = uploadOptions;

    if (_uploadMedia == null) {
-      _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/jobs';
+      _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs';
-    } else if (_uploadOptions is common.ResumableUploadOptions) {
+    } else if (_uploadOptions is commons.ResumableUploadOptions) {
-      _url = '/resumable/upload/bigquery/v2/projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/jobs';
+      _url = '/resumable/upload/bigquery/v2/projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs';
    } else {
-      _url = '/upload/bigquery/v2/projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/jobs';
+      _url = '/upload/bigquery/v2/projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs';
    }


    var _response = _requester.request(_url,
                                       "POST",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);

(...skipping 19 matching lines...)

   *
   * [projection] - Restrict information returned to a set of selected fields
   * Possible string values are:
   * - "full" : Includes all job data
   * - "minimal" : Does not include the job configuration
   *
   * [stateFilter] - Filter for job state
   *
   * Completes with a [JobList].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<JobList> list(core.String projectId, {core.bool allUsers, core.int maxResults, core.String pageToken, core.String projection, core.List<core.String> stateFilter}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (allUsers != null) {
      _queryParams["allUsers"] = ["${allUsers}"];
    }
    if (maxResults != null) {
      _queryParams["maxResults"] = ["${maxResults}"];
    }
    if (pageToken != null) {
      _queryParams["pageToken"] = [pageToken];
    }
    if (projection != null) {
      _queryParams["projection"] = [projection];
    }
    if (stateFilter != null) {
      _queryParams["stateFilter"] = stateFilter;
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/jobs';
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs';

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new JobList.fromJson(data));
  }

  /**
   * Runs a BigQuery SQL query synchronously and returns query results if the
   * query completes within a specified timeout.
   *
   * [request] - The metadata request object.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the project billed for the query
   *
   * Completes with a [QueryResponse].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<QueryResponse> query(QueryRequest request, core.String projectId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }
    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/queries';
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/queries';

    var _response = _requester.request(_url,
                                       "POST",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new QueryResponse.fromJson(data));
  }

}

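The synchronous query path (jobs.query above, with jobs.getQueryResults for later pages or slow queries) can be exercised as in the sketch below. The QueryRequest, QueryResponse, TableRow and TableCell field names come from the BigQuery v2 API surface and are assumed to match the model classes generated later in this file.

import 'dart:async';
import 'package:googleapis/bigquery/v2.dart' as bigquery;

Future runSampleQuery(bigquery.BigqueryApi api, String projectId) async {
  var request = new bigquery.QueryRequest()
    ..query =
        'SELECT word, word_count FROM [publicdata:samples.shakespeare] LIMIT 5'
    ..timeoutMs = 10000; // see the timeoutMs semantics documented above
  var response = await api.jobs.query(request, projectId);
  if (response.jobComplete) {
    for (var row in response.rows) {
      print(row.f.map((cell) => cell.v).join('\t'));
    }
  } else {
    // The job is still running; poll getQueryResults with the job reference.
    print('Query still running: ${response.jobReference.jobId}');
  }
}
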

-/** Not documented yet. */
class ProjectsResourceApi {
-  final common_internal.ApiRequester _requester;
+  final commons.ApiRequester _requester;

-  ProjectsResourceApi(common_internal.ApiRequester client) :
+  ProjectsResourceApi(commons.ApiRequester client) :
      _requester = client;

  /**
   * Lists the projects to which you have at least read access.
   *
   * Request parameters:
   *
   * [maxResults] - Maximum number of results to return
   *
   * [pageToken] - Page token, returned by a previous call, to request the next
   * page of results
   *
   * Completes with a [ProjectList].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<ProjectList> list({core.int maxResults, core.String pageToken}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (maxResults != null) {
      _queryParams["maxResults"] = ["${maxResults}"];
    }
    if (pageToken != null) {
      _queryParams["pageToken"] = [pageToken];
    }


    _url = 'projects';

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new ProjectList.fromJson(data));
  }

}


-/** Not documented yet. */
class TabledataResourceApi {
-  final common_internal.ApiRequester _requester;
+  final commons.ApiRequester _requester;

-  TabledataResourceApi(common_internal.ApiRequester client) :
+  TabledataResourceApi(commons.ApiRequester client) :
      _requester = client;

  /**
   * Streams data into BigQuery one record at a time without needing to run a
   * load job.
   *
   * [request] - The metadata request object.
   *
   * Request parameters:
   *
   * [projectId] - Project ID of the destination table.
   *
   * [datasetId] - Dataset ID of the destination table.
   *
   * [tableId] - Table ID of the destination table.
   *
   * Completes with a [TableDataInsertAllResponse].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<TableDataInsertAllResponse> insertAll(TableDataInsertAllRequest request, core.String projectId, core.String datasetId, core.String tableId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (request != null) {
      _body = convert.JSON.encode((request).toJson());
    }
    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (datasetId == null) {
      throw new core.ArgumentError("Parameter datasetId is required.");
    }
    if (tableId == null) {
      throw new core.ArgumentError("Parameter tableId is required.");
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables/' + common_internal.Escaper.ecapeVariable('$tableId') + '/insertAll';
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper.ecapeVariable('$tableId') + '/insertAll';

    var _response = _requester.request(_url,
                                       "POST",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new TableDataInsertAllResponse.fromJson(data));
  }

(...skipping 11 matching lines...)

   *
   * [maxResults] - Maximum number of results to return
   *
   * [pageToken] - Page token, returned by a previous call, identifying the
   * result set
   *
   * [startIndex] - Zero-based index of the starting row to read
   *
   * Completes with a [TableDataList].
   *
-   * Completes with a [common.ApiRequestError] if the API endpoint returned an
+   * Completes with a [commons.ApiRequestError] if the API endpoint returned an
   * error.
   *
   * If the used [http.Client] completes with an error when making a REST call,
   * this method will complete with the same error.
   */
  async.Future<TableDataList> list(core.String projectId, core.String datasetId, core.String tableId, {core.int maxResults, core.String pageToken, core.String startIndex}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
-    var _downloadOptions = common.DownloadOptions.Metadata;
+    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }
    if (datasetId == null) {
      throw new core.ArgumentError("Parameter datasetId is required.");
    }
    if (tableId == null) {
      throw new core.ArgumentError("Parameter tableId is required.");
    }
    if (maxResults != null) {
      _queryParams["maxResults"] = ["${maxResults}"];
    }
    if (pageToken != null) {
      _queryParams["pageToken"] = [pageToken];
    }
    if (startIndex != null) {
      _queryParams["startIndex"] = [startIndex];
    }


-    _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables/' + common_internal.Escaper.ecapeVariable('$tableId') + '/data';
+    _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper.ecapeVariable('$tableId') + '/data';

    var _response = _requester.request(_url,
                                       "GET",
                                       body: _body,
                                       queryParams: _queryParams,
                                       uploadOptions: _uploadOptions,
                                       uploadMedia: _uploadMedia,
                                       downloadOptions: _downloadOptions);
    return _response.then((data) => new TableDataList.fromJson(data));
  }

}

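A streaming-insert sketch for tabledata.insertAll above. The request is built with the generated fromJson constructor so the nested row classes need not be named here; the payload shape ('rows', 'insertId', 'json') follows the BigQuery REST API, and the insertErrors field is assumed from the discovery document rather than taken from this diff.

import 'dart:async';
import 'package:googleapis/bigquery/v2.dart' as bigquery;

Future streamOneRow(bigquery.BigqueryApi api, String projectId,
    String datasetId, String tableId) async {
  var request = new bigquery.TableDataInsertAllRequest.fromJson({
    'rows': [
      {
        'insertId': 'row-1', // lets BigQuery de-duplicate retried inserts
        'json': {'name': 'alice', 'score': 42}
      }
    ]
  });
  var response =
      await api.tabledata.insertAll(request, projectId, datasetId, tableId);
  if (response.insertErrors != null && response.insertErrors.isNotEmpty) {
    print('${response.insertErrors.length} row(s) were rejected');
  }
}
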
853 854
854 /** Not documented yet. */
855 class TablesResourceApi { 855 class TablesResourceApi {
856 final common_internal.ApiRequester _requester; 856 final commons.ApiRequester _requester;
857 857
858 TablesResourceApi(common_internal.ApiRequester client) : 858 TablesResourceApi(commons.ApiRequester client) :
859 _requester = client; 859 _requester = client;
860 860
861 /** 861 /**
862 * Deletes the table specified by tableId from the dataset. If the table 862 * Deletes the table specified by tableId from the dataset. If the table
863 * contains data, all the data will be deleted. 863 * contains data, all the data will be deleted.
864 * 864 *
865 * Request parameters: 865 * Request parameters:
866 * 866 *
867 * [projectId] - Project ID of the table to delete 867 * [projectId] - Project ID of the table to delete
868 * 868 *
869 * [datasetId] - Dataset ID of the table to delete 869 * [datasetId] - Dataset ID of the table to delete
870 * 870 *
871 * [tableId] - Table ID of the table to delete 871 * [tableId] - Table ID of the table to delete
872 * 872 *
873 * Completes with a [common.ApiRequestError] if the API endpoint returned an 873 * Completes with a [commons.ApiRequestError] if the API endpoint returned an
874 * error. 874 * error.
875 * 875 *
876 * If the used [http.Client] completes with an error when making a REST call, 876 * If the used [http.Client] completes with an error when making a REST call,
877 * this method will complete with the same error. 877 * this method will complete with the same error.
878 */ 878 */
879 async.Future delete(core.String projectId, core.String datasetId, core.String tableId) { 879 async.Future delete(core.String projectId, core.String datasetId, core.String tableId) {
880 var _url = null; 880 var _url = null;
881 var _queryParams = new core.Map(); 881 var _queryParams = new core.Map();
882 var _uploadMedia = null; 882 var _uploadMedia = null;
883 var _uploadOptions = null; 883 var _uploadOptions = null;
884 var _downloadOptions = common.DownloadOptions.Metadata; 884 var _downloadOptions = commons.DownloadOptions.Metadata;
885 var _body = null; 885 var _body = null;
886 886
887 if (projectId == null) { 887 if (projectId == null) {
888 throw new core.ArgumentError("Parameter projectId is required."); 888 throw new core.ArgumentError("Parameter projectId is required.");
889 } 889 }
890 if (datasetId == null) { 890 if (datasetId == null) {
891 throw new core.ArgumentError("Parameter datasetId is required."); 891 throw new core.ArgumentError("Parameter datasetId is required.");
892 } 892 }
893 if (tableId == null) { 893 if (tableId == null) {
894 throw new core.ArgumentError("Parameter tableId is required."); 894 throw new core.ArgumentError("Parameter tableId is required.");
895 } 895 }
896 896
897 _downloadOptions = null; 897 _downloadOptions = null;
898 898
899 _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables/' + common_internal.Escaper.ecapeVariable('$tableId'); 899 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper.ecapeVariable('$tableId');
900 900
901 var _response = _requester.request(_url, 901 var _response = _requester.request(_url,
902 "DELETE", 902 "DELETE",
903 body: _body, 903 body: _body,
904 queryParams: _queryParams, 904 queryParams: _queryParams,
905 uploadOptions: _uploadOptions, 905 uploadOptions: _uploadOptions,
906 uploadMedia: _uploadMedia, 906 uploadMedia: _uploadMedia,
907 downloadOptions: _downloadOptions); 907 downloadOptions: _downloadOptions);
908 return _response.then((data) => null); 908 return _response.then((data) => null);
909 } 909 }
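A sketch of calling delete with the error handling described above; ApiRequestError and DetailedApiRequestError are re-exported by this library. Treating HTTP 404 as "table already gone" is an assumption about the service, not something stated in this file.

// Deletes a table, ignoring the error if it does not exist.
Future deleteTableIfPresent(bigquery.BigqueryApi api, String projectId,
                            String datasetId, String tableId) {
  return api.tables.delete(projectId, datasetId, tableId)
      .catchError((error) {
    if (error is bigquery.DetailedApiRequestError && error.status == 404) {
      return null;  // The table was not there to begin with (assumed 404 semantics).
    }
    throw error;    // Propagate anything else unchanged.
  });
}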
910 910
911 /** 911 /**
912 * Gets the specified table resource by table ID. This method does not return 912 * Gets the specified table resource by table ID. This method does not return
913 * the data in the table, it only returns the table resource, which describes 913 * the data in the table, it only returns the table resource, which describes
914 * the structure of this table. 914 * the structure of this table.
915 * 915 *
916 * Request parameters: 916 * Request parameters:
917 * 917 *
918 * [projectId] - Project ID of the requested table 918 * [projectId] - Project ID of the requested table
919 * 919 *
920 * [datasetId] - Dataset ID of the requested table 920 * [datasetId] - Dataset ID of the requested table
921 * 921 *
922 * [tableId] - Table ID of the requested table 922 * [tableId] - Table ID of the requested table
923 * 923 *
924 * Completes with a [Table]. 924 * Completes with a [Table].
925 * 925 *
926 * Completes with a [common.ApiRequestError] if the API endpoint returned an 926 * Completes with a [commons.ApiRequestError] if the API endpoint returned an
927 * error. 927 * error.
928 * 928 *
929 * If the used [http.Client] completes with an error when making a REST call, 929 * If the used [http.Client] completes with an error when making a REST call,
930 * this method will complete with the same error. 930 * this method will complete with the same error.
931 */ 931 */
932 async.Future<Table> get(core.String projectId, core.String datasetId, core.String tableId) { 932 async.Future<Table> get(core.String projectId, core.String datasetId, core.String tableId) {
933 var _url = null; 933 var _url = null;
934 var _queryParams = new core.Map(); 934 var _queryParams = new core.Map();
935 var _uploadMedia = null; 935 var _uploadMedia = null;
936 var _uploadOptions = null; 936 var _uploadOptions = null;
937 var _downloadOptions = common.DownloadOptions.Metadata; 937 var _downloadOptions = commons.DownloadOptions.Metadata;
938 var _body = null; 938 var _body = null;
939 939
940 if (projectId == null) { 940 if (projectId == null) {
941 throw new core.ArgumentError("Parameter projectId is required."); 941 throw new core.ArgumentError("Parameter projectId is required.");
942 } 942 }
943 if (datasetId == null) { 943 if (datasetId == null) {
944 throw new core.ArgumentError("Parameter datasetId is required."); 944 throw new core.ArgumentError("Parameter datasetId is required.");
945 } 945 }
946 if (tableId == null) { 946 if (tableId == null) {
947 throw new core.ArgumentError("Parameter tableId is required."); 947 throw new core.ArgumentError("Parameter tableId is required.");
948 } 948 }
949 949
950 950
951 _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables/' + common_internal.Escaper.ecapeVariable('$tableId'); 951 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper.ecapeVariable('$tableId');
952 952
953 var _response = _requester.request(_url, 953 var _response = _requester.request(_url,
954 "GET", 954 "GET",
955 body: _body, 955 body: _body,
956 queryParams: _queryParams, 956 queryParams: _queryParams,
957 uploadOptions: _uploadOptions, 957 uploadOptions: _uploadOptions,
958 uploadMedia: _uploadMedia, 958 uploadMedia: _uploadMedia,
959 downloadOptions: _downloadOptions); 959 downloadOptions: _downloadOptions);
960 return _response.then((data) => new Table.fromJson(data)); 960 return _response.then((data) => new Table.fromJson(data));
961 } 961 }
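A sketch of reading the table resource that get returns. The Table, TableSchema and TableFieldSchema field names (`schema`, `fields`, `name`, `type`) come from the corresponding classes defined later in this generated file, outside this hunk.

// Prints the column names and types of a table without touching its data.
Future printColumns(bigquery.BigqueryApi api, String projectId,
                    String datasetId, String tableId) {
  return api.tables.get(projectId, datasetId, tableId)
      .then((bigquery.Table table) {
    table.schema.fields.forEach((field) {
      print('${field.name}: ${field.type}');
    });
  });
}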
962 962
963 /** 963 /**
964 * Creates a new, empty table in the dataset. 964 * Creates a new, empty table in the dataset.
965 * 965 *
966 * [request] - The metadata request object. 966 * [request] - The metadata request object.
967 * 967 *
968 * Request parameters: 968 * Request parameters:
969 * 969 *
970 * [projectId] - Project ID of the new table 970 * [projectId] - Project ID of the new table
971 * 971 *
972 * [datasetId] - Dataset ID of the new table 972 * [datasetId] - Dataset ID of the new table
973 * 973 *
974 * Completes with a [Table]. 974 * Completes with a [Table].
975 * 975 *
976 * Completes with a [common.ApiRequestError] if the API endpoint returned an 976 * Completes with a [commons.ApiRequestError] if the API endpoint returned an
977 * error. 977 * error.
978 * 978 *
979 * If the used [http.Client] completes with an error when making a REST call, 979 * If the used [http.Client] completes with an error when making a REST call,
980 * this method will complete with the same error. 980 * this method will complete with the same error.
981 */ 981 */
982 async.Future<Table> insert(Table request, core.String projectId, core.String datasetId) { 982 async.Future<Table> insert(Table request, core.String projectId, core.String datasetId) {
983 var _url = null; 983 var _url = null;
984 var _queryParams = new core.Map(); 984 var _queryParams = new core.Map();
985 var _uploadMedia = null; 985 var _uploadMedia = null;
986 var _uploadOptions = null; 986 var _uploadOptions = null;
987 var _downloadOptions = common.DownloadOptions.Metadata; 987 var _downloadOptions = commons.DownloadOptions.Metadata;
988 var _body = null; 988 var _body = null;
989 989
990 if (request != null) { 990 if (request != null) {
991 _body = convert.JSON.encode((request).toJson()); 991 _body = convert.JSON.encode((request).toJson());
992 } 992 }
993 if (projectId == null) { 993 if (projectId == null) {
994 throw new core.ArgumentError("Parameter projectId is required."); 994 throw new core.ArgumentError("Parameter projectId is required.");
995 } 995 }
996 if (datasetId == null) { 996 if (datasetId == null) {
997 throw new core.ArgumentError("Parameter datasetId is required."); 997 throw new core.ArgumentError("Parameter datasetId is required.");
998 } 998 }
999 999
1000 1000
1001 _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables'; 1001 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables';
1002 1002
1003 var _response = _requester.request(_url, 1003 var _response = _requester.request(_url,
1004 "POST", 1004 "POST",
1005 body: _body, 1005 body: _body,
1006 queryParams: _queryParams, 1006 queryParams: _queryParams,
1007 uploadOptions: _uploadOptions, 1007 uploadOptions: _uploadOptions,
1008 uploadMedia: _uploadMedia, 1008 uploadMedia: _uploadMedia,
1009 downloadOptions: _downloadOptions); 1009 downloadOptions: _downloadOptions);
1010 return _response.then((data) => new Table.fromJson(data)); 1010 return _response.then((data) => new Table.fromJson(data));
1011 } 1011 }
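A sketch of building the Table resource that insert expects, again relying on the Table, TableReference, TableSchema and TableFieldSchema classes defined later in this file; the column names are placeholders.

// Creates a new, empty two-column table in an existing dataset.
Future<bigquery.Table> createTable(bigquery.BigqueryApi api,
    String projectId, String datasetId, String tableId) {
  var table = new bigquery.Table()
    ..tableReference = (new bigquery.TableReference()
        ..projectId = projectId
        ..datasetId = datasetId
        ..tableId = tableId)
    ..schema = (new bigquery.TableSchema()
        ..fields = [
          new bigquery.TableFieldSchema()
            ..name = 'name'
            ..type = 'STRING',
          new bigquery.TableFieldSchema()
            ..name = 'age'
            ..type = 'INTEGER'
        ]);
  return api.tables.insert(table, projectId, datasetId);
}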
1012 1012
1013 /** 1013 /**
1014 * Lists all tables in the specified dataset. 1014 * Lists all tables in the specified dataset.
1015 * 1015 *
1016 * Request parameters: 1016 * Request parameters:
1017 * 1017 *
1018 * [projectId] - Project ID of the tables to list 1018 * [projectId] - Project ID of the tables to list
1019 * 1019 *
1020 * [datasetId] - Dataset ID of the tables to list 1020 * [datasetId] - Dataset ID of the tables to list
1021 * 1021 *
1022 * [maxResults] - Maximum number of results to return 1022 * [maxResults] - Maximum number of results to return
1023 * 1023 *
1024 * [pageToken] - Page token, returned by a previous call, to request the next 1024 * [pageToken] - Page token, returned by a previous call, to request the next
1025 * page of results 1025 * page of results
1026 * 1026 *
1027 * Completes with a [TableList]. 1027 * Completes with a [TableList].
1028 * 1028 *
1029 * Completes with a [common.ApiRequestError] if the API endpoint returned an 1029 * Completes with a [commons.ApiRequestError] if the API endpoint returned an
1030 * error. 1030 * error.
1031 * 1031 *
1032 * If the used [http.Client] completes with an error when making a REST call, 1032 * If the used [http.Client] completes with an error when making a REST call,
1033 * this method will complete with the same error. 1033 * this method will complete with the same error.
1034 */ 1034 */
1035 async.Future<TableList> list(core.String projectId, core.String datasetId, {core.int maxResults, core.String pageToken}) { 1035 async.Future<TableList> list(core.String projectId, core.String datasetId, {core.int maxResults, core.String pageToken}) {
1036 var _url = null; 1036 var _url = null;
1037 var _queryParams = new core.Map(); 1037 var _queryParams = new core.Map();
1038 var _uploadMedia = null; 1038 var _uploadMedia = null;
1039 var _uploadOptions = null; 1039 var _uploadOptions = null;
1040 var _downloadOptions = common.DownloadOptions.Metadata; 1040 var _downloadOptions = commons.DownloadOptions.Metadata;
1041 var _body = null; 1041 var _body = null;
1042 1042
1043 if (projectId == null) { 1043 if (projectId == null) {
1044 throw new core.ArgumentError("Parameter projectId is required."); 1044 throw new core.ArgumentError("Parameter projectId is required.");
1045 } 1045 }
1046 if (datasetId == null) { 1046 if (datasetId == null) {
1047 throw new core.ArgumentError("Parameter datasetId is required."); 1047 throw new core.ArgumentError("Parameter datasetId is required.");
1048 } 1048 }
1049 if (maxResults != null) { 1049 if (maxResults != null) {
1050 _queryParams["maxResults"] = ["${maxResults}"]; 1050 _queryParams["maxResults"] = ["${maxResults}"];
1051 } 1051 }
1052 if (pageToken != null) { 1052 if (pageToken != null) {
1053 _queryParams["pageToken"] = [pageToken]; 1053 _queryParams["pageToken"] = [pageToken];
1054 } 1054 }
1055 1055
1056 1056
1057 _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables'; 1057 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables';
1058 1058
1059 var _response = _requester.request(_url, 1059 var _response = _requester.request(_url,
1060 "GET", 1060 "GET",
1061 body: _body, 1061 body: _body,
1062 queryParams: _queryParams, 1062 queryParams: _queryParams,
1063 uploadOptions: _uploadOptions, 1063 uploadOptions: _uploadOptions,
1064 uploadMedia: _uploadMedia, 1064 uploadMedia: _uploadMedia,
1065 downloadOptions: _downloadOptions); 1065 downloadOptions: _downloadOptions);
1066 return _response.then((data) => new TableList.fromJson(data)); 1066 return _response.then((data) => new TableList.fromJson(data));
1067 } 1067 }
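A sketch of the listing call. The assumption that TableList carries a `tables` list whose entries expose an `id` mirrors the DatasetList/DatasetListDatasets pair shown further down in this diff.

// Prints the ids of up to 50 tables in a dataset.
Future listTables(bigquery.BigqueryApi api, String projectId,
                  String datasetId) {
  return api.tables.list(projectId, datasetId, maxResults: 50)
      .then((bigquery.TableList result) {
    if (result.tables != null) {
      result.tables.forEach((t) => print(t.id));
    }
  });
}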
1068 1068
1069 /** 1069 /**
1070 * Updates information in an existing table. The update method replaces the 1070 * Updates information in an existing table. The update method replaces the
1071 * entire table resource, whereas the patch method only replaces fields that 1071 * entire table resource, whereas the patch method only replaces fields that
1072 * are provided in the submitted table resource. This method supports patch 1072 * are provided in the submitted table resource. This method supports patch
1073 * semantics. 1073 * semantics.
1074 * 1074 *
1075 * [request] - The metadata request object. 1075 * [request] - The metadata request object.
1076 * 1076 *
1077 * Request parameters: 1077 * Request parameters:
1078 * 1078 *
1079 * [projectId] - Project ID of the table to update 1079 * [projectId] - Project ID of the table to update
1080 * 1080 *
1081 * [datasetId] - Dataset ID of the table to update 1081 * [datasetId] - Dataset ID of the table to update
1082 * 1082 *
1083 * [tableId] - Table ID of the table to update 1083 * [tableId] - Table ID of the table to update
1084 * 1084 *
1085 * Completes with a [Table]. 1085 * Completes with a [Table].
1086 * 1086 *
1087 * Completes with a [common.ApiRequestError] if the API endpoint returned an 1087 * Completes with a [commons.ApiRequestError] if the API endpoint returned an
1088 * error. 1088 * error.
1089 * 1089 *
1090 * If the used [http.Client] completes with an error when making a REST call, 1090 * If the used [http.Client] completes with an error when making a REST call,
1091 * this method will complete with the same error. 1091 * this method will complete with the same error.
1092 */ 1092 */
1093 async.Future<Table> patch(Table request, core.String projectId, core.String datasetId, core.String tableId) { 1093 async.Future<Table> patch(Table request, core.String projectId, core.String datasetId, core.String tableId) {
1094 var _url = null; 1094 var _url = null;
1095 var _queryParams = new core.Map(); 1095 var _queryParams = new core.Map();
1096 var _uploadMedia = null; 1096 var _uploadMedia = null;
1097 var _uploadOptions = null; 1097 var _uploadOptions = null;
1098 var _downloadOptions = common.DownloadOptions.Metadata; 1098 var _downloadOptions = commons.DownloadOptions.Metadata;
1099 var _body = null; 1099 var _body = null;
1100 1100
1101 if (request != null) { 1101 if (request != null) {
1102 _body = convert.JSON.encode((request).toJson()); 1102 _body = convert.JSON.encode((request).toJson());
1103 } 1103 }
1104 if (projectId == null) { 1104 if (projectId == null) {
1105 throw new core.ArgumentError("Parameter projectId is required."); 1105 throw new core.ArgumentError("Parameter projectId is required.");
1106 } 1106 }
1107 if (datasetId == null) { 1107 if (datasetId == null) {
1108 throw new core.ArgumentError("Parameter datasetId is required."); 1108 throw new core.ArgumentError("Parameter datasetId is required.");
1109 } 1109 }
1110 if (tableId == null) { 1110 if (tableId == null) {
1111 throw new core.ArgumentError("Parameter tableId is required."); 1111 throw new core.ArgumentError("Parameter tableId is required.");
1112 } 1112 }
1113 1113
1114 1114
1115 _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables/' + common_internal.Escaper.ecapeVariable('$tableId'); 1115 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper.ecapeVariable('$tableId');
1116 1116
1117 var _response = _requester.request(_url, 1117 var _response = _requester.request(_url,
1118 "PATCH", 1118 "PATCH",
1119 body: _body, 1119 body: _body,
1120 queryParams: _queryParams, 1120 queryParams: _queryParams,
1121 uploadOptions: _uploadOptions, 1121 uploadOptions: _uploadOptions,
1122 uploadMedia: _uploadMedia, 1122 uploadMedia: _uploadMedia,
1123 downloadOptions: _downloadOptions); 1123 downloadOptions: _downloadOptions);
1124 return _response.then((data) => new Table.fromJson(data)); 1124 return _response.then((data) => new Table.fromJson(data));
1125 } 1125 }
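Because patch only replaces the fields that are actually set on the submitted resource, a sparse Table suffices for a metadata-only change; `description` is a Table field defined later in this file.

// Updates only the table description, leaving schema and data untouched.
Future<bigquery.Table> setTableDescription(bigquery.BigqueryApi api,
    String projectId, String datasetId, String tableId, String description) {
  var sparse = new bigquery.Table()..description = description;
  return api.tables.patch(sparse, projectId, datasetId, tableId);
}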
1126 1126
1127 /** 1127 /**
1128 * Updates information in an existing table. The update method replaces the 1128 * Updates information in an existing table. The update method replaces the
1129 * entire table resource, whereas the patch method only replaces fields that 1129 * entire table resource, whereas the patch method only replaces fields that
1130 * are provided in the submitted table resource. 1130 * are provided in the submitted table resource.
1131 * 1131 *
1132 * [request] - The metadata request object. 1132 * [request] - The metadata request object.
1133 * 1133 *
1134 * Request parameters: 1134 * Request parameters:
1135 * 1135 *
1136 * [projectId] - Project ID of the table to update 1136 * [projectId] - Project ID of the table to update
1137 * 1137 *
1138 * [datasetId] - Dataset ID of the table to update 1138 * [datasetId] - Dataset ID of the table to update
1139 * 1139 *
1140 * [tableId] - Table ID of the table to update 1140 * [tableId] - Table ID of the table to update
1141 * 1141 *
1142 * Completes with a [Table]. 1142 * Completes with a [Table].
1143 * 1143 *
1144 * Completes with a [common.ApiRequestError] if the API endpoint returned an 1144 * Completes with a [commons.ApiRequestError] if the API endpoint returned an
1145 * error. 1145 * error.
1146 * 1146 *
1147 * If the used [http.Client] completes with an error when making a REST call, 1147 * If the used [http.Client] completes with an error when making a REST call,
1148 * this method will complete with the same error. 1148 * this method will complete with the same error.
1149 */ 1149 */
1150 async.Future<Table> update(Table request, core.String projectId, core.String datasetId, core.String tableId) { 1150 async.Future<Table> update(Table request, core.String projectId, core.String datasetId, core.String tableId) {
1151 var _url = null; 1151 var _url = null;
1152 var _queryParams = new core.Map(); 1152 var _queryParams = new core.Map();
1153 var _uploadMedia = null; 1153 var _uploadMedia = null;
1154 var _uploadOptions = null; 1154 var _uploadOptions = null;
1155 var _downloadOptions = common.DownloadOptions.Metadata; 1155 var _downloadOptions = commons.DownloadOptions.Metadata;
1156 var _body = null; 1156 var _body = null;
1157 1157
1158 if (request != null) { 1158 if (request != null) {
1159 _body = convert.JSON.encode((request).toJson()); 1159 _body = convert.JSON.encode((request).toJson());
1160 } 1160 }
1161 if (projectId == null) { 1161 if (projectId == null) {
1162 throw new core.ArgumentError("Parameter projectId is required."); 1162 throw new core.ArgumentError("Parameter projectId is required.");
1163 } 1163 }
1164 if (datasetId == null) { 1164 if (datasetId == null) {
1165 throw new core.ArgumentError("Parameter datasetId is required."); 1165 throw new core.ArgumentError("Parameter datasetId is required.");
1166 } 1166 }
1167 if (tableId == null) { 1167 if (tableId == null) {
1168 throw new core.ArgumentError("Parameter tableId is required."); 1168 throw new core.ArgumentError("Parameter tableId is required.");
1169 } 1169 }
1170 1170
1171 1171
1172 _url = 'projects/' + common_internal.Escaper.ecapeVariable('$projectId') + '/datasets/' + common_internal.Escaper.ecapeVariable('$datasetId') + '/tables/' + common_internal.Escaper.ecapeVariable('$tableId'); 1172 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper.ecapeVariable('$tableId');
1173 1173
1174 var _response = _requester.request(_url, 1174 var _response = _requester.request(_url,
1175 "PUT", 1175 "PUT",
1176 body: _body, 1176 body: _body,
1177 queryParams: _queryParams, 1177 queryParams: _queryParams,
1178 uploadOptions: _uploadOptions, 1178 uploadOptions: _uploadOptions,
1179 uploadMedia: _uploadMedia, 1179 uploadMedia: _uploadMedia,
1180 downloadOptions: _downloadOptions); 1180 downloadOptions: _downloadOptions);
1181 return _response.then((data) => new Table.fromJson(data)); 1181 return _response.then((data) => new Table.fromJson(data));
1182 } 1182 }
1183 1183
1184 } 1184 }
1185 1185
1186 1186
1187 1187
1188 /** Not documented yet. */ 1188 class CsvOptions {
1189 /**
1190 * [Optional] Indicates if BigQuery should accept rows that are missing
1191 * trailing optional columns. If true, BigQuery treats missing trailing
1192 * columns as null values. If false, records with missing trailing columns are
1193 * treated as bad records, and if there are too many bad records, an invalid
1194 * error is returned in the job result. The default value is false.
1195 */
1196 core.bool allowJaggedRows;
1197
1198 /**
1199 * [Optional] Indicates if BigQuery should allow quoted data sections that
1200 * contain newline characters in a CSV file. The default value is false.
1201 */
1202 core.bool allowQuotedNewlines;
1203
1204 /**
1205 * [Optional] The character encoding of the data. The supported values are
1206 * UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data
1207 * after the raw, binary data has been split using the values of the quote and
1208 * fieldDelimiter properties.
1209 */
1210 core.String encoding;
1211
1212 /**
1213 * [Optional] The separator for fields in a CSV file. BigQuery converts the
1214 * string to ISO-8859-1 encoding, and then uses the first byte of the encoded
1215 * string to split the data in its raw, binary state. BigQuery also supports
1216 * the escape sequence "\t" to specify a tab separator. The default value is a
1217 * comma (',').
1218 */
1219 core.String fieldDelimiter;
1220
1221 /**
1222 * [Optional] The value that is used to quote data sections in a CSV file.
1223 * BigQuery converts the string to ISO-8859-1 encoding, and then uses the
1224 * first byte of the encoded string to split the data in its raw, binary
1225 * state. The default value is a double-quote ('"'). If your data does not
1226 * contain quoted sections, set the property value to an empty string. If your
1227 * data contains quoted newline characters, you must also set the
1228 * allowQuotedNewlines property to true.
1229 */
1230 core.String quote;
1231
1232 /**
1233 * [Optional] The number of rows at the top of a CSV file that BigQuery will
1234 * skip when reading the data. The default value is 0. This property is useful
1235 * if you have header rows in the file that should be skipped.
1236 */
1237 core.int skipLeadingRows;
1238
1239
1240 CsvOptions();
1241
1242 CsvOptions.fromJson(core.Map _json) {
1243 if (_json.containsKey("allowJaggedRows")) {
1244 allowJaggedRows = _json["allowJaggedRows"];
1245 }
1246 if (_json.containsKey("allowQuotedNewlines")) {
1247 allowQuotedNewlines = _json["allowQuotedNewlines"];
1248 }
1249 if (_json.containsKey("encoding")) {
1250 encoding = _json["encoding"];
1251 }
1252 if (_json.containsKey("fieldDelimiter")) {
1253 fieldDelimiter = _json["fieldDelimiter"];
1254 }
1255 if (_json.containsKey("quote")) {
1256 quote = _json["quote"];
1257 }
1258 if (_json.containsKey("skipLeadingRows")) {
1259 skipLeadingRows = _json["skipLeadingRows"];
1260 }
1261 }
1262
1263 core.Map toJson() {
1264 var _json = new core.Map();
1265 if (allowJaggedRows != null) {
1266 _json["allowJaggedRows"] = allowJaggedRows;
1267 }
1268 if (allowQuotedNewlines != null) {
1269 _json["allowQuotedNewlines"] = allowQuotedNewlines;
1270 }
1271 if (encoding != null) {
1272 _json["encoding"] = encoding;
1273 }
1274 if (fieldDelimiter != null) {
1275 _json["fieldDelimiter"] = fieldDelimiter;
1276 }
1277 if (quote != null) {
1278 _json["quote"] = quote;
1279 }
1280 if (skipLeadingRows != null) {
1281 _json["skipLeadingRows"] = skipLeadingRows;
1282 }
1283 return _json;
1284 }
1285 }
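A sketch of filling in the new CsvOptions class for a tab-separated, UTF-8 file with one header row, using the same `bigquery` import alias as the earlier sketches; it is meant to be attached to the ExternalDataConfiguration added further down.

// CSV parsing options: tab delimiter, skip the header, allow quoted newlines.
var csvOptions = new bigquery.CsvOptions()
  ..fieldDelimiter = '\t'
  ..skipLeadingRows = 1
  ..allowQuotedNewlines = true
  ..encoding = 'UTF-8';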
1286
1287
1189 class DatasetAccess { 1288 class DatasetAccess {
1190 /** 1289 /**
1191 * [Pick one] A domain to grant access to. Any users signed in with the domain 1290 * [Pick one] A domain to grant access to. Any users signed in with the domain
1192 * specified will be granted the specified access. Example: "example.com". 1291 * specified will be granted the specified access. Example: "example.com".
1193 */ 1292 */
1194 core.String domain; 1293 core.String domain;
1195 1294
1196 /** [Pick one] An email address of a Google Group to grant access to. */ 1295 /** [Pick one] An email address of a Google Group to grant access to. */
1197 core.String groupByEmail; 1296 core.String groupByEmail;
1198 1297
(...skipping 69 matching lines...)
1268 _json["userByEmail"] = userByEmail; 1367 _json["userByEmail"] = userByEmail;
1269 } 1368 }
1270 if (view != null) { 1369 if (view != null) {
1271 _json["view"] = (view).toJson(); 1370 _json["view"] = (view).toJson();
1272 } 1371 }
1273 return _json; 1372 return _json;
1274 } 1373 }
1275 } 1374 }
1276 1375
1277 1376
1278 /** Not documented yet. */
1279 class Dataset { 1377 class Dataset {
1280 /** 1378 /**
1281 * [Optional] An array of objects that define dataset access for one or more 1379 * [Optional] An array of objects that define dataset access for one or more
1282 * entities. You can set this property when inserting or updating a dataset in 1380 * entities. You can set this property when inserting or updating a dataset in
1283 * order to control who is allowed to access the data. If unspecified at 1381 * order to control who is allowed to access the data. If unspecified at
1284 * dataset creation time, BigQuery adds default dataset access for the 1382 * dataset creation time, BigQuery adds default dataset access for the
1285 * following entities: access.specialGroup: projectReaders; access.role: 1383 * following entities: access.specialGroup: projectReaders; access.role:
1286 * READER; access.specialGroup: projectWriters; access.role: WRITER; 1384 * READER; access.specialGroup: projectWriters; access.role: WRITER;
1287 * access.specialGroup: projectOwners; access.role: OWNER; access.userByEmail: 1385 * access.specialGroup: projectOwners; access.role: OWNER; access.userByEmail:
1288 * [dataset creator email]; access.role: OWNER; 1386 * [dataset creator email]; access.role: OWNER;
1289 */ 1387 */
1290 core.List<DatasetAccess> access; 1388 core.List<DatasetAccess> access;
1291 1389
1292 /** 1390 /**
1293 * [Output-only] The time when this dataset was created, in milliseconds since 1391 * [Output-only] The time when this dataset was created, in milliseconds since
1294 * the epoch. 1392 * the epoch.
1295 */ 1393 */
1296 core.String creationTime; 1394 core.String creationTime;
1297 1395
1298 /** [Required] A reference that identifies the dataset. */ 1396 /** [Required] A reference that identifies the dataset. */
1299 DatasetReference datasetReference; 1397 DatasetReference datasetReference;
1300 1398
1399 /**
1400 * [Experimental] The default lifetime of all tables in the dataset, in
1401 * milliseconds. The minimum value is 3600000 milliseconds (one hour). Once
1402 * this property is set, all newly-created tables in the dataset will have an
1403 * expirationTime property set to the creation time plus the value in this
1404 * property, and changing the value will only affect new tables, not existing
1405 * ones. When the expirationTime for a given table is reached, that table will
1406 * be deleted automatically. If a table's expirationTime is modified or
1407 * removed before the table expires, or if you provide an explicit
1408 * expirationTime when creating a table, that value takes precedence over the
1409 * default expiration time indicated by this property.
1410 */
1411 core.String defaultTableExpirationMs;
1412
1301 /** [Optional] A user-friendly description of the dataset. */ 1413 /** [Optional] A user-friendly description of the dataset. */
1302 core.String description; 1414 core.String description;
1303 1415
1304 /** [Output-only] A hash of the resource. */ 1416 /** [Output-only] A hash of the resource. */
1305 core.String etag; 1417 core.String etag;
1306 1418
1307 /** [Optional] A descriptive name for the dataset. */ 1419 /** [Optional] A descriptive name for the dataset. */
1308 core.String friendlyName; 1420 core.String friendlyName;
1309 1421
1310 /** 1422 /**
(...skipping 25 matching lines...)
1336 Dataset.fromJson(core.Map _json) { 1448 Dataset.fromJson(core.Map _json) {
1337 if (_json.containsKey("access")) { 1449 if (_json.containsKey("access")) {
1338 access = _json["access"].map((value) => new DatasetAccess.fromJson(value)).toList(); 1450 access = _json["access"].map((value) => new DatasetAccess.fromJson(value)).toList();
1339 } 1451 }
1340 if (_json.containsKey("creationTime")) { 1452 if (_json.containsKey("creationTime")) {
1341 creationTime = _json["creationTime"]; 1453 creationTime = _json["creationTime"];
1342 } 1454 }
1343 if (_json.containsKey("datasetReference")) { 1455 if (_json.containsKey("datasetReference")) {
1344 datasetReference = new DatasetReference.fromJson(_json["datasetReference"]); 1456 datasetReference = new DatasetReference.fromJson(_json["datasetReference"]);
1345 } 1457 }
1458 if (_json.containsKey("defaultTableExpirationMs")) {
1459 defaultTableExpirationMs = _json["defaultTableExpirationMs"];
1460 }
1346 if (_json.containsKey("description")) { 1461 if (_json.containsKey("description")) {
1347 description = _json["description"]; 1462 description = _json["description"];
1348 } 1463 }
1349 if (_json.containsKey("etag")) { 1464 if (_json.containsKey("etag")) {
1350 etag = _json["etag"]; 1465 etag = _json["etag"];
1351 } 1466 }
1352 if (_json.containsKey("friendlyName")) { 1467 if (_json.containsKey("friendlyName")) {
1353 friendlyName = _json["friendlyName"]; 1468 friendlyName = _json["friendlyName"];
1354 } 1469 }
1355 if (_json.containsKey("id")) { 1470 if (_json.containsKey("id")) {
(...skipping 14 matching lines...)
1370 var _json = new core.Map(); 1485 var _json = new core.Map();
1371 if (access != null) { 1486 if (access != null) {
1372 _json["access"] = access.map((value) => (value).toJson()).toList(); 1487 _json["access"] = access.map((value) => (value).toJson()).toList();
1373 } 1488 }
1374 if (creationTime != null) { 1489 if (creationTime != null) {
1375 _json["creationTime"] = creationTime; 1490 _json["creationTime"] = creationTime;
1376 } 1491 }
1377 if (datasetReference != null) { 1492 if (datasetReference != null) {
1378 _json["datasetReference"] = (datasetReference).toJson(); 1493 _json["datasetReference"] = (datasetReference).toJson();
1379 } 1494 }
1495 if (defaultTableExpirationMs != null) {
1496 _json["defaultTableExpirationMs"] = defaultTableExpirationMs;
1497 }
1380 if (description != null) { 1498 if (description != null) {
1381 _json["description"] = description; 1499 _json["description"] = description;
1382 } 1500 }
1383 if (etag != null) { 1501 if (etag != null) {
1384 _json["etag"] = etag; 1502 _json["etag"] = etag;
1385 } 1503 }
1386 if (friendlyName != null) { 1504 if (friendlyName != null) {
1387 _json["friendlyName"] = friendlyName; 1505 _json["friendlyName"] = friendlyName;
1388 } 1506 }
1389 if (id != null) { 1507 if (id != null) {
1390 _json["id"] = id; 1508 _json["id"] = id;
1391 } 1509 }
1392 if (kind != null) { 1510 if (kind != null) {
1393 _json["kind"] = kind; 1511 _json["kind"] = kind;
1394 } 1512 }
1395 if (lastModifiedTime != null) { 1513 if (lastModifiedTime != null) {
1396 _json["lastModifiedTime"] = lastModifiedTime; 1514 _json["lastModifiedTime"] = lastModifiedTime;
1397 } 1515 }
1398 if (selfLink != null) { 1516 if (selfLink != null) {
1399 _json["selfLink"] = selfLink; 1517 _json["selfLink"] = selfLink;
1400 } 1518 }
1401 return _json; 1519 return _json;
1402 } 1520 }
1403 } 1521 }
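A sketch of a Dataset resource using the new defaultTableExpirationMs field. The field is a String holding milliseconds; the `role` field of DatasetAccess sits in the collapsed part of that class but is referenced by the access documentation above. Project, dataset and group names are placeholders.

// A dataset whose tables expire after seven days unless overridden, with
// read access granted to a group in addition to the default ACL entries.
var dataset = new bigquery.Dataset()
  ..datasetReference = (new bigquery.DatasetReference()
      ..projectId = 'my-project'
      ..datasetId = 'my_dataset')
  ..defaultTableExpirationMs = '${7 * 24 * 60 * 60 * 1000}'
  ..access = [
    new bigquery.DatasetAccess()
      ..groupByEmail = 'analysts@example.com'
      ..role = 'READER'
  ];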
1404 1522
1405 1523
1406 /** Not documented yet. */
1407 class DatasetListDatasets { 1524 class DatasetListDatasets {
1408 /** 1525 /**
1409 * The dataset reference. Use this property to access specific parts of the 1526 * The dataset reference. Use this property to access specific parts of the
1410 * dataset's ID, such as project ID or dataset ID. 1527 * dataset's ID, such as project ID or dataset ID.
1411 */ 1528 */
1412 DatasetReference datasetReference; 1529 DatasetReference datasetReference;
1413 1530
1414 /** A descriptive name for the dataset, if one exists. */ 1531 /** A descriptive name for the dataset, if one exists. */
1415 core.String friendlyName; 1532 core.String friendlyName;
1416 1533
(...skipping 36 matching lines...)
1453 _json["id"] = id; 1570 _json["id"] = id;
1454 } 1571 }
1455 if (kind != null) { 1572 if (kind != null) {
1456 _json["kind"] = kind; 1573 _json["kind"] = kind;
1457 } 1574 }
1458 return _json; 1575 return _json;
1459 } 1576 }
1460 } 1577 }
1461 1578
1462 1579
1463 /** Not documented yet. */
1464 class DatasetList { 1580 class DatasetList {
1465 /** 1581 /**
1466 * An array of the dataset resources in the project. Each resource contains 1582 * An array of the dataset resources in the project. Each resource contains
1467 * basic information. For full information about a particular dataset 1583 * basic information. For full information about a particular dataset
1468 * resource, use the Datasets: get method. This property is omitted when there 1584 * resource, use the Datasets: get method. This property is omitted when there
1469 * are no datasets in the project. 1585 * are no datasets in the project.
1470 */ 1586 */
1471 core.List<DatasetListDatasets> datasets; 1587 core.List<DatasetListDatasets> datasets;
1472 1588
1473 /** 1589 /**
(...skipping 44 matching lines...)
1518 _json["kind"] = kind; 1634 _json["kind"] = kind;
1519 } 1635 }
1520 if (nextPageToken != null) { 1636 if (nextPageToken != null) {
1521 _json["nextPageToken"] = nextPageToken; 1637 _json["nextPageToken"] = nextPageToken;
1522 } 1638 }
1523 return _json; 1639 return _json;
1524 } 1640 }
1525 } 1641 }
1526 1642
1527 1643
1528 /** Not documented yet. */
1529 class DatasetReference { 1644 class DatasetReference {
1530 /** 1645 /**
1531 * [Required] A unique ID for this dataset, without the project name. The ID 1646 * [Required] A unique ID for this dataset, without the project name. The ID
1532 * must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). 1647 * must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_).
1533 * The maximum length is 1,024 characters. 1648 * The maximum length is 1,024 characters.
1534 */ 1649 */
1535 core.String datasetId; 1650 core.String datasetId;
1536 1651
1537 /** [Optional] The ID of the project containing this dataset. */ 1652 /** [Optional] The ID of the project containing this dataset. */
1538 core.String projectId; 1653 core.String projectId;
(...skipping 16 matching lines...)
1555 _json["datasetId"] = datasetId; 1670 _json["datasetId"] = datasetId;
1556 } 1671 }
1557 if (projectId != null) { 1672 if (projectId != null) {
1558 _json["projectId"] = projectId; 1673 _json["projectId"] = projectId;
1559 } 1674 }
1560 return _json; 1675 return _json;
1561 } 1676 }
1562 } 1677 }
1563 1678
1564 1679
1565 /** Not documented yet. */
1566 class ErrorProto { 1680 class ErrorProto {
1567 /** 1681 /**
1568 * Debugging information. This property is internal to Google and should not 1682 * Debugging information. This property is internal to Google and should not
1569 * be used. 1683 * be used.
1570 */ 1684 */
1571 core.String debugInfo; 1685 core.String debugInfo;
1572 1686
1573 /** Specifies where the error occurred, if present. */ 1687 /** Specifies where the error occurred, if present. */
1574 core.String location; 1688 core.String location;
1575 1689
(...skipping 33 matching lines...)
1609 _json["message"] = message; 1723 _json["message"] = message;
1610 } 1724 }
1611 if (reason != null) { 1725 if (reason != null) {
1612 _json["reason"] = reason; 1726 _json["reason"] = reason;
1613 } 1727 }
1614 return _json; 1728 return _json;
1615 } 1729 }
1616 } 1730 }
1617 1731
1618 1732
1619 /** Not documented yet. */ 1733 class ExternalDataConfiguration {
1734 /**
1735 * [Optional] The compression type of the data source. Possible values include
1736 * GZIP and NONE. The default value is NONE.
1737 */
1738 core.String compression;
1739
1740 /** Additional properties to set if sourceFormat is set to CSV. */
1741 CsvOptions csvOptions;
1742
1743 /**
1744 * [Optional] Indicates if BigQuery should allow extra values that are not
1745 * represented in the table schema. If true, the extra values are ignored. If
1746 * false, records with extra columns are treated as bad records, and if there
1747 * are too many bad records, an invalid error is returned in the job result.
1748 * The default value is false. The sourceFormat property determines what
1749 * BigQuery treats as an extra value: CSV: Trailing columns
1750 */
1751 core.bool ignoreUnknownValues;
1752
1753 /**
1754 * [Optional] The maximum number of bad records that BigQuery can ignore when
1755 * reading data. If the number of bad records exceeds this value, an invalid
1756 * error is returned in the job result. The default value is 0, which requires
1757 * that all records are valid.
1758 */
1759 core.int maxBadRecords;
1760
1761 /** [Required] The schema for the data. */
1762 TableSchema schema;
1763
1764 /**
1765 * [Optional] The data format. External data sources must be in CSV format.
1766 * The default value is CSV.
1767 */
1768 core.String sourceFormat;
1769
1770 /**
1771 * [Required] The fully-qualified URIs that point to your data in Google Cloud
1772 * Storage. Each URI can contain one '*' wildcard character and it must come
1773 * after the 'bucket' name. CSV limits related to load jobs apply to external
1774 * data sources, plus an additional limit of 10 GB maximum size across all
1775 * URIs.
1776 */
1777 core.List<core.String> sourceUris;
1778
1779
1780 ExternalDataConfiguration();
1781
1782 ExternalDataConfiguration.fromJson(core.Map _json) {
1783 if (_json.containsKey("compression")) {
1784 compression = _json["compression"];
1785 }
1786 if (_json.containsKey("csvOptions")) {
1787 csvOptions = new CsvOptions.fromJson(_json["csvOptions"]);
1788 }
1789 if (_json.containsKey("ignoreUnknownValues")) {
1790 ignoreUnknownValues = _json["ignoreUnknownValues"];
1791 }
1792 if (_json.containsKey("maxBadRecords")) {
1793 maxBadRecords = _json["maxBadRecords"];
1794 }
1795 if (_json.containsKey("schema")) {
1796 schema = new TableSchema.fromJson(_json["schema"]);
1797 }
1798 if (_json.containsKey("sourceFormat")) {
1799 sourceFormat = _json["sourceFormat"];
1800 }
1801 if (_json.containsKey("sourceUris")) {
1802 sourceUris = _json["sourceUris"];
1803 }
1804 }
1805
1806 core.Map toJson() {
1807 var _json = new core.Map();
1808 if (compression != null) {
1809 _json["compression"] = compression;
1810 }
1811 if (csvOptions != null) {
1812 _json["csvOptions"] = (csvOptions).toJson();
1813 }
1814 if (ignoreUnknownValues != null) {
1815 _json["ignoreUnknownValues"] = ignoreUnknownValues;
1816 }
1817 if (maxBadRecords != null) {
1818 _json["maxBadRecords"] = maxBadRecords;
1819 }
1820 if (schema != null) {
1821 _json["schema"] = (schema).toJson();
1822 }
1823 if (sourceFormat != null) {
1824 _json["sourceFormat"] = sourceFormat;
1825 }
1826 if (sourceUris != null) {
1827 _json["sourceUris"] = sourceUris;
1828 }
1829 return _json;
1830 }
1831 }
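A sketch tying the pieces of the new class together: a wildcarded Cloud Storage URI, the csvOptions value from the sketch after CsvOptions, and an explicit schema. Bucket and object names are placeholders.

// Describes CSV files in Cloud Storage so they can be queried in place.
var external = new bigquery.ExternalDataConfiguration()
  ..sourceFormat = 'CSV'
  ..sourceUris = ['gs://my-bucket/exports/part-*.csv']
  ..csvOptions = csvOptions
  ..maxBadRecords = 10
  ..schema = (new bigquery.TableSchema()
      ..fields = [
        new bigquery.TableFieldSchema()
          ..name = 'name'
          ..type = 'STRING',
        new bigquery.TableFieldSchema()
          ..name = 'visits'
          ..type = 'INTEGER'
      ]);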
1832
1833
1620 class GetQueryResultsResponse { 1834 class GetQueryResultsResponse {
1621 /** Whether the query result was fetched from the query cache. */ 1835 /** Whether the query result was fetched from the query cache. */
1622 core.bool cacheHit; 1836 core.bool cacheHit;
1623 1837
1624 /** A hash of this response. */ 1838 /** A hash of this response. */
1625 core.String etag; 1839 core.String etag;
1626 1840
1627 /** 1841 /**
1628 * Whether the query has completed or not. If rows or totalRows are present, 1842 * Whether the query has completed or not. If rows or totalRows are present,
1629 * this will always be true. If this is false, totalRows will not be 1843 * this will always be true. If this is false, totalRows will not be
(...skipping 106 matching lines...)
1736 _json["totalBytesProcessed"] = totalBytesProcessed; 1950 _json["totalBytesProcessed"] = totalBytesProcessed;
1737 } 1951 }
1738 if (totalRows != null) { 1952 if (totalRows != null) {
1739 _json["totalRows"] = totalRows; 1953 _json["totalRows"] = totalRows;
1740 } 1954 }
1741 return _json; 1955 return _json;
1742 } 1956 }
1743 } 1957 }
1744 1958
1745 1959
1746 /** Not documented yet. */
1747 class Job { 1960 class Job {
1748 /** [Required] Describes the job configuration. */ 1961 /** [Required] Describes the job configuration. */
1749 JobConfiguration configuration; 1962 JobConfiguration configuration;
1750 1963
1751 /** [Output-only] A hash of this resource. */ 1964 /** [Output-only] A hash of this resource. */
1752 core.String etag; 1965 core.String etag;
1753 1966
1754 /** [Output-only] Opaque ID field of the job */ 1967 /** [Output-only] Opaque ID field of the job */
1755 core.String id; 1968 core.String id;
1756 1969
(...skipping 81 matching lines...)
1838 _json["status"] = (status).toJson(); 2051 _json["status"] = (status).toJson();
1839 } 2052 }
1840 if (userEmail != null) { 2053 if (userEmail != null) {
1841 _json["user_email"] = userEmail; 2054 _json["user_email"] = userEmail;
1842 } 2055 }
1843 return _json; 2056 return _json;
1844 } 2057 }
1845 } 2058 }
1846 2059
1847 2060
1848 /** Not documented yet. */
1849 class JobConfiguration { 2061 class JobConfiguration {
1850 /** [Pick one] Copies a table. */ 2062 /** [Pick one] Copies a table. */
1851 JobConfigurationTableCopy copy; 2063 JobConfigurationTableCopy copy;
1852 2064
1853 /** 2065 /**
1854 * [Optional] If set, don't actually run this job. A valid query will return a 2066 * [Optional] If set, don't actually run this job. A valid query will return a
1855 * mostly empty response with some processing statistics, while an invalid 2067 * mostly empty response with some processing statistics, while an invalid
1856 * query will return the same error it would if it wasn't a dry run. Behavior 2068 * query will return the same error it would if it wasn't a dry run. Behavior
1857 * of non-query jobs is undefined. 2069 * of non-query jobs is undefined.
1858 */ 2070 */
(...skipping 53 matching lines...)
1912 _json["load"] = (load).toJson(); 2124 _json["load"] = (load).toJson();
1913 } 2125 }
1914 if (query != null) { 2126 if (query != null) {
1915 _json["query"] = (query).toJson(); 2127 _json["query"] = (query).toJson();
1916 } 2128 }
1917 return _json; 2129 return _json;
1918 } 2130 }
1919 } 2131 }
1920 2132
1921 2133
1922 /** Not documented yet. */
1923 class JobConfigurationExtract { 2134 class JobConfigurationExtract {
1924 /** 2135 /**
1925 * [Optional] The compression type to use for exported files. Possible values 2136 * [Optional] The compression type to use for exported files. Possible values
1926 * include GZIP and NONE. The default value is NONE. 2137 * include GZIP and NONE. The default value is NONE.
1927 */ 2138 */
1928 core.String compression; 2139 core.String compression;
1929 2140
1930 /** 2141 /**
1931 * [Optional] The exported file format. Possible values include CSV, 2142 * [Optional] The exported file format. Possible values include CSV,
1932 * NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV. Tables with 2143 * NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV. Tables with
(...skipping 77 matching lines...)
2010 _json["printHeader"] = printHeader; 2221 _json["printHeader"] = printHeader;
2011 } 2222 }
2012 if (sourceTable != null) { 2223 if (sourceTable != null) {
2013 _json["sourceTable"] = (sourceTable).toJson(); 2224 _json["sourceTable"] = (sourceTable).toJson();
2014 } 2225 }
2015 return _json; 2226 return _json;
2016 } 2227 }
2017 } 2228 }
2018 2229
2019 2230
2020 /** Not documented yet. */
2021 class JobConfigurationLink { 2231 class JobConfigurationLink {
2022 /** 2232 /**
2023 * [Optional] Specifies whether the job is allowed to create new tables. The 2233 * [Optional] Specifies whether the job is allowed to create new tables. The
2024 * following values are supported: CREATE_IF_NEEDED: If the table does not 2234 * following values are supported: CREATE_IF_NEEDED: If the table does not
2025 * exist, BigQuery creates the table. CREATE_NEVER: The table must already 2235 * exist, BigQuery creates the table. CREATE_NEVER: The table must already
2026 * exist. If it does not, a 'notFound' error is returned in the job result. 2236 * exist. If it does not, a 'notFound' error is returned in the job result.
2027 * The default value is CREATE_IF_NEEDED. Creation, truncation and append 2237 * The default value is CREATE_IF_NEEDED. Creation, truncation and append
2028 * actions occur as one atomic update upon job completion. 2238 * actions occur as one atomic update upon job completion.
2029 */ 2239 */
2030 core.String createDisposition; 2240 core.String createDisposition;
(...skipping 47 matching lines...)
2078 _json["sourceUri"] = sourceUri; 2288 _json["sourceUri"] = sourceUri;
2079 } 2289 }
2080 if (writeDisposition != null) { 2290 if (writeDisposition != null) {
2081 _json["writeDisposition"] = writeDisposition; 2291 _json["writeDisposition"] = writeDisposition;
2082 } 2292 }
2083 return _json; 2293 return _json;
2084 } 2294 }
2085 } 2295 }
2086 2296
2087 2297
2088 /** Not documented yet. */
2089 class JobConfigurationLoad { 2298 class JobConfigurationLoad {
2090 /** 2299 /**
2091 * [Optional] Accept rows that are missing trailing optional columns. The 2300 * [Optional] Accept rows that are missing trailing optional columns. The
2092 * missing values are treated as nulls. Default is false which treats short 2301 * missing values are treated as nulls. If false, records with missing
2093 * rows as errors. Only applicable to CSV, ignored for other formats. 2302 * trailing columns are treated as bad records, and if there are too many bad
2303 * records, an invalid error is returned in the job result. The default value
2304 * is false. Only applicable to CSV, ignored for other formats.
2094 */ 2305 */
2095 core.bool allowJaggedRows; 2306 core.bool allowJaggedRows;
2096 2307
2097 /** 2308 /**
2098 * Indicates if BigQuery should allow quoted data sections that contain 2309 * Indicates if BigQuery should allow quoted data sections that contain
2099 * newline characters in a CSV file. The default value is false. 2310 * newline characters in a CSV file. The default value is false.
2100 */ 2311 */
2101 core.bool allowQuotedNewlines; 2312 core.bool allowQuotedNewlines;
2102 2313
2103 /** 2314 /**
(...skipping 20 matching lines...)
2124 /** 2335 /**
2125 * [Optional] The separator for fields in a CSV file. BigQuery converts the 2336 * [Optional] The separator for fields in a CSV file. BigQuery converts the
2126 * string to ISO-8859-1 encoding, and then uses the first byte of the encoded 2337 * string to ISO-8859-1 encoding, and then uses the first byte of the encoded
2127 * string to split the data in its raw, binary state. BigQuery also supports 2338 * string to split the data in its raw, binary state. BigQuery also supports
2128 * the escape sequence "\t" to specify a tab separator. The default value is a 2339 * the escape sequence "\t" to specify a tab separator. The default value is a
2129 * comma (','). 2340 * comma (',').
2130 */ 2341 */
2131 core.String fieldDelimiter; 2342 core.String fieldDelimiter;
2132 2343
2133 /** 2344 /**
2134 * [Optional] Accept rows that contain values that do not match the schema. 2345 * [Optional] Indicates if BigQuery should allow extra values that are not
2135 * The unknown values are ignored. Default is false which treats unknown 2346 * represented in the table schema. If true, the extra values are ignored. If
2136 * values as errors. For CSV this ignores extra values at the end of a line. 2347 * false, records with extra columns are treated as bad records, and if there
2137 * For JSON this ignores named values that do not match any column name. 2348 * are too many bad records, an invalid error is returned in the job result.
2349 * The default value is false. The sourceFormat property determines what
2350 * BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values
2351 * that don't match any column names
2138 */ 2352 */
2139 core.bool ignoreUnknownValues; 2353 core.bool ignoreUnknownValues;
2140 2354
2141 /** 2355 /**
2142 * [Optional] The maximum number of bad records that BigQuery can ignore when 2356 * [Optional] The maximum number of bad records that BigQuery can ignore when
2143 * running the job. If the number of bad records exceeds this value, an 2357 * running the job. If the number of bad records exceeds this value, an
2144 * 'invalid' error is returned in the job result and the job fails. The 2358 * invalid error is returned in the job result. The default value is 0, which
2145 * default value is 0, which requires that all records are valid. 2359 * requires that all records are valid.
2146 */ 2360 */
2147 core.int maxBadRecords; 2361 core.int maxBadRecords;
2148 2362
2149 /** 2363 /**
2150 * [Experimental] Names(case-sensitive) of properties to keep when importing 2364 * [Experimental] Names(case-sensitive) of properties to keep when importing
2151 * data. If this is populated, only the specified properties will be imported 2365 * data. If this is populated, only the specified properties will be imported
2152 * for each entity. Currently, this is only supported for DATASTORE_BACKUP 2366 * for each entity. Currently, this is only supported for DATASTORE_BACKUP
2153 * imports and only top level properties are supported. If any specified 2367 * imports and only top level properties are supported. If any specified
2154 * property is not found in the Datastore 'Kind' being imported, that is an 2368 * property is not found in the Datastore 'Kind' being imported, that is an
2155 * error. Note: This feature is experimental and can change in the future. 2369 * error. Note: This feature is experimental and can change in the future.
(...skipping 37 matching lines...)
2193 2407
2194 /** 2408 /**
2195 * [Optional] The format of the data files. For CSV files, specify "CSV". For 2409 * [Optional] The format of the data files. For CSV files, specify "CSV". For
2196 * datastore backups, specify "DATASTORE_BACKUP". For newline-delimited JSON, 2410 * datastore backups, specify "DATASTORE_BACKUP". For newline-delimited JSON,
2197 * specify "NEWLINE_DELIMITED_JSON". The default value is CSV. 2411 * specify "NEWLINE_DELIMITED_JSON". The default value is CSV.
2198 */ 2412 */
2199 core.String sourceFormat; 2413 core.String sourceFormat;
2200 2414
2201 /** 2415 /**
2202 * [Required] The fully-qualified URIs that point to your data in Google Cloud 2416 * [Required] The fully-qualified URIs that point to your data in Google Cloud
2203 * Storage. Wildcard names are only supported when they appear at the end of 2417 * Storage. Each URI can contain one '*' wildcard character and it must come
2204 * the URI. 2418 * after the 'bucket' name.
2205 */ 2419 */
2206 core.List<core.String> sourceUris; 2420 core.List<core.String> sourceUris;
2207 2421
2208 /** 2422 /**
2209 * [Optional] Specifies the action that occurs if the destination table 2423 * [Optional] Specifies the action that occurs if the destination table
2210 * already exists. The following values are supported: WRITE_TRUNCATE: If the 2424 * already exists. The following values are supported: WRITE_TRUNCATE: If the
2211 * table already exists, BigQuery overwrites the table data. WRITE_APPEND: If 2425 * table already exists, BigQuery overwrites the table data. WRITE_APPEND: If
2212 * the table already exists, BigQuery appends the data to the table. 2426 * the table already exists, BigQuery appends the data to the table.
2213 * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' 2427 * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate'
2214 * error is returned in the job result. The default value is WRITE_EMPTY. Each 2428 * error is returned in the job result. The default value is WRITE_EMPTY. Each
(...skipping 111 matching lines...)
2326 _json["sourceUris"] = sourceUris; 2540 _json["sourceUris"] = sourceUris;
2327 } 2541 }
2328 if (writeDisposition != null) { 2542 if (writeDisposition != null) {
2329 _json["writeDisposition"] = writeDisposition; 2543 _json["writeDisposition"] = writeDisposition;
2330 } 2544 }
2331 return _json; 2545 return _json;
2332 } 2546 }
2333 } 2547 }
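A sketch of a load configuration exercising the re-documented fields above (allowJaggedRows, ignoreUnknownValues, maxBadRecords, wildcarded sourceUris, writeDisposition). destinationTable sits in the collapsed part of this class; bucket and table names are placeholders. The object would be set as JobConfiguration.load and submitted through the jobs resource defined earlier in this file.

// Appends CSV files from Cloud Storage into an existing table, tolerating
// ragged rows, extra trailing columns, and up to five bad records.
var load = new bigquery.JobConfigurationLoad()
  ..sourceUris = ['gs://my-bucket/data/2015-04-*.csv']
  ..sourceFormat = 'CSV'
  ..allowJaggedRows = true
  ..ignoreUnknownValues = true
  ..maxBadRecords = 5
  ..writeDisposition = 'WRITE_APPEND'
  ..destinationTable = (new bigquery.TableReference()
      ..projectId = 'my-project'
      ..datasetId = 'my_dataset'
      ..tableId = 'events');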
2334 2548
2335 2549
2336 /** Not documented yet. */
2337 class JobConfigurationQuery { 2550 class JobConfigurationQuery {
2338 /** 2551 /**
2339 * If true, allows the query to produce arbitrarily large result tables at a 2552 * If true, allows the query to produce arbitrarily large result tables at a
2340 * slight cost in performance. Requires destinationTable to be set. 2553 * slight cost in performance. Requires destinationTable to be set.
2341 */ 2554 */
2342 core.bool allowLargeResults; 2555 core.bool allowLargeResults;
2343 2556
2344 /** 2557 /**
2345 * [Optional] Specifies whether the job is allowed to create new tables. The 2558 * [Optional] Specifies whether the job is allowed to create new tables. The
2346 * following values are supported: CREATE_IF_NEEDED: If the table does not 2559 * following values are supported: CREATE_IF_NEEDED: If the table does not
(...skipping 29 matching lines...)
2376 /** 2589 /**
2377 * [Optional] Specifies a priority for the query. Possible values include 2590 * [Optional] Specifies a priority for the query. Possible values include
2378 * INTERACTIVE and BATCH. The default value is INTERACTIVE. 2591 * INTERACTIVE and BATCH. The default value is INTERACTIVE.
2379 */ 2592 */
2380 core.String priority; 2593 core.String priority;
2381 2594
2382 /** [Required] BigQuery SQL query to execute. */ 2595 /** [Required] BigQuery SQL query to execute. */
2383 core.String query; 2596 core.String query;
2384 2597
2385 /** 2598 /**
2599 * [Experimental] If querying an external data source outside of BigQuery,
2600 * describes the data format, location and other properties of the data
2601 * source. By defining these properties, the data source can then be queried
2602 * as if it were a standard BigQuery table.
2603 */
2604 core.Map<core.String, ExternalDataConfiguration> tableDefinitions;
2605
2606 /**
2386 * [Optional] Whether to look for the result in the query cache. The query 2607 * [Optional] Whether to look for the result in the query cache. The query
2387 * cache is a best-effort cache that will be flushed whenever tables in the 2608 * cache is a best-effort cache that will be flushed whenever tables in the
2388 * query are modified. Moreover, the query cache is only available when a 2609 * query are modified. Moreover, the query cache is only available when a
2389 * query does not have a destination table specified. 2610 * query does not have a destination table specified.
2390 */ 2611 */
2391 core.bool useQueryCache; 2612 core.bool useQueryCache;
2392 2613
2393 /** 2614 /**
2394 * [Optional] Specifies the action that occurs if the destination table 2615 * [Optional] Specifies the action that occurs if the destination table
2395 * already exists. The following values are supported: WRITE_TRUNCATE: If the 2616 * already exists. The following values are supported: WRITE_TRUNCATE: If the
(...skipping 28 matching lines...)
2424 } 2645 }
2425 if (_json.containsKey("preserveNulls")) { 2646 if (_json.containsKey("preserveNulls")) {
2426 preserveNulls = _json["preserveNulls"]; 2647 preserveNulls = _json["preserveNulls"];
2427 } 2648 }
2428 if (_json.containsKey("priority")) { 2649 if (_json.containsKey("priority")) {
2429 priority = _json["priority"]; 2650 priority = _json["priority"];
2430 } 2651 }
2431 if (_json.containsKey("query")) { 2652 if (_json.containsKey("query")) {
2432 query = _json["query"]; 2653 query = _json["query"];
2433 } 2654 }
2655 if (_json.containsKey("tableDefinitions")) {
2656 tableDefinitions = commons.mapMap(_json["tableDefinitions"], (item) => new ExternalDataConfiguration.fromJson(item));
2657 }
2434 if (_json.containsKey("useQueryCache")) { 2658 if (_json.containsKey("useQueryCache")) {
2435 useQueryCache = _json["useQueryCache"]; 2659 useQueryCache = _json["useQueryCache"];
2436 } 2660 }
2437 if (_json.containsKey("writeDisposition")) { 2661 if (_json.containsKey("writeDisposition")) {
2438 writeDisposition = _json["writeDisposition"]; 2662 writeDisposition = _json["writeDisposition"];
2439 } 2663 }
2440 } 2664 }
2441 2665
2442 core.Map toJson() { 2666 core.Map toJson() {
2443 var _json = new core.Map(); 2667 var _json = new core.Map();
(...skipping 14 matching lines...)
2458 } 2682 }
2459 if (preserveNulls != null) { 2683 if (preserveNulls != null) {
2460 _json["preserveNulls"] = preserveNulls; 2684 _json["preserveNulls"] = preserveNulls;
2461 } 2685 }
2462 if (priority != null) { 2686 if (priority != null) {
2463 _json["priority"] = priority; 2687 _json["priority"] = priority;
2464 } 2688 }
2465 if (query != null) { 2689 if (query != null) {
2466 _json["query"] = query; 2690 _json["query"] = query;
2467 } 2691 }
2692 if (tableDefinitions != null) {
2693 _json["tableDefinitions"] = commons.mapMap(tableDefinitions, (item) => (item).toJson());
2694 }
2468 if (useQueryCache != null) { 2695 if (useQueryCache != null) {
2469 _json["useQueryCache"] = useQueryCache; 2696 _json["useQueryCache"] = useQueryCache;
2470 } 2697 }
2471 if (writeDisposition != null) { 2698 if (writeDisposition != null) {
2472 _json["writeDisposition"] = writeDisposition; 2699 _json["writeDisposition"] = writeDisposition;
2473 } 2700 }
2474 return _json; 2701 return _json;
2475 } 2702 }
2476 } 2703 }
2477 2704
2478 2705
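As a quick illustration of the tableDefinitions field introduced above, here is a minimal usage sketch; the 'external_table' alias and the empty ExternalDataConfiguration value are illustrative placeholders rather than code from this file.

// A minimal sketch: configure a query job so that an external data source can
// be referenced in the SQL text under the placeholder alias 'external_table'.
var externalConfig = new ExternalDataConfiguration.fromJson({});
var queryConfig = new JobConfigurationQuery()
  ..query = 'SELECT COUNT(*) FROM external_table'
  ..priority = 'BATCH'
  ..tableDefinitions = {'external_table': externalConfig};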
2479 /** Not documented yet. */
2480 class JobConfigurationTableCopy { 2706 class JobConfigurationTableCopy {
2481 /** 2707 /**
2482 * [Optional] Specifies whether the job is allowed to create new tables. The 2708 * [Optional] Specifies whether the job is allowed to create new tables. The
2483 * following values are supported: CREATE_IF_NEEDED: If the table does not 2709 * following values are supported: CREATE_IF_NEEDED: If the table does not
2484 * exist, BigQuery creates the table. CREATE_NEVER: The table must already 2710 * exist, BigQuery creates the table. CREATE_NEVER: The table must already
2485 * exist. If it does not, a 'notFound' error is returned in the job result. 2711 * exist. If it does not, a 'notFound' error is returned in the job result.
2486 * The default value is CREATE_IF_NEEDED. Creation, truncation and append 2712 * The default value is CREATE_IF_NEEDED. Creation, truncation and append
2487 * actions occur as one atomic update upon job completion. 2713 * actions occur as one atomic update upon job completion.
2488 */ 2714 */
2489 core.String createDisposition; 2715 core.String createDisposition;
(...skipping 56 matching lines...)
2546 _json["sourceTables"] = sourceTables.map((value) => (value).toJson()).toList(); 2772 _json["sourceTables"] = sourceTables.map((value) => (value).toJson()).toList();
2547 } 2773 }
2548 if (writeDisposition != null) { 2774 if (writeDisposition != null) {
2549 _json["writeDisposition"] = writeDisposition; 2775 _json["writeDisposition"] = writeDisposition;
2550 } 2776 }
2551 return _json; 2777 return _json;
2552 } 2778 }
2553 } 2779 }
2554 2780
2555 2781
2556 /** Not documented yet. */
2557 class JobListJobs { 2782 class JobListJobs {
2558 /** [Full-projection-only] Specifies the job configuration. */ 2783 /** [Full-projection-only] Specifies the job configuration. */
2559 JobConfiguration configuration; 2784 JobConfiguration configuration;
2560 2785
2561 /** A result object that will be present only if the job has failed. */ 2786 /** A result object that will be present only if the job has failed. */
2562 ErrorProto errorResult; 2787 ErrorProto errorResult;
2563 2788
2564 /** Unique opaque ID of the job. */ 2789 /** Unique opaque ID of the job. */
2565 core.String id; 2790 core.String id;
2566 2791
(...skipping 81 matching lines...)
2648 _json["status"] = (status).toJson(); 2873 _json["status"] = (status).toJson();
2649 } 2874 }
2650 if (userEmail != null) { 2875 if (userEmail != null) {
2651 _json["user_email"] = userEmail; 2876 _json["user_email"] = userEmail;
2652 } 2877 }
2653 return _json; 2878 return _json;
2654 } 2879 }
2655 } 2880 }
2656 2881
2657 2882
2658 /** Not documented yet. */
2659 class JobList { 2883 class JobList {
2660 /** A hash of this page of results. */ 2884 /** A hash of this page of results. */
2661 core.String etag; 2885 core.String etag;
2662 2886
2663 /** List of jobs that were requested. */ 2887 /** List of jobs that were requested. */
2664 core.List<JobListJobs> jobs; 2888 core.List<JobListJobs> jobs;
2665 2889
2666 /** The resource type of the response. */ 2890 /** The resource type of the response. */
2667 core.String kind; 2891 core.String kind;
2668 2892
(...skipping 39 matching lines...)
2708 _json["nextPageToken"] = nextPageToken; 2932 _json["nextPageToken"] = nextPageToken;
2709 } 2933 }
2710 if (totalItems != null) { 2934 if (totalItems != null) {
2711 _json["totalItems"] = totalItems; 2935 _json["totalItems"] = totalItems;
2712 } 2936 }
2713 return _json; 2937 return _json;
2714 } 2938 }
2715 } 2939 }
2716 2940
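A small consumption sketch for the JobListJobs/JobList pair above, assuming a JobList value obtained from a jobs list call elsewhere:

// A minimal sketch: walk one page of job listings and flag failed jobs.
void reportFailedJobs(JobList jobList) {
  for (var job in jobList.jobs) {
    if (job.errorResult != null) {
      print('Job ${job.id} failed (submitted by ${job.userEmail}).');
    }
  }
  if (jobList.nextPageToken != null) {
    print('More pages available; resubmit with nextPageToken set.');
  }
}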
2717 2941
2718 /** Not documented yet. */
2719 class JobReference { 2942 class JobReference {
2720 /** 2943 /**
2721 * [Required] The ID of the job. The ID must contain only letters (a-z, A-Z), 2944 * [Required] The ID of the job. The ID must contain only letters (a-z, A-Z),
2722 * numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 2945 * numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024
2723 * characters. 2946 * characters.
2724 */ 2947 */
2725 core.String jobId; 2948 core.String jobId;
2726 2949
2727 /** [Required] The ID of the project containing this job. */ 2950 /** [Required] The ID of the project containing this job. */
2728 core.String projectId; 2951 core.String projectId;
(...skipping 16 matching lines...)
2745 _json["jobId"] = jobId; 2968 _json["jobId"] = jobId;
2746 } 2969 }
2747 if (projectId != null) { 2970 if (projectId != null) {
2748 _json["projectId"] = projectId; 2971 _json["projectId"] = projectId;
2749 } 2972 }
2750 return _json; 2973 return _json;
2751 } 2974 }
2752 } 2975 }
2753 2976
2754 2977
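A minimal sketch of constructing the reference described above; the project ID and job ID are illustrative placeholders.

// A minimal sketch: jobId may only use letters, digits, underscores and
// dashes, and must stay under 1,024 characters.
var jobRef = new JobReference()
  ..projectId = 'my-project'
  ..jobId = 'load_2015_04_07_0001';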
2755 /** Not documented yet. */
2756 class JobStatistics { 2978 class JobStatistics {
2757 /** 2979 /**
2758 * [Output-only] Creation time of this job, in milliseconds since the epoch. 2980 * [Output-only] Creation time of this job, in milliseconds since the epoch.
2759 * This field will be present on all jobs. 2981 * This field will be present on all jobs.
2760 */ 2982 */
2761 core.String creationTime; 2983 core.String creationTime;
2762 2984
2763 /** 2985 /**
2764 * [Output-only] End time of this job, in milliseconds since the epoch. This 2986 * [Output-only] End time of this job, in milliseconds since the epoch. This
2765 * field will be present whenever a job is in the DONE state. 2987 * field will be present whenever a job is in the DONE state.
(...skipping 70 matching lines...)
2836 _json["startTime"] = startTime; 3058 _json["startTime"] = startTime;
2837 } 3059 }
2838 if (totalBytesProcessed != null) { 3060 if (totalBytesProcessed != null) {
2839 _json["totalBytesProcessed"] = totalBytesProcessed; 3061 _json["totalBytesProcessed"] = totalBytesProcessed;
2840 } 3062 }
2841 return _json; 3063 return _json;
2842 } 3064 }
2843 } 3065 }
2844 3066
2845 3067
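Because the job timestamps above are surfaced as strings of epoch milliseconds, a small helper such as the following sketch can be convenient:

// A minimal sketch: creationTime, startTime and endTime hold milliseconds
// since the epoch encoded as strings, so parse before building a DateTime.
DateTime jobCreationTime(JobStatistics stats) =>
    new DateTime.fromMillisecondsSinceEpoch(int.parse(stats.creationTime),
        isUtc: true);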
2846 /** Not documented yet. */
2847 class JobStatistics2 { 3068 class JobStatistics2 {
2848 /** 3069 /**
2849 * [Output-only] Whether the query result was fetched from the query cache. 3070 * [Output-only] Whether the query result was fetched from the query cache.
2850 */ 3071 */
2851 core.bool cacheHit; 3072 core.bool cacheHit;
2852 3073
2853 /** [Output-only] Total bytes processed for this job. */ 3074 /** [Output-only] Total bytes processed for this job. */
2854 core.String totalBytesProcessed; 3075 core.String totalBytesProcessed;
2855 3076
2856 3077
(...skipping 14 matching lines...)
2871 _json["cacheHit"] = cacheHit; 3092 _json["cacheHit"] = cacheHit;
2872 } 3093 }
2873 if (totalBytesProcessed != null) { 3094 if (totalBytesProcessed != null) {
2874 _json["totalBytesProcessed"] = totalBytesProcessed; 3095 _json["totalBytesProcessed"] = totalBytesProcessed;
2875 } 3096 }
2876 return _json; 3097 return _json;
2877 } 3098 }
2878 } 3099 }
2879 3100
2880 3101
2881 /** Not documented yet. */
2882 class JobStatistics3 { 3102 class JobStatistics3 {
2883 /** [Output-only] Number of bytes of source data in a load job. */ 3103
2884 core.String inputFileBytes; 3104 core.String inputFileBytes;
2885 3105
2886 /** [Output-only] Number of source files in a load job. */ 3106 /** [Output-only] Number of source files in a load job. */
2887 core.String inputFiles; 3107 core.String inputFiles;
2888 3108
2889 /** 3109 /**
2890 * [Output-only] Size of the loaded data in bytes. Note that while an import 3110 * [Output-only] Size of the loaded data in bytes. Note that while an import
2891 * job is in the running state, this value may change. 3111 * job is in the running state, this value may change.
(...skipping 36 matching lines...)
2928 _json["outputBytes"] = outputBytes; 3148 _json["outputBytes"] = outputBytes;
2929 } 3149 }
2930 if (outputRows != null) { 3150 if (outputRows != null) {
2931 _json["outputRows"] = outputRows; 3151 _json["outputRows"] = outputRows;
2932 } 3152 }
2933 return _json; 3153 return _json;
2934 } 3154 }
2935 } 3155 }
2936 3156
2937 3157
2938 /** Not documented yet. */
2939 class JobStatistics4 { 3158 class JobStatistics4 {
2940 /** 3159 /**
2941 * [Experimental] Number of files per destination URI or URI pattern specified 3160 * [Experimental] Number of files per destination URI or URI pattern specified
2942 * in the extract configuration. These values will be in the same order as the 3161 * in the extract configuration. These values will be in the same order as the
2943 * URIs specified in the 'destinationUris' field. 3162 * URIs specified in the 'destinationUris' field.
2944 */ 3163 */
2945 core.List<core.String> destinationUriFileCounts; 3164 core.List<core.String> destinationUriFileCounts;
2946 3165
2947 3166
2948 JobStatistics4(); 3167 JobStatistics4();
2949 3168
2950 JobStatistics4.fromJson(core.Map _json) { 3169 JobStatistics4.fromJson(core.Map _json) {
2951 if (_json.containsKey("destinationUriFileCounts")) { 3170 if (_json.containsKey("destinationUriFileCounts")) {
2952 destinationUriFileCounts = _json["destinationUriFileCounts"]; 3171 destinationUriFileCounts = _json["destinationUriFileCounts"];
2953 } 3172 }
2954 } 3173 }
2955 3174
2956 core.Map toJson() { 3175 core.Map toJson() {
2957 var _json = new core.Map(); 3176 var _json = new core.Map();
2958 if (destinationUriFileCounts != null) { 3177 if (destinationUriFileCounts != null) {
2959 _json["destinationUriFileCounts"] = destinationUriFileCounts; 3178 _json["destinationUriFileCounts"] = destinationUriFileCounts;
2960 } 3179 }
2961 return _json; 3180 return _json;
2962 } 3181 }
2963 } 3182 }
2964 3183
2965 3184
2966 /** Not documented yet. */
2967 class JobStatus { 3185 class JobStatus {
2968 /** 3186 /**
2969 * [Output-only] Final error result of the job. If present, indicates that the 3187 * [Output-only] Final error result of the job. If present, indicates that the
2970 * job has completed and was unsuccessful. 3188 * job has completed and was unsuccessful.
2971 */ 3189 */
2972 ErrorProto errorResult; 3190 ErrorProto errorResult;
2973 3191
2974 /** 3192 /**
2975 * [Output-only] All errors encountered during the running of the job. Errors 3193 * [Output-only] All errors encountered during the running of the job. Errors
2976 * here do not necessarily mean that the job has completed or was 3194 * here do not necessarily mean that the job has completed or was
(...skipping 66 matching lines...)
3043 void clear() { 3261 void clear() {
3044 _innerMap.clear(); 3262 _innerMap.clear();
3045 } 3263 }
3046 3264
3047 core.Iterable<core.String> get keys => _innerMap.keys; 3265 core.Iterable<core.String> get keys => _innerMap.keys;
3048 3266
3049 core.Object remove(core.Object key) => _innerMap.remove(key); 3267 core.Object remove(core.Object key) => _innerMap.remove(key);
3050 } 3268 }
3051 3269
3052 3270
3053 /** Not documented yet. */
3054 class ProjectListProjects { 3271 class ProjectListProjects {
3055 /** A descriptive name for this project. */ 3272 /** A descriptive name for this project. */
3056 core.String friendlyName; 3273 core.String friendlyName;
3057 3274
3058 /** An opaque ID of this project. */ 3275 /** An opaque ID of this project. */
3059 core.String id; 3276 core.String id;
3060 3277
3061 /** The resource type. */ 3278 /** The resource type. */
3062 core.String kind; 3279 core.String kind;
3063 3280
(...skipping 39 matching lines...)
3103 _json["numericId"] = numericId; 3320 _json["numericId"] = numericId;
3104 } 3321 }
3105 if (projectReference != null) { 3322 if (projectReference != null) {
3106 _json["projectReference"] = (projectReference).toJson(); 3323 _json["projectReference"] = (projectReference).toJson();
3107 } 3324 }
3108 return _json; 3325 return _json;
3109 } 3326 }
3110 } 3327 }
3111 3328
3112 3329
3113 /** Not documented yet. */
3114 class ProjectList { 3330 class ProjectList {
3115 /** A hash of the page of results */ 3331 /** A hash of the page of results */
3116 core.String etag; 3332 core.String etag;
3117 3333
3118 /** The type of list. */ 3334 /** The type of list. */
3119 core.String kind; 3335 core.String kind;
3120 3336
3121 /** A token to request the next page of results. */ 3337 /** A token to request the next page of results. */
3122 core.String nextPageToken; 3338 core.String nextPageToken;
3123 3339
(...skipping 39 matching lines...)
3163 _json["projects"] = projects.map((value) => (value).toJson()).toList(); 3379 _json["projects"] = projects.map((value) => (value).toJson()).toList();
3164 } 3380 }
3165 if (totalItems != null) { 3381 if (totalItems != null) {
3166 _json["totalItems"] = totalItems; 3382 _json["totalItems"] = totalItems;
3167 } 3383 }
3168 return _json; 3384 return _json;
3169 } 3385 }
3170 } 3386 }
3171 3387
3172 3388
3173 /** Not documented yet. */
3174 class ProjectReference { 3389 class ProjectReference {
3175 /** 3390 /**
3176 * [Required] ID of the project. Can be either the numeric ID or the assigned 3391 * [Required] ID of the project. Can be either the numeric ID or the assigned
3177 * ID of the project. 3392 * ID of the project.
3178 */ 3393 */
3179 core.String projectId; 3394 core.String projectId;
3180 3395
3181 3396
3182 ProjectReference(); 3397 ProjectReference();
3183 3398
3184 ProjectReference.fromJson(core.Map _json) { 3399 ProjectReference.fromJson(core.Map _json) {
3185 if (_json.containsKey("projectId")) { 3400 if (_json.containsKey("projectId")) {
3186 projectId = _json["projectId"]; 3401 projectId = _json["projectId"];
3187 } 3402 }
3188 } 3403 }
3189 3404
3190 core.Map toJson() { 3405 core.Map toJson() {
3191 var _json = new core.Map(); 3406 var _json = new core.Map();
3192 if (projectId != null) { 3407 if (projectId != null) {
3193 _json["projectId"] = projectId; 3408 _json["projectId"] = projectId;
3194 } 3409 }
3195 return _json; 3410 return _json;
3196 } 3411 }
3197 } 3412 }
3198 3413
3199 3414
3200 /** Not documented yet. */
3201 class QueryRequest { 3415 class QueryRequest {
3202 /** 3416 /**
3203 * [Optional] Specifies the default datasetId and projectId to assume for any 3417 * [Optional] Specifies the default datasetId and projectId to assume for any
3204 * unqualified table names in the query. If not set, all table names in the 3418 * unqualified table names in the query. If not set, all table names in the
3205 * query string must be qualified in the format 'datasetId.tableId'. 3419 * query string must be qualified in the format 'datasetId.tableId'.
3206 */ 3420 */
3207 DatasetReference defaultDataset; 3421 DatasetReference defaultDataset;
3208 3422
3209 /** 3423 /**
3210 * [Optional] If set, don't actually run this job. A valid query will return a 3424 * [Optional] If set, don't actually run this job. A valid query will return a
(...skipping 96 matching lines...)
3307 _json["timeoutMs"] = timeoutMs; 3521 _json["timeoutMs"] = timeoutMs;
3308 } 3522 }
3309 if (useQueryCache != null) { 3523 if (useQueryCache != null) {
3310 _json["useQueryCache"] = useQueryCache; 3524 _json["useQueryCache"] = useQueryCache;
3311 } 3525 }
3312 return _json; 3526 return _json;
3313 } 3527 }
3314 } 3528 }
3315 3529
3316 3530
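A minimal request sketch follows; the query and timeoutMs field names, and the projectId/datasetId fields of DatasetReference, are assumed to match the REST resource since their declarations fall in the elided lines above.

// A minimal sketch: an interactive query scoped to a default dataset.
// Identifiers are illustrative placeholders.
var request = new QueryRequest()
  ..defaultDataset = (new DatasetReference()
    ..projectId = 'my-project'
    ..datasetId = 'my_dataset')
  ..query = 'SELECT COUNT(*) FROM my_table'
  ..timeoutMs = 10000
  ..useQueryCache = true;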
3317 /** Not documented yet. */
3318 class QueryResponse { 3531 class QueryResponse {
3319 /** Whether the query result was fetched from the query cache. */ 3532 /** Whether the query result was fetched from the query cache. */
3320 core.bool cacheHit; 3533 core.bool cacheHit;
3321 3534
3322 /** 3535 /**
3323 * Whether the query has completed or not. If rows or totalRows are present, 3536 * Whether the query has completed or not. If rows or totalRows are present,
3324 * this will always be true. If this is false, totalRows will not be 3537 * this will always be true. If this is false, totalRows will not be
3325 * available. 3538 * available.
3326 */ 3539 */
3327 core.bool jobComplete; 3540 core.bool jobComplete;
(...skipping 99 matching lines...)
3427 _json["totalBytesProcessed"] = totalBytesProcessed; 3640 _json["totalBytesProcessed"] = totalBytesProcessed;
3428 } 3641 }
3429 if (totalRows != null) { 3642 if (totalRows != null) {
3430 _json["totalRows"] = totalRows; 3643 _json["totalRows"] = totalRows;
3431 } 3644 }
3432 return _json; 3645 return _json;
3433 } 3646 }
3434 } 3647 }
3435 3648
3436 3649
3437 /** Not documented yet. */
3438 class Table { 3650 class Table {
3439 /** 3651 /**
3440 * [Output-only] The time when this table was created, in milliseconds since 3652 * [Output-only] The time when this table was created, in milliseconds since
3441 * the epoch. 3653 * the epoch.
3442 */ 3654 */
3443 core.String creationTime; 3655 core.String creationTime;
3444 3656
3445 /** [Optional] A user-friendly description of this table. */ 3657 /** [Optional] A user-friendly description of this table. */
3446 core.String description; 3658 core.String description;
3447 3659
(...skipping 156 matching lines...)
3604 } 3816 }
3605 3817
3606 3818
3607 /** 3819 /**
3608 * Represents a single cell in the result set. Users of the java client can 3820 * Represents a single cell in the result set. Users of the java client can
3609 * detect whether their value result is null by calling 3821 * detect whether their value result is null by calling
3610 * 'com.google.api.client.util.Data.isNull(cell.getV())'. 3822 * 'com.google.api.client.util.Data.isNull(cell.getV())'.
3611 */ 3823 */
3612 class TableCell { 3824 class TableCell {
3613 /** 3825 /**
3614 * Not documented yet. 3826 *
3615 * 3827 *
3616 * The values for Object must be JSON objects. It can consist of `num`, 3828 * The values for Object must be JSON objects. It can consist of `num`,
3617 * `String`, `bool` and `null` as well as `Map` and `List` values. 3829 * `String`, `bool` and `null` as well as `Map` and `List` values.
3618 */ 3830 */
3619 core.Object v; 3831 core.Object v;
3620 3832
3621 3833
3622 TableCell(); 3834 TableCell();
3623 3835
3624 TableCell.fromJson(core.Map _json) { 3836 TableCell.fromJson(core.Map _json) {
3625 if (_json.containsKey("v")) { 3837 if (_json.containsKey("v")) {
3626 v = _json["v"]; 3838 v = _json["v"];
3627 } 3839 }
3628 } 3840 }
3629 3841
3630 core.Map toJson() { 3842 core.Map toJson() {
3631 var _json = new core.Map(); 3843 var _json = new core.Map();
3632 if (v != null) { 3844 if (v != null) {
3633 _json["v"] = v; 3845 _json["v"] = v;
3634 } 3846 }
3635 return _json; 3847 return _json;
3636 } 3848 }
3637 } 3849 }
3638 3850
3639 3851
3640 /** Not documented yet. */
3641 class TableDataInsertAllRequestRows { 3852 class TableDataInsertAllRequestRows {
3642 /** 3853 /**
3643 * [Optional] A unique ID for each row. BigQuery uses this property to detect 3854 * [Optional] A unique ID for each row. BigQuery uses this property to detect
3644 * duplicate insertion requests on a best-effort basis. 3855 * duplicate insertion requests on a best-effort basis.
3645 */ 3856 */
3646 core.String insertId; 3857 core.String insertId;
3647 3858
3648 /** 3859 /**
3649 * [Required] A JSON object that contains a row of data. The object's 3860 * [Required] A JSON object that contains a row of data. The object's
3650 * properties and values must match the destination table's schema. 3861 * properties and values must match the destination table's schema.
(...skipping 18 matching lines...)
3669 _json["insertId"] = insertId; 3880 _json["insertId"] = insertId;
3670 } 3881 }
3671 if (json != null) { 3882 if (json != null) {
3672 _json["json"] = json; 3883 _json["json"] = json;
3673 } 3884 }
3674 return _json; 3885 return _json;
3675 } 3886 }
3676 } 3887 }
3677 3888
3678 3889
3679 /** Not documented yet. */
3680 class TableDataInsertAllRequest { 3890 class TableDataInsertAllRequest {
3891 /**
3892 * [Optional] Accept rows that contain values that do not match the schema.
3893 * The unknown values are ignored. Default is false, which treats unknown
3894 * values as errors.
3895 */
3896 core.bool ignoreUnknownValues;
3897
3681 /** The resource type of the response. */ 3898 /** The resource type of the response. */
3682 core.String kind; 3899 core.String kind;
3683 3900
3684 /** The rows to insert. */ 3901 /** The rows to insert. */
3685 core.List<TableDataInsertAllRequestRows> rows; 3902 core.List<TableDataInsertAllRequestRows> rows;
3686 3903
3904 /**
3905 * [Optional] Insert all valid rows of a request, even if invalid rows exist.
3906 * The default value is false, which causes the entire request to fail if any
3907 * invalid rows exist.
3908 */
3909 core.bool skipInvalidRows;
3910
3687 3911
3688 TableDataInsertAllRequest(); 3912 TableDataInsertAllRequest();
3689 3913
3690 TableDataInsertAllRequest.fromJson(core.Map _json) { 3914 TableDataInsertAllRequest.fromJson(core.Map _json) {
3915 if (_json.containsKey("ignoreUnknownValues")) {
3916 ignoreUnknownValues = _json["ignoreUnknownValues"];
3917 }
3691 if (_json.containsKey("kind")) { 3918 if (_json.containsKey("kind")) {
3692 kind = _json["kind"]; 3919 kind = _json["kind"];
3693 } 3920 }
3694 if (_json.containsKey("rows")) { 3921 if (_json.containsKey("rows")) {
3695 rows = _json["rows"].map((value) => new TableDataInsertAllRequestRows.fromJson(value)).toList(); 3922 rows = _json["rows"].map((value) => new TableDataInsertAllRequestRows.fromJson(value)).toList();
3696 } 3923 }
3924 if (_json.containsKey("skipInvalidRows")) {
3925 skipInvalidRows = _json["skipInvalidRows"];
3926 }
3697 } 3927 }
3698 3928
3699 core.Map toJson() { 3929 core.Map toJson() {
3700 var _json = new core.Map(); 3930 var _json = new core.Map();
3931 if (ignoreUnknownValues != null) {
3932 _json["ignoreUnknownValues"] = ignoreUnknownValues;
3933 }
3701 if (kind != null) { 3934 if (kind != null) {
3702 _json["kind"] = kind; 3935 _json["kind"] = kind;
3703 } 3936 }
3704 if (rows != null) { 3937 if (rows != null) {
3705 _json["rows"] = rows.map((value) => (value).toJson()).toList(); 3938 _json["rows"] = rows.map((value) => (value).toJson()).toList();
3706 } 3939 }
3940 if (skipInvalidRows != null) {
3941 _json["skipInvalidRows"] = skipInvalidRows;
3942 }
3707 return _json; 3943 return _json;
3708 } 3944 }
3709 } 3945 }
3710 3946
3711 3947
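To show the two flags added in this revision in context, here is a minimal sketch; the rows list is left empty because the shape of the per-row json payload is defined in the elided lines above.

// A minimal sketch: tolerate unknown fields and keep inserting valid rows
// even when some rows are invalid. Populate rows with
// TableDataInsertAllRequestRows values carrying insertId and json.
var insertRequest = new TableDataInsertAllRequest()
  ..ignoreUnknownValues = true
  ..skipInvalidRows = true
  ..rows = <TableDataInsertAllRequestRows>[];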
3712 /** Not documented yet. */
3713 class TableDataInsertAllResponseInsertErrors { 3948 class TableDataInsertAllResponseInsertErrors {
3714 /** Error information for the row indicated by the index property. */ 3949 /** Error information for the row indicated by the index property. */
3715 core.List<ErrorProto> errors; 3950 core.List<ErrorProto> errors;
3716 3951
3717 /** The index of the row that error applies to. */ 3952 /** The index of the row that error applies to. */
3718 core.int index; 3953 core.int index;
3719 3954
3720 3955
3721 TableDataInsertAllResponseInsertErrors(); 3956 TableDataInsertAllResponseInsertErrors();
3722 3957
(...skipping 12 matching lines...)
3735 _json["errors"] = errors.map((value) => (value).toJson()).toList(); 3970 _json["errors"] = errors.map((value) => (value).toJson()).toList();
3736 } 3971 }
3737 if (index != null) { 3972 if (index != null) {
3738 _json["index"] = index; 3973 _json["index"] = index;
3739 } 3974 }
3740 return _json; 3975 return _json;
3741 } 3976 }
3742 } 3977 }
3743 3978
3744 3979
3745 /** Not documented yet. */
3746 class TableDataInsertAllResponse { 3980 class TableDataInsertAllResponse {
3747 /** An array of errors for rows that were not inserted. */ 3981 /** An array of errors for rows that were not inserted. */
3748 core.List<TableDataInsertAllResponseInsertErrors> insertErrors; 3982 core.List<TableDataInsertAllResponseInsertErrors> insertErrors;
3749 3983
3750 /** The resource type of the response. */ 3984 /** The resource type of the response. */
3751 core.String kind; 3985 core.String kind;
3752 3986
3753 3987
3754 TableDataInsertAllResponse(); 3988 TableDataInsertAllResponse();
3755 3989
(...skipping 12 matching lines...)
3768 _json["insertErrors"] = insertErrors.map((value) => (value).toJson()).toList(); 4002 _json["insertErrors"] = insertErrors.map((value) => (value).toJson()).toList();
3769 } 4003 }
3770 if (kind != null) { 4004 if (kind != null) {
3771 _json["kind"] = kind; 4005 _json["kind"] = kind;
3772 } 4006 }
3773 return _json; 4007 return _json;
3774 } 4008 }
3775 } 4009 }
3776 4010
3777 4011
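A short sketch of checking the response above for per-row failures:

// A minimal sketch: report which rows were rejected and how many errors
// each rejected row carried.
void reportInsertErrors(TableDataInsertAllResponse response) {
  if (response.insertErrors == null) return;
  for (var rowError in response.insertErrors) {
    print('Row ${rowError.index}: ${rowError.errors.length} error(s).');
  }
}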
3778 /** Not documented yet. */
3779 class TableDataList { 4012 class TableDataList {
3780 /** A hash of this page of results. */ 4013 /** A hash of this page of results. */
3781 core.String etag; 4014 core.String etag;
3782 4015
3783 /** The resource type of the response. */ 4016 /** The resource type of the response. */
3784 core.String kind; 4017 core.String kind;
3785 4018
3786 /** 4019 /**
3787 * A token used for paging results. Providing this token instead of the 4020 * A token used for paging results. Providing this token instead of the
3788 * startIndex parameter can help you retrieve stable results when an 4021 * startIndex parameter can help you retrieve stable results when an
(...skipping 43 matching lines...)
3832 _json["rows"] = rows.map((value) => (value).toJson()).toList(); 4065 _json["rows"] = rows.map((value) => (value).toJson()).toList();
3833 } 4066 }
3834 if (totalRows != null) { 4067 if (totalRows != null) {
3835 _json["totalRows"] = totalRows; 4068 _json["totalRows"] = totalRows;
3836 } 4069 }
3837 return _json; 4070 return _json;
3838 } 4071 }
3839 } 4072 }
3840 4073
3841 4074
3842 /** Not documented yet. */
3843 class TableFieldSchema { 4075 class TableFieldSchema {
3844 /** 4076 /**
3845 * [Optional] The field description. The maximum length is 16K characters. 4077 * [Optional] The field description. The maximum length is 16K characters.
3846 */ 4078 */
3847 core.String description; 4079 core.String description;
3848 4080
3849 /** 4081 /**
3850 * [Optional] Describes the nested schema fields if the type property is set 4082 * [Optional] Describes the nested schema fields if the type property is set
3851 * to RECORD. 4083 * to RECORD.
3852 */ 4084 */
(...skipping 55 matching lines...)
3908 _json["name"] = name; 4140 _json["name"] = name;
3909 } 4141 }
3910 if (type != null) { 4142 if (type != null) {
3911 _json["type"] = type; 4143 _json["type"] = type;
3912 } 4144 }
3913 return _json; 4145 return _json;
3914 } 4146 }
3915 } 4147 }
3916 4148
3917 4149
3918 /** Not documented yet. */
3919 class TableListTables { 4150 class TableListTables {
3920 /** The user-friendly name for this table. */ 4151 /** The user-friendly name for this table. */
3921 core.String friendlyName; 4152 core.String friendlyName;
3922 4153
3923 /** An opaque ID of the table */ 4154 /** An opaque ID of the table */
3924 core.String id; 4155 core.String id;
3925 4156
3926 /** The resource type. */ 4157 /** The resource type. */
3927 core.String kind; 4158 core.String kind;
3928 4159
(...skipping 39 matching lines...)
3968 _json["tableReference"] = (tableReference).toJson(); 4199 _json["tableReference"] = (tableReference).toJson();
3969 } 4200 }
3970 if (type != null) { 4201 if (type != null) {
3971 _json["type"] = type; 4202 _json["type"] = type;
3972 } 4203 }
3973 return _json; 4204 return _json;
3974 } 4205 }
3975 } 4206 }
3976 4207
3977 4208
3978 /** Not documented yet. */
3979 class TableList { 4209 class TableList {
3980 /** A hash of this page of results. */ 4210 /** A hash of this page of results. */
3981 core.String etag; 4211 core.String etag;
3982 4212
3983 /** The type of list. */ 4213 /** The type of list. */
3984 core.String kind; 4214 core.String kind;
3985 4215
3986 /** A token to request the next page of results. */ 4216 /** A token to request the next page of results. */
3987 core.String nextPageToken; 4217 core.String nextPageToken;
3988 4218
(...skipping 39 matching lines...)
4028 _json["tables"] = tables.map((value) => (value).toJson()).toList(); 4258 _json["tables"] = tables.map((value) => (value).toJson()).toList();
4029 } 4259 }
4030 if (totalItems != null) { 4260 if (totalItems != null) {
4031 _json["totalItems"] = totalItems; 4261 _json["totalItems"] = totalItems;
4032 } 4262 }
4033 return _json; 4263 return _json;
4034 } 4264 }
4035 } 4265 }
4036 4266
4037 4267
4038 /** Not documented yet. */
4039 class TableReference { 4268 class TableReference {
4040 /** [Required] The ID of the dataset containing this table. */ 4269 /** [Required] The ID of the dataset containing this table. */
4041 core.String datasetId; 4270 core.String datasetId;
4042 4271
4043 /** [Required] The ID of the project containing this table. */ 4272 /** [Required] The ID of the project containing this table. */
4044 core.String projectId; 4273 core.String projectId;
4045 4274
4046 /** 4275 /**
4047 * [Required] The ID of the table. The ID must contain only letters (a-z, 4276 * [Required] The ID of the table. The ID must contain only letters (a-z,
4048 * A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 4277 * A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024
(...skipping 29 matching lines...)
4078 } 4307 }
4079 return _json; 4308 return _json;
4080 } 4309 }
4081 } 4310 }
4082 4311
4083 4312
4084 /** 4313 /**
4085 * Represents a single row in the result set, consisting of one or more fields. 4314 * Represents a single row in the result set, consisting of one or more fields.
4086 */ 4315 */
4087 class TableRow { 4316 class TableRow {
4088 /** Not documented yet. */
4089 core.List<TableCell> f; 4317 core.List<TableCell> f;
4090 4318
4091 4319
4092 TableRow(); 4320 TableRow();
4093 4321
4094 TableRow.fromJson(core.Map _json) { 4322 TableRow.fromJson(core.Map _json) {
4095 if (_json.containsKey("f")) { 4323 if (_json.containsKey("f")) {
4096 f = _json["f"].map((value) => new TableCell.fromJson(value)).toList(); 4324 f = _json["f"].map((value) => new TableCell.fromJson(value)).toList();
4097 } 4325 }
4098 } 4326 }
4099 4327
4100 core.Map toJson() { 4328 core.Map toJson() {
4101 var _json = new core.Map(); 4329 var _json = new core.Map();
4102 if (f != null) { 4330 if (f != null) {
4103 _json["f"] = f.map((value) => (value).toJson()).toList(); 4331 _json["f"] = f.map((value) => (value).toJson()).toList();
4104 } 4332 }
4105 return _json; 4333 return _json;
4106 } 4334 }
4107 } 4335 }
4108 4336
4109 4337
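A small sketch of reading the row type above; a null cell value corresponds to a SQL NULL, as noted in the TableCell comment:

// A minimal sketch: flatten one result row into its raw cell values.
List rowValues(TableRow row) => row.f.map((cell) => cell.v).toList();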
4110 /** Not documented yet. */
4111 class TableSchema { 4338 class TableSchema {
4112 /** Describes the fields in a table. */ 4339 /** Describes the fields in a table. */
4113 core.List<TableFieldSchema> fields; 4340 core.List<TableFieldSchema> fields;
4114 4341
4115 4342
4116 TableSchema(); 4343 TableSchema();
4117 4344
4118 TableSchema.fromJson(core.Map _json) { 4345 TableSchema.fromJson(core.Map _json) {
4119 if (_json.containsKey("fields")) { 4346 if (_json.containsKey("fields")) {
4120 fields = _json["fields"].map((value) => new TableFieldSchema.fromJson(value)).toList(); 4347 fields = _json["fields"].map((value) => new TableFieldSchema.fromJson(value)).toList();
4121 } 4348 }
4122 } 4349 }
4123 4350
4124 core.Map toJson() { 4351 core.Map toJson() {
4125 var _json = new core.Map(); 4352 var _json = new core.Map();
4126 if (fields != null) { 4353 if (fields != null) {
4127 _json["fields"] = fields.map((value) => (value).toJson()).toList(); 4354 _json["fields"] = fields.map((value) => (value).toJson()).toList();
4128 } 4355 }
4129 return _json; 4356 return _json;
4130 } 4357 }
4131 } 4358 }
4132 4359
4133 4360
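A minimal sketch of building the schema types above by hand; the column names are illustrative, and nested fields require type 'RECORD' as documented in TableFieldSchema:

// A minimal sketch: a flat STRING column plus a nested RECORD column.
var schema = new TableSchema()
  ..fields = [
    new TableFieldSchema()
      ..name = 'name'
      ..type = 'STRING',
    new TableFieldSchema()
      ..name = 'address'
      ..type = 'RECORD'
      ..fields = [
        new TableFieldSchema()
          ..name = 'city'
          ..type = 'STRING'
      ]
  ];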
4134 /** Not documented yet. */
4135 class ViewDefinition { 4361 class ViewDefinition {
4136 /** [Required] A query that BigQuery executes when the view is referenced. */ 4362 /** [Required] A query that BigQuery executes when the view is referenced. */
4137 core.String query; 4363 core.String query;
4138 4364
4139 4365
4140 ViewDefinition(); 4366 ViewDefinition();
4141 4367
4142 ViewDefinition.fromJson(core.Map _json) { 4368 ViewDefinition.fromJson(core.Map _json) {
4143 if (_json.containsKey("query")) { 4369 if (_json.containsKey("query")) {
4144 query = _json["query"]; 4370 query = _json["query"];
4145 } 4371 }
4146 } 4372 }
4147 4373
4148 core.Map toJson() { 4374 core.Map toJson() {
4149 var _json = new core.Map(); 4375 var _json = new core.Map();
4150 if (query != null) { 4376 if (query != null) {
4151 _json["query"] = query; 4377 _json["query"] = query;
4152 } 4378 }
4153 return _json; 4379 return _json;
4154 } 4380 }
4155 } 4381 }
4156
4157
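Finally, a minimal sketch for the view type above; the dataset and table names are placeholders.

// A minimal sketch: a view is defined purely by the query it runs.
var view = new ViewDefinition()
  ..query = 'SELECT name, score FROM my_dataset.scores WHERE score > 0';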