| OLD | NEW |
| 1 // This is a generated file (see the discoveryapis_generator project). | 1 // This is a generated file (see the discoveryapis_generator project). |
| 2 | 2 |
| 3 library googleapis.bigquery.v2; | 3 library googleapis.bigquery.v2; |
| 4 | 4 |
| 5 import 'dart:core' as core; | 5 import 'dart:core' as core; |
| 6 import 'dart:collection' as collection; | 6 import 'dart:collection' as collection; |
| 7 import 'dart:async' as async; | 7 import 'dart:async' as async; |
| 8 import 'dart:convert' as convert; | 8 import 'dart:convert' as convert; |
| 9 | 9 |
| 10 import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; | 10 import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; |
| 11 import 'package:http/http.dart' as http; | 11 import 'package:http/http.dart' as http; |
| 12 | 12 |
| 13 export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' show | 13 export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' |
| 14 ApiRequestError, DetailedApiRequestError, Media, UploadOptions, | 14 show |
| 15 ResumableUploadOptions, DownloadOptions, PartialDownloadOptions, | 15 ApiRequestError, |
| 16 ByteRange; | 16 DetailedApiRequestError, |
| 17 Media, |
| 18 UploadOptions, |
| 19 ResumableUploadOptions, |
| 20 DownloadOptions, |
| 21 PartialDownloadOptions, |
| 22 ByteRange; |
| 17 | 23 |
| 18 const core.String USER_AGENT = 'dart-api-client bigquery/v2'; | 24 const core.String USER_AGENT = 'dart-api-client bigquery/v2'; |
| 19 | 25 |
| 20 /** A data platform for customers to create, manage, share and query data. */ | 26 /// A data platform for customers to create, manage, share and query data. |
| 21 class BigqueryApi { | 27 class BigqueryApi { |
| 22 /** View and manage your data in Google BigQuery */ | 28 /// View and manage your data in Google BigQuery |
| 23 static const BigqueryScope = "https://www.googleapis.com/auth/bigquery"; | 29 static const BigqueryScope = "https://www.googleapis.com/auth/bigquery"; |
| 24 | 30 |
| 25 /** Insert data into Google BigQuery */ | 31 /// Insert data into Google BigQuery |
| 26 static const BigqueryInsertdataScope = "https://www.googleapis.com/auth/bigquery.insertdata"; | 32 static const BigqueryInsertdataScope = |
| 33 "https://www.googleapis.com/auth/bigquery.insertdata"; |
| 27 | 34 |
| 28 /** View and manage your data across Google Cloud Platform services */ | 35 /// View and manage your data across Google Cloud Platform services |
| 29 static const CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"; | 36 static const CloudPlatformScope = |
| 37 "https://www.googleapis.com/auth/cloud-platform"; |
| 30 | 38 |
| 31 /** View your data across Google Cloud Platform services */ | 39 /// View your data across Google Cloud Platform services |
| 32 static const CloudPlatformReadOnlyScope = "https://www.googleapis.com/auth/cloud-platform.read-only"; | 40 static const CloudPlatformReadOnlyScope = |
| 41 "https://www.googleapis.com/auth/cloud-platform.read-only"; |
| 33 | 42 |
| 34 /** Manage your data and permissions in Google Cloud Storage */ | 43 /// Manage your data and permissions in Google Cloud Storage |
| 35 static const DevstorageFullControlScope = "https://www.googleapis.com/auth/devstorage.full_control"; | 44 static const DevstorageFullControlScope = |
| 45 "https://www.googleapis.com/auth/devstorage.full_control"; |
| 36 | 46 |
| 37 /** View your data in Google Cloud Storage */ | 47 /// View your data in Google Cloud Storage |
| 38 static const DevstorageReadOnlyScope = "https://www.googleapis.com/auth/devstorage.read_only"; | 48 static const DevstorageReadOnlyScope = |
| 49 "https://www.googleapis.com/auth/devstorage.read_only"; |
| 39 | 50 |
| 40 /** Manage your data in Google Cloud Storage */ | 51 /// Manage your data in Google Cloud Storage |
| 41 static const DevstorageReadWriteScope = "https://www.googleapis.com/auth/devstorage.read_write"; | 52 static const DevstorageReadWriteScope = |
| 42 | 53 "https://www.googleapis.com/auth/devstorage.read_write"; |
| 43 | 54 |
| 44 final commons.ApiRequester _requester; | 55 final commons.ApiRequester _requester; |
| 45 | 56 |
| 46 DatasetsResourceApi get datasets => new DatasetsResourceApi(_requester); | 57 DatasetsResourceApi get datasets => new DatasetsResourceApi(_requester); |
| 47 JobsResourceApi get jobs => new JobsResourceApi(_requester); | 58 JobsResourceApi get jobs => new JobsResourceApi(_requester); |
| 48 ProjectsResourceApi get projects => new ProjectsResourceApi(_requester); | 59 ProjectsResourceApi get projects => new ProjectsResourceApi(_requester); |
| 49 TabledataResourceApi get tabledata => new TabledataResourceApi(_requester); | 60 TabledataResourceApi get tabledata => new TabledataResourceApi(_requester); |
| 50 TablesResourceApi get tables => new TablesResourceApi(_requester); | 61 TablesResourceApi get tables => new TablesResourceApi(_requester); |
| 51 | 62 |
| 52 BigqueryApi(http.Client client, {core.String rootUrl: "https://www.googleapis.com/", core.String servicePath: "bigquery/v2/"}) : | 63 BigqueryApi(http.Client client, |
| 53 _requester = new commons.ApiRequester(client, rootUrl, servicePath, USER_AGENT); | 64 {core.String rootUrl: "https://www.googleapis.com/", |
| 65 core.String servicePath: "bigquery/v2/"}) |
| 66 : _requester = |
| 67 new commons.ApiRequester(client, rootUrl, servicePath, USER_AGENT); |
| 54 } | 68 } |
| 55 | 69 |
| 56 | |
| 57 class DatasetsResourceApi { | 70 class DatasetsResourceApi { |
| 58 final commons.ApiRequester _requester; | 71 final commons.ApiRequester _requester; |
| 59 | 72 |
| 60 DatasetsResourceApi(commons.ApiRequester client) : | 73 DatasetsResourceApi(commons.ApiRequester client) : _requester = client; |
| 61 _requester = client; | |
| 62 | 74 |
| 63 /** | 75 /// Deletes the dataset specified by the datasetId value. Before you can |
| 64 * Deletes the dataset specified by the datasetId value. Before you can delete | 76 /// delete a dataset, you must delete all its tables, either manually or by |
| 65 * a dataset, you must delete all its tables, either manually or by specifying | 77 /// specifying deleteContents. Immediately after deletion, you can create |
| 66 * deleteContents. Immediately after deletion, you can create another dataset | 78 /// another dataset with the same name. |
| 67 * with the same name. | 79 /// |
| 68 * | 80 /// Request parameters: |
| 69 * Request parameters: | 81 /// |
| 70 * | 82 /// [projectId] - Project ID of the dataset being deleted |
| 71 * [projectId] - Project ID of the dataset being deleted | 83 /// |
| 72 * | 84 /// [datasetId] - Dataset ID of dataset being deleted |
| 73 * [datasetId] - Dataset ID of dataset being deleted | 85 /// |
| 74 * | 86 /// [deleteContents] - If True, delete all the tables in the dataset. If |
| 75 * [deleteContents] - If True, delete all the tables in the dataset. If False | 87 /// False and the dataset contains tables, the request will fail. Default is |
| 76 * and the dataset contains tables, the request will fail. Default is False | 88 /// False |
| 77 * | 89 /// |
| 78 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 90 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 79 * error. | 91 /// an error. |
| 80 * | 92 /// |
| 81 * If the used [http.Client] completes with an error when making a REST call, | 93 /// If the used [http.Client] completes with an error when making a REST |
| 82 * this method will complete with the same error. | 94 /// call, this method will complete with the same error. |
| 83 */ | 95 async.Future delete(core.String projectId, core.String datasetId, |
| 84 async.Future delete(core.String projectId, core.String datasetId, {core.bool deleteContents}) { | 96 {core.bool deleteContents}) { |
| 85 var _url = null; | 97 var _url = null; |
| 86 var _queryParams = new core.Map(); | 98 var _queryParams = new core.Map(); |
| 87 var _uploadMedia = null; | 99 var _uploadMedia = null; |
| 88 var _uploadOptions = null; | 100 var _uploadOptions = null; |
| 89 var _downloadOptions = commons.DownloadOptions.Metadata; | 101 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 90 var _body = null; | 102 var _body = null; |
| 91 | 103 |
| 92 if (projectId == null) { | 104 if (projectId == null) { |
| 93 throw new core.ArgumentError("Parameter projectId is required."); | 105 throw new core.ArgumentError("Parameter projectId is required."); |
| 94 } | 106 } |
| 95 if (datasetId == null) { | 107 if (datasetId == null) { |
| 96 throw new core.ArgumentError("Parameter datasetId is required."); | 108 throw new core.ArgumentError("Parameter datasetId is required."); |
| 97 } | 109 } |
| 98 if (deleteContents != null) { | 110 if (deleteContents != null) { |
| 99 _queryParams["deleteContents"] = ["${deleteContents}"]; | 111 _queryParams["deleteContents"] = ["${deleteContents}"]; |
| 100 } | 112 } |
| 101 | 113 |
| 102 _downloadOptions = null; | 114 _downloadOptions = null; |
| 103 | 115 |
| 104 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId'); | 116 _url = 'projects/' + |
| 117 commons.Escaper.ecapeVariable('$projectId') + |
| 118 '/datasets/' + |
| 119 commons.Escaper.ecapeVariable('$datasetId'); |
| 105 | 120 |
| 106 var _response = _requester.request(_url, | 121 var _response = _requester.request(_url, "DELETE", |
| 107 "DELETE", | 122 body: _body, |
| 108 body: _body, | 123 queryParams: _queryParams, |
| 109 queryParams: _queryParams, | 124 uploadOptions: _uploadOptions, |
| 110 uploadOptions: _uploadOptions, | 125 uploadMedia: _uploadMedia, |
| 111 uploadMedia: _uploadMedia, | 126 downloadOptions: _downloadOptions); |
| 112 downloadOptions: _downloadOptions); | |
| 113 return _response.then((data) => null); | 127 return _response.then((data) => null); |
| 114 } | 128 } |
| 115 | 129 |
| 116 /** | 130 /// Returns the dataset specified by datasetID. |
| 117 * Returns the dataset specified by datasetID. | 131 /// |
| 118 * | 132 /// Request parameters: |
| 119 * Request parameters: | 133 /// |
| 120 * | 134 /// [projectId] - Project ID of the requested dataset |
| 121 * [projectId] - Project ID of the requested dataset | 135 /// |
| 122 * | 136 /// [datasetId] - Dataset ID of the requested dataset |
| 123 * [datasetId] - Dataset ID of the requested dataset | 137 /// |
| 124 * | 138 /// Completes with a [Dataset]. |
| 125 * Completes with a [Dataset]. | 139 /// |
| 126 * | 140 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 127 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 141 /// an error. |
| 128 * error. | 142 /// |
| 129 * | 143 /// If the used [http.Client] completes with an error when making a REST |
| 130 * If the used [http.Client] completes with an error when making a REST call, | 144 /// call, this method will complete with the same error. |
| 131 * this method will complete with the same error. | |
| 132 */ | |
| 133 async.Future<Dataset> get(core.String projectId, core.String datasetId) { | 145 async.Future<Dataset> get(core.String projectId, core.String datasetId) { |
| 134 var _url = null; | 146 var _url = null; |
| 135 var _queryParams = new core.Map(); | 147 var _queryParams = new core.Map(); |
| 136 var _uploadMedia = null; | 148 var _uploadMedia = null; |
| 137 var _uploadOptions = null; | 149 var _uploadOptions = null; |
| 138 var _downloadOptions = commons.DownloadOptions.Metadata; | 150 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 139 var _body = null; | 151 var _body = null; |
| 140 | 152 |
| 141 if (projectId == null) { | 153 if (projectId == null) { |
| 142 throw new core.ArgumentError("Parameter projectId is required."); | 154 throw new core.ArgumentError("Parameter projectId is required."); |
| 143 } | 155 } |
| 144 if (datasetId == null) { | 156 if (datasetId == null) { |
| 145 throw new core.ArgumentError("Parameter datasetId is required."); | 157 throw new core.ArgumentError("Parameter datasetId is required."); |
| 146 } | 158 } |
| 147 | 159 |
| 148 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId'); | 160 _url = 'projects/' + |
| 161 commons.Escaper.ecapeVariable('$projectId') + |
| 162 '/datasets/' + |
| 163 commons.Escaper.ecapeVariable('$datasetId'); |
| 149 | 164 |
| 150 var _response = _requester.request(_url, | 165 var _response = _requester.request(_url, "GET", |
| 151 "GET", | 166 body: _body, |
| 152 body: _body, | 167 queryParams: _queryParams, |
| 153 queryParams: _queryParams, | 168 uploadOptions: _uploadOptions, |
| 154 uploadOptions: _uploadOptions, | 169 uploadMedia: _uploadMedia, |
| 155 uploadMedia: _uploadMedia, | 170 downloadOptions: _downloadOptions); |
| 156 downloadOptions: _downloadOptions); | |
| 157 return _response.then((data) => new Dataset.fromJson(data)); | 171 return _response.then((data) => new Dataset.fromJson(data)); |
| 158 } | 172 } |
| 159 | 173 |
| 160 /** | 174 /// Creates a new empty dataset. |
| 161 * Creates a new empty dataset. | 175 /// |
| 162 * | 176 /// [request] - The metadata request object. |
| 163 * [request] - The metadata request object. | 177 /// |
| 164 * | 178 /// Request parameters: |
| 165 * Request parameters: | 179 /// |
| 166 * | 180 /// [projectId] - Project ID of the new dataset |
| 167 * [projectId] - Project ID of the new dataset | 181 /// |
| 168 * | 182 /// Completes with a [Dataset]. |
| 169 * Completes with a [Dataset]. | 183 /// |
| 170 * | 184 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 171 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 185 /// an error. |
| 172 * error. | 186 /// |
| 173 * | 187 /// If the used [http.Client] completes with an error when making a REST |
| 174 * If the used [http.Client] completes with an error when making a REST call, | 188 /// call, this method will complete with the same error. |
| 175 * this method will complete with the same error. | |
| 176 */ | |
| 177 async.Future<Dataset> insert(Dataset request, core.String projectId) { | 189 async.Future<Dataset> insert(Dataset request, core.String projectId) { |
| 178 var _url = null; | 190 var _url = null; |
| 179 var _queryParams = new core.Map(); | 191 var _queryParams = new core.Map(); |
| 180 var _uploadMedia = null; | 192 var _uploadMedia = null; |
| 181 var _uploadOptions = null; | 193 var _uploadOptions = null; |
| 182 var _downloadOptions = commons.DownloadOptions.Metadata; | 194 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 183 var _body = null; | 195 var _body = null; |
| 184 | 196 |
| 185 if (request != null) { | 197 if (request != null) { |
| 186 _body = convert.JSON.encode((request).toJson()); | 198 _body = convert.JSON.encode((request).toJson()); |
| 187 } | 199 } |
| 188 if (projectId == null) { | 200 if (projectId == null) { |
| 189 throw new core.ArgumentError("Parameter projectId is required."); | 201 throw new core.ArgumentError("Parameter projectId is required."); |
| 190 } | 202 } |
| 191 | 203 |
| 192 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets'; | 204 _url = |
| 205 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets'; |
| 193 | 206 |
| 194 var _response = _requester.request(_url, | 207 var _response = _requester.request(_url, "POST", |
| 195 "POST", | 208 body: _body, |
| 196 body: _body, | 209 queryParams: _queryParams, |
| 197 queryParams: _queryParams, | 210 uploadOptions: _uploadOptions, |
| 198 uploadOptions: _uploadOptions, | 211 uploadMedia: _uploadMedia, |
| 199 uploadMedia: _uploadMedia, | 212 downloadOptions: _downloadOptions); |
| 200 downloadOptions: _downloadOptions); | |
| 201 return _response.then((data) => new Dataset.fromJson(data)); | 213 return _response.then((data) => new Dataset.fromJson(data)); |
| 202 } | 214 } |
| 203 | 215 |
| 204 /** | 216 /// Lists all datasets in the specified project to which you have been |
| 205 * Lists all datasets in the specified project to which you have been granted | 217 /// granted the READER dataset role. |
| 206 * the READER dataset role. | 218 /// |
| 207 * | 219 /// Request parameters: |
| 208 * Request parameters: | 220 /// |
| 209 * | 221 /// [projectId] - Project ID of the datasets to be listed |
| 210 * [projectId] - Project ID of the datasets to be listed | 222 /// |
| 211 * | 223 /// [all] - Whether to list all datasets, including hidden ones |
| 212 * [all] - Whether to list all datasets, including hidden ones | 224 /// |
| 213 * | 225 /// [filter] - An expression for filtering the results of the request by |
| 214 * [filter] - An expression for filtering the results of the request by label. | 226 /// label. The syntax is "labels.<name>[:<value>]". Multiple filters can be |
| 215 * The syntax is "labels.<name>[:<value>]". Multiple filters can be ANDed | 227 /// ANDed together by connecting with a space. Example: |
| 216 * together by connecting with a space. Example: "labels.department:receiving | 228 /// "labels.department:receiving labels.active". See Filtering datasets using |
| 217 * labels.active". See Filtering datasets using labels for details. | 229 /// labels for details. |
| 218 * | 230 /// |
| 219 * [maxResults] - The maximum number of results to return | 231 /// [maxResults] - The maximum number of results to return |
| 220 * | 232 /// |
| 221 * [pageToken] - Page token, returned by a previous call, to request the next | 233 /// [pageToken] - Page token, returned by a previous call, to request the |
| 222 * page of results | 234 /// next page of results |
| 223 * | 235 /// |
| 224 * Completes with a [DatasetList]. | 236 /// Completes with a [DatasetList]. |
| 225 * | 237 /// |
| 226 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 238 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 227 * error. | 239 /// an error. |
| 228 * | 240 /// |
| 229 * If the used [http.Client] completes with an error when making a REST call, | 241 /// If the used [http.Client] completes with an error when making a REST |
| 230 * this method will complete with the same error. | 242 /// call, this method will complete with the same error. |
| 231 */ | 243 async.Future<DatasetList> list(core.String projectId, |
| 232 async.Future<DatasetList> list(core.String projectId, {core.bool all, core.String filter, core.int maxResults, core.String pageToken}) { | 244 {core.bool all, |
| 245 core.String filter, |
| 246 core.int maxResults, |
| 247 core.String pageToken}) { |
| 233 var _url = null; | 248 var _url = null; |
| 234 var _queryParams = new core.Map(); | 249 var _queryParams = new core.Map(); |
| 235 var _uploadMedia = null; | 250 var _uploadMedia = null; |
| 236 var _uploadOptions = null; | 251 var _uploadOptions = null; |
| 237 var _downloadOptions = commons.DownloadOptions.Metadata; | 252 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 238 var _body = null; | 253 var _body = null; |
| 239 | 254 |
| 240 if (projectId == null) { | 255 if (projectId == null) { |
| 241 throw new core.ArgumentError("Parameter projectId is required."); | 256 throw new core.ArgumentError("Parameter projectId is required."); |
| 242 } | 257 } |
| 243 if (all != null) { | 258 if (all != null) { |
| 244 _queryParams["all"] = ["${all}"]; | 259 _queryParams["all"] = ["${all}"]; |
| 245 } | 260 } |
| 246 if (filter != null) { | 261 if (filter != null) { |
| 247 _queryParams["filter"] = [filter]; | 262 _queryParams["filter"] = [filter]; |
| 248 } | 263 } |
| 249 if (maxResults != null) { | 264 if (maxResults != null) { |
| 250 _queryParams["maxResults"] = ["${maxResults}"]; | 265 _queryParams["maxResults"] = ["${maxResults}"]; |
| 251 } | 266 } |
| 252 if (pageToken != null) { | 267 if (pageToken != null) { |
| 253 _queryParams["pageToken"] = [pageToken]; | 268 _queryParams["pageToken"] = [pageToken]; |
| 254 } | 269 } |
| 255 | 270 |
| 256 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets'; | 271 _url = |
| 272 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets'; |
| 257 | 273 |
| 258 var _response = _requester.request(_url, | 274 var _response = _requester.request(_url, "GET", |
| 259 "GET", | 275 body: _body, |
| 260 body: _body, | 276 queryParams: _queryParams, |
| 261 queryParams: _queryParams, | 277 uploadOptions: _uploadOptions, |
| 262 uploadOptions: _uploadOptions, | 278 uploadMedia: _uploadMedia, |
| 263 uploadMedia: _uploadMedia, | 279 downloadOptions: _downloadOptions); |
| 264 downloadOptions: _downloadOptions); | |
| 265 return _response.then((data) => new DatasetList.fromJson(data)); | 280 return _response.then((data) => new DatasetList.fromJson(data)); |
| 266 } | 281 } |
| 267 | 282 |
| 268 /** | 283 /// Updates information in an existing dataset. The update method replaces |
| 269 * Updates information in an existing dataset. The update method replaces the | 284 /// the entire dataset resource, whereas the patch method only replaces |
| 270 * entire dataset resource, whereas the patch method only replaces fields that | 285 /// fields that are provided in the submitted dataset resource. This method |
| 271 * are provided in the submitted dataset resource. This method supports patch | 286 /// supports patch semantics. |
| 272 * semantics. | 287 /// |
| 273 * | 288 /// [request] - The metadata request object. |
| 274 * [request] - The metadata request object. | 289 /// |
| 275 * | 290 /// Request parameters: |
| 276 * Request parameters: | 291 /// |
| 277 * | 292 /// [projectId] - Project ID of the dataset being updated |
| 278 * [projectId] - Project ID of the dataset being updated | 293 /// |
| 279 * | 294 /// [datasetId] - Dataset ID of the dataset being updated |
| 280 * [datasetId] - Dataset ID of the dataset being updated | 295 /// |
| 281 * | 296 /// Completes with a [Dataset]. |
| 282 * Completes with a [Dataset]. | 297 /// |
| 283 * | 298 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 284 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 299 /// an error. |
| 285 * error. | 300 /// |
| 286 * | 301 /// If the used [http.Client] completes with an error when making a REST |
| 287 * If the used [http.Client] completes with an error when making a REST call, | 302 /// call, this method will complete with the same error. |
| 288 * this method will complete with the same error. | 303 async.Future<Dataset> patch( |
| 289 */ | 304 Dataset request, core.String projectId, core.String datasetId) { |
| 290 async.Future<Dataset> patch(Dataset request, core.String projectId, core.String datasetId) { | |
| 291 var _url = null; | 305 var _url = null; |
| 292 var _queryParams = new core.Map(); | 306 var _queryParams = new core.Map(); |
| 293 var _uploadMedia = null; | 307 var _uploadMedia = null; |
| 294 var _uploadOptions = null; | 308 var _uploadOptions = null; |
| 295 var _downloadOptions = commons.DownloadOptions.Metadata; | 309 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 296 var _body = null; | 310 var _body = null; |
| 297 | 311 |
| 298 if (request != null) { | 312 if (request != null) { |
| 299 _body = convert.JSON.encode((request).toJson()); | 313 _body = convert.JSON.encode((request).toJson()); |
| 300 } | 314 } |
| 301 if (projectId == null) { | 315 if (projectId == null) { |
| 302 throw new core.ArgumentError("Parameter projectId is required."); | 316 throw new core.ArgumentError("Parameter projectId is required."); |
| 303 } | 317 } |
| 304 if (datasetId == null) { | 318 if (datasetId == null) { |
| 305 throw new core.ArgumentError("Parameter datasetId is required."); | 319 throw new core.ArgumentError("Parameter datasetId is required."); |
| 306 } | 320 } |
| 307 | 321 |
| 308 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId'); | 322 _url = 'projects/' + |
| 323 commons.Escaper.ecapeVariable('$projectId') + |
| 324 '/datasets/' + |
| 325 commons.Escaper.ecapeVariable('$datasetId'); |
| 309 | 326 |
| 310 var _response = _requester.request(_url, | 327 var _response = _requester.request(_url, "PATCH", |
| 311 "PATCH", | 328 body: _body, |
| 312 body: _body, | 329 queryParams: _queryParams, |
| 313 queryParams: _queryParams, | 330 uploadOptions: _uploadOptions, |
| 314 uploadOptions: _uploadOptions, | 331 uploadMedia: _uploadMedia, |
| 315 uploadMedia: _uploadMedia, | 332 downloadOptions: _downloadOptions); |
| 316 downloadOptions: _downloadOptions); | |
| 317 return _response.then((data) => new Dataset.fromJson(data)); | 333 return _response.then((data) => new Dataset.fromJson(data)); |
| 318 } | 334 } |
| 319 | 335 |
| 320 /** | 336 /// Updates information in an existing dataset. The update method replaces |
| 321 * Updates information in an existing dataset. The update method replaces the | 337 /// the entire dataset resource, whereas the patch method only replaces |
| 322 * entire dataset resource, whereas the patch method only replaces fields that | 338 /// fields that are provided in the submitted dataset resource. |
| 323 * are provided in the submitted dataset resource. | 339 /// |
| 324 * | 340 /// [request] - The metadata request object. |
| 325 * [request] - The metadata request object. | 341 /// |
| 326 * | 342 /// Request parameters: |
| 327 * Request parameters: | 343 /// |
| 328 * | 344 /// [projectId] - Project ID of the dataset being updated |
| 329 * [projectId] - Project ID of the dataset being updated | 345 /// |
| 330 * | 346 /// [datasetId] - Dataset ID of the dataset being updated |
| 331 * [datasetId] - Dataset ID of the dataset being updated | 347 /// |
| 332 * | 348 /// Completes with a [Dataset]. |
| 333 * Completes with a [Dataset]. | 349 /// |
| 334 * | 350 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 335 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 351 /// an error. |
| 336 * error. | 352 /// |
| 337 * | 353 /// If the used [http.Client] completes with an error when making a REST |
| 338 * If the used [http.Client] completes with an error when making a REST call, | 354 /// call, this method will complete with the same error. |
| 339 * this method will complete with the same error. | 355 async.Future<Dataset> update( |
| 340 */ | 356 Dataset request, core.String projectId, core.String datasetId) { |
| 341 async.Future<Dataset> update(Dataset request, core.String projectId, core.String datasetId) { | |
| 342 var _url = null; | 357 var _url = null; |
| 343 var _queryParams = new core.Map(); | 358 var _queryParams = new core.Map(); |
| 344 var _uploadMedia = null; | 359 var _uploadMedia = null; |
| 345 var _uploadOptions = null; | 360 var _uploadOptions = null; |
| 346 var _downloadOptions = commons.DownloadOptions.Metadata; | 361 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 347 var _body = null; | 362 var _body = null; |
| 348 | 363 |
| 349 if (request != null) { | 364 if (request != null) { |
| 350 _body = convert.JSON.encode((request).toJson()); | 365 _body = convert.JSON.encode((request).toJson()); |
| 351 } | 366 } |
| 352 if (projectId == null) { | 367 if (projectId == null) { |
| 353 throw new core.ArgumentError("Parameter projectId is required."); | 368 throw new core.ArgumentError("Parameter projectId is required."); |
| 354 } | 369 } |
| 355 if (datasetId == null) { | 370 if (datasetId == null) { |
| 356 throw new core.ArgumentError("Parameter datasetId is required."); | 371 throw new core.ArgumentError("Parameter datasetId is required."); |
| 357 } | 372 } |
| 358 | 373 |
| 359 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/datasets/' + commons.Escaper.ecapeVariable('$datasetId'); | 374 _url = 'projects/' + |
| 375 commons.Escaper.ecapeVariable('$projectId') + |
| 376 '/datasets/' + |
| 377 commons.Escaper.ecapeVariable('$datasetId'); |
| 360 | 378 |
| 361 var _response = _requester.request(_url, | 379 var _response = _requester.request(_url, "PUT", |
| 362 "PUT", | 380 body: _body, |
| 363 body: _body, | 381 queryParams: _queryParams, |
| 364 queryParams: _queryParams, | 382 uploadOptions: _uploadOptions, |
| 365 uploadOptions: _uploadOptions, | 383 uploadMedia: _uploadMedia, |
| 366 uploadMedia: _uploadMedia, | 384 downloadOptions: _downloadOptions); |
| 367 downloadOptions: _downloadOptions); | |
| 368 return _response.then((data) => new Dataset.fromJson(data)); | 385 return _response.then((data) => new Dataset.fromJson(data)); |
| 369 } | 386 } |
| 370 | |
| 371 } | 387 } |
| 372 | 388 |
| 373 | |
| 374 class JobsResourceApi { | 389 class JobsResourceApi { |
| 375 final commons.ApiRequester _requester; | 390 final commons.ApiRequester _requester; |
| 376 | 391 |
| 377 JobsResourceApi(commons.ApiRequester client) : | 392 JobsResourceApi(commons.ApiRequester client) : _requester = client; |
| 378 _requester = client; | |
| 379 | 393 |
| 380 /** | 394 /// Requests that a job be cancelled. This call will return immediately, and |
| 381 * Requests that a job be cancelled. This call will return immediately, and | 395 /// the client will need to poll for the job status to see if the cancel |
| 382 * the client will need to poll for the job status to see if the cancel | 396 /// completed successfully. Cancelled jobs may still incur costs. |
| 383 * completed successfully. Cancelled jobs may still incur costs. | 397 /// |
| 384 * | 398 /// Request parameters: |
| 385 * Request parameters: | 399 /// |
| 386 * | 400 /// [projectId] - [Required] Project ID of the job to cancel |
| 387 * [projectId] - [Required] Project ID of the job to cancel | 401 /// |
| 388 * | 402 /// [jobId] - [Required] Job ID of the job to cancel |
| 389 * [jobId] - [Required] Job ID of the job to cancel | 403 /// |
| 390 * | 404 /// Completes with a [JobCancelResponse]. |
| 391 * Completes with a [JobCancelResponse]. | 405 /// |
| 392 * | 406 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 393 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 407 /// an error. |
| 394 * error. | 408 /// |
| 395 * | 409 /// If the used [http.Client] completes with an error when making a REST |
| 396 * If the used [http.Client] completes with an error when making a REST call, | 410 /// call, this method will complete with the same error. |
| 397 * this method will complete with the same error. | 411 async.Future<JobCancelResponse> cancel( |
| 398 */ | 412 core.String projectId, core.String jobId) { |
| 399 async.Future<JobCancelResponse> cancel(core.String projectId, core.String jobId) { | |
| 400 var _url = null; | 413 var _url = null; |
| 401 var _queryParams = new core.Map(); | 414 var _queryParams = new core.Map(); |
| 402 var _uploadMedia = null; | 415 var _uploadMedia = null; |
| 403 var _uploadOptions = null; | 416 var _uploadOptions = null; |
| 404 var _downloadOptions = commons.DownloadOptions.Metadata; | 417 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 405 var _body = null; | 418 var _body = null; |
| 406 | 419 |
| 407 if (projectId == null) { | 420 if (projectId == null) { |
| 408 throw new core.ArgumentError("Parameter projectId is required."); | 421 throw new core.ArgumentError("Parameter projectId is required."); |
| 409 } | 422 } |
| 410 if (jobId == null) { | 423 if (jobId == null) { |
| 411 throw new core.ArgumentError("Parameter jobId is required."); | 424 throw new core.ArgumentError("Parameter jobId is required."); |
| 412 } | 425 } |
| 413 | 426 |
| 414 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs/' + commons.Escaper.ecapeVariable('$jobId') + '/cancel'; | 427 _url = 'projects/' + |
| 428 commons.Escaper.ecapeVariable('$projectId') + |
| 429 '/jobs/' + |
| 430 commons.Escaper.ecapeVariable('$jobId') + |
| 431 '/cancel'; |
| 415 | 432 |
| 416 var _response = _requester.request(_url, | 433 var _response = _requester.request(_url, "POST", |
| 417 "POST", | 434 body: _body, |
| 418 body: _body, | 435 queryParams: _queryParams, |
| 419 queryParams: _queryParams, | 436 uploadOptions: _uploadOptions, |
| 420 uploadOptions: _uploadOptions, | 437 uploadMedia: _uploadMedia, |
| 421 uploadMedia: _uploadMedia, | 438 downloadOptions: _downloadOptions); |
| 422 downloadOptions: _downloadOptions); | |
| 423 return _response.then((data) => new JobCancelResponse.fromJson(data)); | 439 return _response.then((data) => new JobCancelResponse.fromJson(data)); |
| 424 } | 440 } |
| 425 | 441 |
| 426 /** | 442 /// Returns information about a specific job. Job information is available |
| 427 * Returns information about a specific job. Job information is available for | 443 /// for a six month period after creation. Requires that you're the person |
| 428 * a six month period after creation. Requires that you're the person who ran | 444 /// who ran the job, or have the Is Owner project role. |
| 429 * the job, or have the Is Owner project role. | 445 /// |
| 430 * | 446 /// Request parameters: |
| 431 * Request parameters: | 447 /// |
| 432 * | 448 /// [projectId] - [Required] Project ID of the requested job |
| 433 * [projectId] - [Required] Project ID of the requested job | 449 /// |
| 434 * | 450 /// [jobId] - [Required] Job ID of the requested job |
| 435 * [jobId] - [Required] Job ID of the requested job | 451 /// |
| 436 * | 452 /// Completes with a [Job]. |
| 437 * Completes with a [Job]. | 453 /// |
| 438 * | 454 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 439 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 455 /// an error. |
| 440 * error. | 456 /// |
| 441 * | 457 /// If the used [http.Client] completes with an error when making a REST |
| 442 * If the used [http.Client] completes with an error when making a REST call, | 458 /// call, this method will complete with the same error. |
| 443 * this method will complete with the same error. | |
| 444 */ | |
| 445 async.Future<Job> get(core.String projectId, core.String jobId) { | 459 async.Future<Job> get(core.String projectId, core.String jobId) { |
| 446 var _url = null; | 460 var _url = null; |
| 447 var _queryParams = new core.Map(); | 461 var _queryParams = new core.Map(); |
| 448 var _uploadMedia = null; | 462 var _uploadMedia = null; |
| 449 var _uploadOptions = null; | 463 var _uploadOptions = null; |
| 450 var _downloadOptions = commons.DownloadOptions.Metadata; | 464 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 451 var _body = null; | 465 var _body = null; |
| 452 | 466 |
| 453 if (projectId == null) { | 467 if (projectId == null) { |
| 454 throw new core.ArgumentError("Parameter projectId is required."); | 468 throw new core.ArgumentError("Parameter projectId is required."); |
| 455 } | 469 } |
| 456 if (jobId == null) { | 470 if (jobId == null) { |
| 457 throw new core.ArgumentError("Parameter jobId is required."); | 471 throw new core.ArgumentError("Parameter jobId is required."); |
| 458 } | 472 } |
| 459 | 473 |
| 460 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs/'
+ commons.Escaper.ecapeVariable('$jobId'); | 474 _url = 'projects/' + |
| 475 commons.Escaper.ecapeVariable('$projectId') + |
| 476 '/jobs/' + |
| 477 commons.Escaper.ecapeVariable('$jobId'); |
| 461 | 478 |
| 462 var _response = _requester.request(_url, | 479 var _response = _requester.request(_url, "GET", |
| 463 "GET", | 480 body: _body, |
| 464 body: _body, | 481 queryParams: _queryParams, |
| 465 queryParams: _queryParams, | 482 uploadOptions: _uploadOptions, |
| 466 uploadOptions: _uploadOptions, | 483 uploadMedia: _uploadMedia, |
| 467 uploadMedia: _uploadMedia, | 484 downloadOptions: _downloadOptions); |
| 468 downloadOptions: _downloadOptions); | |
| 469 return _response.then((data) => new Job.fromJson(data)); | 485 return _response.then((data) => new Job.fromJson(data)); |
| 470 } | 486 } |
| 471 | 487 |
| 472 /** | 488 /// Retrieves the results of a query job. |
| 473 * Retrieves the results of a query job. | 489 /// |
| 474 * | 490 /// Request parameters: |
| 475 * Request parameters: | 491 /// |
| 476 * | 492 /// [projectId] - [Required] Project ID of the query job |
| 477 * [projectId] - [Required] Project ID of the query job | 493 /// |
| 478 * | 494 /// [jobId] - [Required] Job ID of the query job |
| 479 * [jobId] - [Required] Job ID of the query job | 495 /// |
| 480 * | 496 /// [maxResults] - Maximum number of results to read |
| 481 * [maxResults] - Maximum number of results to read | 497 /// |
| 482 * | 498 /// [pageToken] - Page token, returned by a previous call, to request the |
| 483 * [pageToken] - Page token, returned by a previous call, to request the next | 499 /// next page of results |
| 484 * page of results | 500 /// |
| 485 * | 501 /// [startIndex] - Zero-based index of the starting row |
| 486 * [startIndex] - Zero-based index of the starting row | 502 /// |
| 487 * | 503 /// [timeoutMs] - How long to wait for the query to complete, in |
| 488 * [timeoutMs] - How long to wait for the query to complete, in milliseconds, | 504 /// milliseconds, before returning. Default is 10 seconds. If the timeout |
| 489 * before returning. Default is 10 seconds. If the timeout passes before the | 505 /// passes before the job completes, the 'jobComplete' field in the response |
| 490 * job completes, the 'jobComplete' field in the response will be false | 506 /// will be false |
| 491 * | 507 /// |
| 492 * Completes with a [GetQueryResultsResponse]. | 508 /// Completes with a [GetQueryResultsResponse]. |
| 493 * | 509 /// |
| 494 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 510 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 495 * error. | 511 /// an error. |
| 496 * | 512 /// |
| 497 * If the used [http.Client] completes with an error when making a REST call, | 513 /// If the used [http.Client] completes with an error when making a REST |
| 498 * this method will complete with the same error. | 514 /// call, this method will complete with the same error. |
| 499 */ | 515 async.Future<GetQueryResultsResponse> getQueryResults( |
| 500 async.Future<GetQueryResultsResponse> getQueryResults(core.String projectId, c
ore.String jobId, {core.int maxResults, core.String pageToken, core.String start
Index, core.int timeoutMs}) { | 516 core.String projectId, core.String jobId, |
| 517 {core.int maxResults, |
| 518 core.String pageToken, |
| 519 core.String startIndex, |
| 520 core.int timeoutMs}) { |
| 501 var _url = null; | 521 var _url = null; |
| 502 var _queryParams = new core.Map(); | 522 var _queryParams = new core.Map(); |
| 503 var _uploadMedia = null; | 523 var _uploadMedia = null; |
| 504 var _uploadOptions = null; | 524 var _uploadOptions = null; |
| 505 var _downloadOptions = commons.DownloadOptions.Metadata; | 525 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 506 var _body = null; | 526 var _body = null; |
| 507 | 527 |
| 508 if (projectId == null) { | 528 if (projectId == null) { |
| 509 throw new core.ArgumentError("Parameter projectId is required."); | 529 throw new core.ArgumentError("Parameter projectId is required."); |
| 510 } | 530 } |
| 511 if (jobId == null) { | 531 if (jobId == null) { |
| 512 throw new core.ArgumentError("Parameter jobId is required."); | 532 throw new core.ArgumentError("Parameter jobId is required."); |
| 513 } | 533 } |
| 514 if (maxResults != null) { | 534 if (maxResults != null) { |
| 515 _queryParams["maxResults"] = ["${maxResults}"]; | 535 _queryParams["maxResults"] = ["${maxResults}"]; |
| 516 } | 536 } |
| 517 if (pageToken != null) { | 537 if (pageToken != null) { |
| 518 _queryParams["pageToken"] = [pageToken]; | 538 _queryParams["pageToken"] = [pageToken]; |
| 519 } | 539 } |
| 520 if (startIndex != null) { | 540 if (startIndex != null) { |
| 521 _queryParams["startIndex"] = [startIndex]; | 541 _queryParams["startIndex"] = [startIndex]; |
| 522 } | 542 } |
| 523 if (timeoutMs != null) { | 543 if (timeoutMs != null) { |
| 524 _queryParams["timeoutMs"] = ["${timeoutMs}"]; | 544 _queryParams["timeoutMs"] = ["${timeoutMs}"]; |
| 525 } | 545 } |
| 526 | 546 |
| 527 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/queries
/' + commons.Escaper.ecapeVariable('$jobId'); | 547 _url = 'projects/' + |
| 548 commons.Escaper.ecapeVariable('$projectId') + |
| 549 '/queries/' + |
| 550 commons.Escaper.ecapeVariable('$jobId'); |
| 528 | 551 |
| 529 var _response = _requester.request(_url, | 552 var _response = _requester.request(_url, "GET", |
| 530 "GET", | 553 body: _body, |
| 531 body: _body, | 554 queryParams: _queryParams, |
| 532 queryParams: _queryParams, | 555 uploadOptions: _uploadOptions, |
| 533 uploadOptions: _uploadOptions, | 556 uploadMedia: _uploadMedia, |
| 534 uploadMedia: _uploadMedia, | 557 downloadOptions: _downloadOptions); |
| 535 downloadOptions: _downloadOptions); | |
| 536 return _response.then((data) => new GetQueryResultsResponse.fromJson(data)); | 558 return _response.then((data) => new GetQueryResultsResponse.fromJson(data)); |
| 537 } | 559 } |
| 538 | 560 |
| 539 /** | 561 /// Starts a new asynchronous job. Requires the Can View project role. |
| 540 * Starts a new asynchronous job. Requires the Can View project role. | 562 /// |
| 541 * | 563 /// [request] - The metadata request object. |
| 542 * [request] - The metadata request object. | 564 /// |
| 543 * | 565 /// Request parameters: |
| 544 * Request parameters: | 566 /// |
| 545 * | 567 /// [projectId] - Project ID of the project that will be billed for the job |
| 546 * [projectId] - Project ID of the project that will be billed for the job | 568 /// |
| 547 * | 569 /// [uploadMedia] - The media to upload. |
| 548 * [uploadMedia] - The media to upload. | 570 /// |
| 549 * | 571 /// [uploadOptions] - Options for the media upload. Streaming Media without |
| 550 * [uploadOptions] - Options for the media upload. Streaming Media without the | 572 /// the length being known ahead of time is only supported via resumable |
| 551 * length being known ahead of time is only supported via resumable uploads. | 573 /// uploads. |
| 552 * | 574 /// |
| 553 * Completes with a [Job]. | 575 /// Completes with a [Job]. |
| 554 * | 576 /// |
| 555 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 577 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 556 * error. | 578 /// an error. |
| 557 * | 579 /// |
| 558 * If the used [http.Client] completes with an error when making a REST call, | 580 /// If the used [http.Client] completes with an error when making a REST |
| 559 * this method will complete with the same error. | 581 /// call, this method will complete with the same error. |
| 560 */ | 582 async.Future<Job> insert(Job request, core.String projectId, |
| 561 async.Future<Job> insert(Job request, core.String projectId, {commons.UploadOp
tions uploadOptions : commons.UploadOptions.Default, commons.Media uploadMedia})
{ | 583 {commons.UploadOptions uploadOptions: commons.UploadOptions.Default, |
| 584 commons.Media uploadMedia}) { |
| 562 var _url = null; | 585 var _url = null; |
| 563 var _queryParams = new core.Map(); | 586 var _queryParams = new core.Map(); |
| 564 var _uploadMedia = null; | 587 var _uploadMedia = null; |
| 565 var _uploadOptions = null; | 588 var _uploadOptions = null; |
| 566 var _downloadOptions = commons.DownloadOptions.Metadata; | 589 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 567 var _body = null; | 590 var _body = null; |
| 568 | 591 |
| 569 if (request != null) { | 592 if (request != null) { |
| 570 _body = convert.JSON.encode((request).toJson()); | 593 _body = convert.JSON.encode((request).toJson()); |
| 571 } | 594 } |
| 572 if (projectId == null) { | 595 if (projectId == null) { |
| 573 throw new core.ArgumentError("Parameter projectId is required."); | 596 throw new core.ArgumentError("Parameter projectId is required."); |
| 574 } | 597 } |
| 575 | 598 |
| 576 _uploadMedia = uploadMedia; | 599 _uploadMedia = uploadMedia; |
| 577 _uploadOptions = uploadOptions; | 600 _uploadOptions = uploadOptions; |
| 578 | 601 |
| 579 if (_uploadMedia == null) { | 602 if (_uploadMedia == null) { |
| 580 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs'
; | 603 _url = |
| 604 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs'; |
| 581 } else if (_uploadOptions is commons.ResumableUploadOptions) { | 605 } else if (_uploadOptions is commons.ResumableUploadOptions) { |
| 582 _url = '/resumable/upload/bigquery/v2/projects/' + commons.Escaper.ecapeVa
riable('$projectId') + '/jobs'; | 606 _url = '/resumable/upload/bigquery/v2/projects/' + |
| 607 commons.Escaper.ecapeVariable('$projectId') + |
| 608 '/jobs'; |
| 583 } else { | 609 } else { |
| 584 _url = '/upload/bigquery/v2/projects/' + commons.Escaper.ecapeVariable('$p
rojectId') + '/jobs'; | 610 _url = '/upload/bigquery/v2/projects/' + |
| 611 commons.Escaper.ecapeVariable('$projectId') + |
| 612 '/jobs'; |
| 585 } | 613 } |
| 586 | 614 |
| 587 | 615 var _response = _requester.request(_url, "POST", |
| 588 var _response = _requester.request(_url, | 616 body: _body, |
| 589 "POST", | 617 queryParams: _queryParams, |
| 590 body: _body, | 618 uploadOptions: _uploadOptions, |
| 591 queryParams: _queryParams, | 619 uploadMedia: _uploadMedia, |
| 592 uploadOptions: _uploadOptions, | 620 downloadOptions: _downloadOptions); |
| 593 uploadMedia: _uploadMedia, | |
| 594 downloadOptions: _downloadOptions); | |
| 595 return _response.then((data) => new Job.fromJson(data)); | 621 return _response.then((data) => new Job.fromJson(data)); |
| 596 } | 622 } |
| 597 | 623 |
| 598 /** | 624 /// Lists all jobs that you started in the specified project. Job information |
| 599 * Lists all jobs that you started in the specified project. Job information | 625 /// is available for a six month period after creation. The job list is |
| 600 * is available for a six month period after creation. The job list is sorted | 626 /// sorted in reverse chronological order, by job creation time. Requires the |
| 601 * in reverse chronological order, by job creation time. Requires the Can View | 627 /// Can View project role, or the Is Owner project role if you set the |
| 602 * project role, or the Is Owner project role if you set the allUsers | 628 /// allUsers property. |
| 603 * property. | 629 /// |
| 604 * | 630 /// Request parameters: |
| 605 * Request parameters: | 631 /// |
| 606 * | 632 /// [projectId] - Project ID of the jobs to list |
| 607 * [projectId] - Project ID of the jobs to list | 633 /// |
| 608 * | 634 /// [allUsers] - Whether to display jobs owned by all users in the project. |
| 609 * [allUsers] - Whether to display jobs owned by all users in the project. | 635 /// Default false |
| 610 * Default false | 636 /// |
| 611 * | 637 /// [maxResults] - Maximum number of results to return |
| 612 * [maxResults] - Maximum number of results to return | 638 /// |
| 613 * | 639 /// [pageToken] - Page token, returned by a previous call, to request the |
| 614 * [pageToken] - Page token, returned by a previous call, to request the next | 640 /// next page of results |
| 615 * page of results | 641 /// |
| 616 * | 642 /// [projection] - Restrict information returned to a set of selected fields |
| 617 * [projection] - Restrict information returned to a set of selected fields | 643 /// Possible string values are: |
| 618 * Possible string values are: | 644 /// - "full" : Includes all job data |
| 619 * - "full" : Includes all job data | 645 /// - "minimal" : Does not include the job configuration |
| 620 * - "minimal" : Does not include the job configuration | 646 /// |
| 621 * | 647 /// [stateFilter] - Filter for job state |
| 622 * [stateFilter] - Filter for job state | 648 /// |
| 623 * | 649 /// Completes with a [JobList]. |
| 624 * Completes with a [JobList]. | 650 /// |
| 625 * | 651 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 626 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 652 /// an error. |
| 627 * error. | 653 /// |
| 628 * | 654 /// If the used [http.Client] completes with an error when making a REST |
| 629 * If the used [http.Client] completes with an error when making a REST call, | 655 /// call, this method will complete with the same error. |
| 630 * this method will complete with the same error. | 656 async.Future<JobList> list(core.String projectId, |
| 631 */ | 657 {core.bool allUsers, |
| 632 async.Future<JobList> list(core.String projectId, {core.bool allUsers, core.in
t maxResults, core.String pageToken, core.String projection, core.List<core.Stri
ng> stateFilter}) { | 658 core.int maxResults, |
| 659 core.String pageToken, |
| 660 core.String projection, |
| 661 core.List<core.String> stateFilter}) { |
| 633 var _url = null; | 662 var _url = null; |
| 634 var _queryParams = new core.Map(); | 663 var _queryParams = new core.Map(); |
| 635 var _uploadMedia = null; | 664 var _uploadMedia = null; |
| 636 var _uploadOptions = null; | 665 var _uploadOptions = null; |
| 637 var _downloadOptions = commons.DownloadOptions.Metadata; | 666 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 638 var _body = null; | 667 var _body = null; |
| 639 | 668 |
| 640 if (projectId == null) { | 669 if (projectId == null) { |
| 641 throw new core.ArgumentError("Parameter projectId is required."); | 670 throw new core.ArgumentError("Parameter projectId is required."); |
| 642 } | 671 } |
| 643 if (allUsers != null) { | 672 if (allUsers != null) { |
| 644 _queryParams["allUsers"] = ["${allUsers}"]; | 673 _queryParams["allUsers"] = ["${allUsers}"]; |
| 645 } | 674 } |
| 646 if (maxResults != null) { | 675 if (maxResults != null) { |
| 647 _queryParams["maxResults"] = ["${maxResults}"]; | 676 _queryParams["maxResults"] = ["${maxResults}"]; |
| 648 } | 677 } |
| 649 if (pageToken != null) { | 678 if (pageToken != null) { |
| 650 _queryParams["pageToken"] = [pageToken]; | 679 _queryParams["pageToken"] = [pageToken]; |
| 651 } | 680 } |
| 652 if (projection != null) { | 681 if (projection != null) { |
| 653 _queryParams["projection"] = [projection]; | 682 _queryParams["projection"] = [projection]; |
| 654 } | 683 } |
| 655 if (stateFilter != null) { | 684 if (stateFilter != null) { |
| 656 _queryParams["stateFilter"] = stateFilter; | 685 _queryParams["stateFilter"] = stateFilter; |
| 657 } | 686 } |
| 658 | 687 |
| 659 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs'; | 688 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/jobs'; |
| 660 | 689 |
| 661 var _response = _requester.request(_url, | 690 var _response = _requester.request(_url, "GET", |
| 662 "GET", | 691 body: _body, |
| 663 body: _body, | 692 queryParams: _queryParams, |
| 664 queryParams: _queryParams, | 693 uploadOptions: _uploadOptions, |
| 665 uploadOptions: _uploadOptions, | 694 uploadMedia: _uploadMedia, |
| 666 uploadMedia: _uploadMedia, | 695 downloadOptions: _downloadOptions); |
| 667 downloadOptions: _downloadOptions); | |
| 668 return _response.then((data) => new JobList.fromJson(data)); | 696 return _response.then((data) => new JobList.fromJson(data)); |
| 669 } | 697 } |
| 670 | 698 |
| 671 /** | 699 /// Runs a BigQuery SQL query synchronously and returns query results if the |
| 672 * Runs a BigQuery SQL query synchronously and returns query results if the | 700 /// query completes within a specified timeout. |
| 673 * query completes within a specified timeout. | 701 /// |
| 674 * | 702 /// [request] - The metadata request object. |
| 675 * [request] - The metadata request object. | 703 /// |
| 676 * | 704 /// Request parameters: |
| 677 * Request parameters: | 705 /// |
| 678 * | 706 /// [projectId] - Project ID of the project billed for the query |
| 679 * [projectId] - Project ID of the project billed for the query | 707 /// |
| 680 * | 708 /// Completes with a [QueryResponse]. |
| 681 * Completes with a [QueryResponse]. | 709 /// |
| 682 * | 710 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 683 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 711 /// an error. |
| 684 * error. | 712 /// |
| 685 * | 713 /// If the used [http.Client] completes with an error when making a REST |
| 686 * If the used [http.Client] completes with an error when making a REST call, | 714 /// call, this method will complete with the same error. |
| 687 * this method will complete with the same error. | 715 async.Future<QueryResponse> query( |
| 688 */ | 716 QueryRequest request, core.String projectId) { |
| 689 async.Future<QueryResponse> query(QueryRequest request, core.String projectId)
{ | |
| 690 var _url = null; | 717 var _url = null; |
| 691 var _queryParams = new core.Map(); | 718 var _queryParams = new core.Map(); |
| 692 var _uploadMedia = null; | 719 var _uploadMedia = null; |
| 693 var _uploadOptions = null; | 720 var _uploadOptions = null; |
| 694 var _downloadOptions = commons.DownloadOptions.Metadata; | 721 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 695 var _body = null; | 722 var _body = null; |
| 696 | 723 |
| 697 if (request != null) { | 724 if (request != null) { |
| 698 _body = convert.JSON.encode((request).toJson()); | 725 _body = convert.JSON.encode((request).toJson()); |
| 699 } | 726 } |
| 700 if (projectId == null) { | 727 if (projectId == null) { |
| 701 throw new core.ArgumentError("Parameter projectId is required."); | 728 throw new core.ArgumentError("Parameter projectId is required."); |
| 702 } | 729 } |
| 703 | 730 |
| 704 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/queries
'; | 731 _url = |
| 732 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/queries'; |
| 705 | 733 |
| 706 var _response = _requester.request(_url, | 734 var _response = _requester.request(_url, "POST", |
| 707 "POST", | 735 body: _body, |
| 708 body: _body, | 736 queryParams: _queryParams, |
| 709 queryParams: _queryParams, | 737 uploadOptions: _uploadOptions, |
| 710 uploadOptions: _uploadOptions, | 738 uploadMedia: _uploadMedia, |
| 711 uploadMedia: _uploadMedia, | 739 downloadOptions: _downloadOptions); |
| 712 downloadOptions: _downloadOptions); | |
| 713 return _response.then((data) => new QueryResponse.fromJson(data)); | 740 return _response.then((data) => new QueryResponse.fromJson(data)); |
| 714 } | 741 } |
| 715 | |
| 716 } | 742 } |
| 717 | 743 |
| 718 | |
class ProjectsResourceApi {
  final commons.ApiRequester _requester;

  ProjectsResourceApi(commons.ApiRequester client) : _requester = client;

  /// Returns the email address of the service account for your project used
  /// for interactions with Google Cloud KMS.
  ///
  /// Request parameters:
  ///
  /// [projectId] - Project ID for which the service account is requested.
  ///
  /// Completes with a [GetServiceAccountResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned
  /// an error.
  ///
  /// If the used [http.Client] completes with an error when making a REST
  /// call, this method will complete with the same error.
  async.Future<GetServiceAccountResponse> getServiceAccount(
      core.String projectId) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (projectId == null) {
      throw new core.ArgumentError("Parameter projectId is required.");
    }

    _url = 'projects/' +
        commons.Escaper.ecapeVariable('$projectId') +
        '/serviceAccount';

    var _response = _requester.request(_url, "GET",
        body: _body,
        queryParams: _queryParams,
        uploadOptions: _uploadOptions,
        uploadMedia: _uploadMedia,
        downloadOptions: _downloadOptions);
    return _response
        .then((data) => new GetServiceAccountResponse.fromJson(data));
  }

  /// Lists all projects to which you have been granted any project role.
  ///
  /// Request parameters:
  ///
  /// [maxResults] - Maximum number of results to return
  ///
  /// [pageToken] - Page token, returned by a previous call, to request the
  /// next page of results
  ///
  /// Completes with a [ProjectList].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned
  /// an error.
  ///
  /// If the used [http.Client] completes with an error when making a REST
  /// call, this method will complete with the same error.
  async.Future<ProjectList> list({core.int maxResults, core.String pageToken}) {
    var _url = null;
    var _queryParams = new core.Map();
    var _uploadMedia = null;
    var _uploadOptions = null;
    var _downloadOptions = commons.DownloadOptions.Metadata;
    var _body = null;

    if (maxResults != null) {
      _queryParams["maxResults"] = ["${maxResults}"];
    }
    if (pageToken != null) {
      _queryParams["pageToken"] = [pageToken];
    }

    _url = 'projects';

    var _response = _requester.request(_url, "GET",
        body: _body,
        queryParams: _queryParams,
        uploadOptions: _uploadOptions,
        uploadMedia: _uploadMedia,
        downloadOptions: _downloadOptions);
    return _response.then((data) => new ProjectList.fromJson(data));
  }
}
| 771 | 832 |
| 772 | |
class TabledataResourceApi {
  final commons.ApiRequester _requester;

  TabledataResourceApi(commons.ApiRequester client) : _requester = client;

| 779 /** | 838 /// Streams data into BigQuery one record at a time without needing to run a |
| 780 * Streams data into BigQuery one record at a time without needing to run a | 839 /// load job. Requires the WRITER dataset role. |
| 781 * load job. Requires the WRITER dataset role. | 840 /// |
| 782 * | 841 /// [request] - The metadata request object. |
| 783 * [request] - The metadata request object. | 842 /// |
| 784 * | 843 /// Request parameters: |
| 785 * Request parameters: | 844 /// |
| 786 * | 845 /// [projectId] - Project ID of the destination table. |
| 787 * [projectId] - Project ID of the destination table. | 846 /// |
| 788 * | 847 /// [datasetId] - Dataset ID of the destination table. |
| 789 * [datasetId] - Dataset ID of the destination table. | 848 /// |
| 790 * | 849 /// [tableId] - Table ID of the destination table. |
| 791 * [tableId] - Table ID of the destination table. | 850 /// |
| 792 * | 851 /// Completes with a [TableDataInsertAllResponse]. |
| 793 * Completes with a [TableDataInsertAllResponse]. | 852 /// |
| 794 * | 853 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 795 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 854 /// an error. |
| 796 * error. | 855 /// |
| 797 * | 856 /// If the used [http.Client] completes with an error when making a REST |
| 798 * If the used [http.Client] completes with an error when making a REST call, | 857 /// call, this method will complete with the same error. |
| 799 * this method will complete with the same error. | 858 async.Future<TableDataInsertAllResponse> insertAll( |
| 800 */ | 859 TableDataInsertAllRequest request, |
| 801 async.Future<TableDataInsertAllResponse> insertAll(TableDataInsertAllRequest r
equest, core.String projectId, core.String datasetId, core.String tableId) { | 860 core.String projectId, |
| 861 core.String datasetId, |
| 862 core.String tableId) { |
| 802 var _url = null; | 863 var _url = null; |
| 803 var _queryParams = new core.Map(); | 864 var _queryParams = new core.Map(); |
| 804 var _uploadMedia = null; | 865 var _uploadMedia = null; |
| 805 var _uploadOptions = null; | 866 var _uploadOptions = null; |
| 806 var _downloadOptions = commons.DownloadOptions.Metadata; | 867 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 807 var _body = null; | 868 var _body = null; |
| 808 | 869 |
| 809 if (request != null) { | 870 if (request != null) { |
| 810 _body = convert.JSON.encode((request).toJson()); | 871 _body = convert.JSON.encode((request).toJson()); |
| 811 } | 872 } |
| 812 if (projectId == null) { | 873 if (projectId == null) { |
| 813 throw new core.ArgumentError("Parameter projectId is required."); | 874 throw new core.ArgumentError("Parameter projectId is required."); |
| 814 } | 875 } |
| 815 if (datasetId == null) { | 876 if (datasetId == null) { |
| 816 throw new core.ArgumentError("Parameter datasetId is required."); | 877 throw new core.ArgumentError("Parameter datasetId is required."); |
| 817 } | 878 } |
| 818 if (tableId == null) { | 879 if (tableId == null) { |
| 819 throw new core.ArgumentError("Parameter tableId is required."); | 880 throw new core.ArgumentError("Parameter tableId is required."); |
| 820 } | 881 } |
| 821 | 882 |
| 822 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper
.ecapeVariable('$tableId') + '/insertAll'; | 883 _url = 'projects/' + |
| 884 commons.Escaper.ecapeVariable('$projectId') + |
| 885 '/datasets/' + |
| 886 commons.Escaper.ecapeVariable('$datasetId') + |
| 887 '/tables/' + |
| 888 commons.Escaper.ecapeVariable('$tableId') + |
| 889 '/insertAll'; |
| 823 | 890 |
| 824 var _response = _requester.request(_url, | 891 var _response = _requester.request(_url, "POST", |
| 825 "POST", | 892 body: _body, |
| 826 body: _body, | 893 queryParams: _queryParams, |
| 827 queryParams: _queryParams, | 894 uploadOptions: _uploadOptions, |
| 828 uploadOptions: _uploadOptions, | 895 uploadMedia: _uploadMedia, |
| 829 uploadMedia: _uploadMedia, | 896 downloadOptions: _downloadOptions); |
| 830 downloadOptions: _downloadOptions); | 897 return _response |
| 831 return _response.then((data) => new TableDataInsertAllResponse.fromJson(data
)); | 898 .then((data) => new TableDataInsertAllResponse.fromJson(data)); |
| 832 } | 899 } |
| 833 | 900 |
| 834 /** | 901 /// Retrieves table data from a specified set of rows. Requires the READER |
| 835 * Retrieves table data from a specified set of rows. Requires the READER | 902 /// dataset role. |
| 836 * dataset role. | 903 /// |
| 837 * | 904 /// Request parameters: |
| 838 * Request parameters: | 905 /// |
| 839 * | 906 /// [projectId] - Project ID of the table to read |
| 840 * [projectId] - Project ID of the table to read | 907 /// |
| 841 * | 908 /// [datasetId] - Dataset ID of the table to read |
| 842 * [datasetId] - Dataset ID of the table to read | 909 /// |
| 843 * | 910 /// [tableId] - Table ID of the table to read |
| 844 * [tableId] - Table ID of the table to read | 911 /// |
| 845 * | 912 /// [maxResults] - Maximum number of results to return |
| 846 * [maxResults] - Maximum number of results to return | 913 /// |
| 847 * | 914 /// [pageToken] - Page token, returned by a previous call, identifying the |
| 848 * [pageToken] - Page token, returned by a previous call, identifying the | 915 /// result set |
| 849 * result set | 916 /// |
| 850 * | 917 /// [selectedFields] - List of fields to return (comma-separated). If |
| 851 * [selectedFields] - List of fields to return (comma-separated). If | 918 /// unspecified, all fields are returned |
| 852 * unspecified, all fields are returned | 919 /// |
| 853 * | 920 /// [startIndex] - Zero-based index of the starting row to read |
| 854 * [startIndex] - Zero-based index of the starting row to read | 921 /// |
| 855 * | 922 /// Completes with a [TableDataList]. |
| 856 * Completes with a [TableDataList]. | 923 /// |
| 857 * | 924 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 858 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 925 /// an error. |
| 859 * error. | 926 /// |
| 860 * | 927 /// If the used [http.Client] completes with an error when making a REST |
| 861 * If the used [http.Client] completes with an error when making a REST call, | 928 /// call, this method will complete with the same error. |
| 862 * this method will complete with the same error. | 929 async.Future<TableDataList> list( |
| 863 */ | 930 core.String projectId, core.String datasetId, core.String tableId, |
| 864 async.Future<TableDataList> list(core.String projectId, core.String datasetId,
core.String tableId, {core.int maxResults, core.String pageToken, core.String s
electedFields, core.String startIndex}) { | 931 {core.int maxResults, |
| 932 core.String pageToken, |
| 933 core.String selectedFields, |
| 934 core.String startIndex}) { |
| 865 var _url = null; | 935 var _url = null; |
| 866 var _queryParams = new core.Map(); | 936 var _queryParams = new core.Map(); |
| 867 var _uploadMedia = null; | 937 var _uploadMedia = null; |
| 868 var _uploadOptions = null; | 938 var _uploadOptions = null; |
| 869 var _downloadOptions = commons.DownloadOptions.Metadata; | 939 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 870 var _body = null; | 940 var _body = null; |
| 871 | 941 |
| 872 if (projectId == null) { | 942 if (projectId == null) { |
| 873 throw new core.ArgumentError("Parameter projectId is required."); | 943 throw new core.ArgumentError("Parameter projectId is required."); |
| 874 } | 944 } |
| 875 if (datasetId == null) { | 945 if (datasetId == null) { |
| 876 throw new core.ArgumentError("Parameter datasetId is required."); | 946 throw new core.ArgumentError("Parameter datasetId is required."); |
| 877 } | 947 } |
| 878 if (tableId == null) { | 948 if (tableId == null) { |
| 879 throw new core.ArgumentError("Parameter tableId is required."); | 949 throw new core.ArgumentError("Parameter tableId is required."); |
| 880 } | 950 } |
| 881 if (maxResults != null) { | 951 if (maxResults != null) { |
| 882 _queryParams["maxResults"] = ["${maxResults}"]; | 952 _queryParams["maxResults"] = ["${maxResults}"]; |
| 883 } | 953 } |
| 884 if (pageToken != null) { | 954 if (pageToken != null) { |
| 885 _queryParams["pageToken"] = [pageToken]; | 955 _queryParams["pageToken"] = [pageToken]; |
| 886 } | 956 } |
| 887 if (selectedFields != null) { | 957 if (selectedFields != null) { |
| 888 _queryParams["selectedFields"] = [selectedFields]; | 958 _queryParams["selectedFields"] = [selectedFields]; |
| 889 } | 959 } |
| 890 if (startIndex != null) { | 960 if (startIndex != null) { |
| 891 _queryParams["startIndex"] = [startIndex]; | 961 _queryParams["startIndex"] = [startIndex]; |
| 892 } | 962 } |
| 893 | 963 |
| 894 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper
.ecapeVariable('$tableId') + '/data'; | 964 _url = 'projects/' + |
| 965 commons.Escaper.ecapeVariable('$projectId') + |
| 966 '/datasets/' + |
| 967 commons.Escaper.ecapeVariable('$datasetId') + |
| 968 '/tables/' + |
| 969 commons.Escaper.ecapeVariable('$tableId') + |
| 970 '/data'; |
| 895 | 971 |
| 896 var _response = _requester.request(_url, | 972 var _response = _requester.request(_url, "GET", |
| 897 "GET", | 973 body: _body, |
| 898 body: _body, | 974 queryParams: _queryParams, |
| 899 queryParams: _queryParams, | 975 uploadOptions: _uploadOptions, |
| 900 uploadOptions: _uploadOptions, | 976 uploadMedia: _uploadMedia, |
| 901 uploadMedia: _uploadMedia, | 977 downloadOptions: _downloadOptions); |
| 902 downloadOptions: _downloadOptions); | |
| 903 return _response.then((data) => new TableDataList.fromJson(data)); | 978 return _response.then((data) => new TableDataList.fromJson(data)); |
| 904 } | 979 } |
| 905 | |
| 906 } | 980 } |
| 907 | 981 |
| 908 | |
| 909 class TablesResourceApi { | 982 class TablesResourceApi { |
| 910 final commons.ApiRequester _requester; | 983 final commons.ApiRequester _requester; |
| 911 | 984 |
| 912 TablesResourceApi(commons.ApiRequester client) : | 985 TablesResourceApi(commons.ApiRequester client) : _requester = client; |
| 913 _requester = client; | |
| 914 | 986 |
| 915 /** | 987 /// Deletes the table specified by tableId from the dataset. If the table |
| 916 * Deletes the table specified by tableId from the dataset. If the table | 988 /// contains data, all the data will be deleted. |
| 917 * contains data, all the data will be deleted. | 989 /// |
| 918 * | 990 /// Request parameters: |
| 919 * Request parameters: | 991 /// |
| 920 * | 992 /// [projectId] - Project ID of the table to delete |
| 921 * [projectId] - Project ID of the table to delete | 993 /// |
| 922 * | 994 /// [datasetId] - Dataset ID of the table to delete |
| 923 * [datasetId] - Dataset ID of the table to delete | 995 /// |
| 924 * | 996 /// [tableId] - Table ID of the table to delete |
| 925 * [tableId] - Table ID of the table to delete | 997 /// |
| 926 * | 998 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 927 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 999 /// an error. |
| 928 * error. | 1000 /// |
| 929 * | 1001 /// If the used [http.Client] completes with an error when making a REST |
| 930 * If the used [http.Client] completes with an error when making a REST call, | 1002 /// call, this method will complete with the same error. |
| 931 * this method will complete with the same error. | 1003 async.Future delete( |
| 932 */ | 1004 core.String projectId, core.String datasetId, core.String tableId) { |
| 933 async.Future delete(core.String projectId, core.String datasetId, core.String
tableId) { | |
| 934 var _url = null; | 1005 var _url = null; |
| 935 var _queryParams = new core.Map(); | 1006 var _queryParams = new core.Map(); |
| 936 var _uploadMedia = null; | 1007 var _uploadMedia = null; |
| 937 var _uploadOptions = null; | 1008 var _uploadOptions = null; |
| 938 var _downloadOptions = commons.DownloadOptions.Metadata; | 1009 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 939 var _body = null; | 1010 var _body = null; |
| 940 | 1011 |
| 941 if (projectId == null) { | 1012 if (projectId == null) { |
| 942 throw new core.ArgumentError("Parameter projectId is required."); | 1013 throw new core.ArgumentError("Parameter projectId is required."); |
| 943 } | 1014 } |
| 944 if (datasetId == null) { | 1015 if (datasetId == null) { |
| 945 throw new core.ArgumentError("Parameter datasetId is required."); | 1016 throw new core.ArgumentError("Parameter datasetId is required."); |
| 946 } | 1017 } |
| 947 if (tableId == null) { | 1018 if (tableId == null) { |
| 948 throw new core.ArgumentError("Parameter tableId is required."); | 1019 throw new core.ArgumentError("Parameter tableId is required."); |
| 949 } | 1020 } |
| 950 | 1021 |
| 951 _downloadOptions = null; | 1022 _downloadOptions = null; |
| 952 | 1023 |
| 953 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper
.ecapeVariable('$tableId'); | 1024 _url = 'projects/' + |
| 1025 commons.Escaper.ecapeVariable('$projectId') + |
| 1026 '/datasets/' + |
| 1027 commons.Escaper.ecapeVariable('$datasetId') + |
| 1028 '/tables/' + |
| 1029 commons.Escaper.ecapeVariable('$tableId'); |
| 954 | 1030 |
| 955 var _response = _requester.request(_url, | 1031 var _response = _requester.request(_url, "DELETE", |
| 956 "DELETE", | 1032 body: _body, |
| 957 body: _body, | 1033 queryParams: _queryParams, |
| 958 queryParams: _queryParams, | 1034 uploadOptions: _uploadOptions, |
| 959 uploadOptions: _uploadOptions, | 1035 uploadMedia: _uploadMedia, |
| 960 uploadMedia: _uploadMedia, | 1036 downloadOptions: _downloadOptions); |
| 961 downloadOptions: _downloadOptions); | |
| 962 return _response.then((data) => null); | 1037 return _response.then((data) => null); |
| 963 } | 1038 } |
| 964 | 1039 |
| 965 /** | 1040 /// Gets the specified table resource by table ID. This method does not |
| 966 * Gets the specified table resource by table ID. This method does not return | 1041 /// return the data in the table, it only returns the table resource, which |
| 967 * the data in the table, it only returns the table resource, which describes | 1042 /// describes the structure of this table. |
| 968 * the structure of this table. | 1043 /// |
| 969 * | 1044 /// Request parameters: |
| 970 * Request parameters: | 1045 /// |
| 971 * | 1046 /// [projectId] - Project ID of the requested table |
| 972 * [projectId] - Project ID of the requested table | 1047 /// |
| 973 * | 1048 /// [datasetId] - Dataset ID of the requested table |
| 974 * [datasetId] - Dataset ID of the requested table | 1049 /// |
| 975 * | 1050 /// [tableId] - Table ID of the requested table |
| 976 * [tableId] - Table ID of the requested table | 1051 /// |
| 977 * | 1052 /// [selectedFields] - List of fields to return (comma-separated). If |
| 978 * [selectedFields] - List of fields to return (comma-separated). If | 1053 /// unspecified, all fields are returned |
| 979 * unspecified, all fields are returned | 1054 /// |
| 980 * | 1055 /// Completes with a [Table]. |
| 981 * Completes with a [Table]. | 1056 /// |
| 982 * | 1057 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 983 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 1058 /// an error. |
| 984 * error. | 1059 /// |
| 985 * | 1060 /// If the used [http.Client] completes with an error when making a REST |
| 986 * If the used [http.Client] completes with an error when making a REST call, | 1061 /// call, this method will complete with the same error. |
| 987 * this method will complete with the same error. | 1062 async.Future<Table> get( |
| 988 */ | 1063 core.String projectId, core.String datasetId, core.String tableId, |
| 989 async.Future<Table> get(core.String projectId, core.String datasetId, core.Str
ing tableId, {core.String selectedFields}) { | 1064 {core.String selectedFields}) { |
| 990 var _url = null; | 1065 var _url = null; |
| 991 var _queryParams = new core.Map(); | 1066 var _queryParams = new core.Map(); |
| 992 var _uploadMedia = null; | 1067 var _uploadMedia = null; |
| 993 var _uploadOptions = null; | 1068 var _uploadOptions = null; |
| 994 var _downloadOptions = commons.DownloadOptions.Metadata; | 1069 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 995 var _body = null; | 1070 var _body = null; |
| 996 | 1071 |
| 997 if (projectId == null) { | 1072 if (projectId == null) { |
| 998 throw new core.ArgumentError("Parameter projectId is required."); | 1073 throw new core.ArgumentError("Parameter projectId is required."); |
| 999 } | 1074 } |
| 1000 if (datasetId == null) { | 1075 if (datasetId == null) { |
| 1001 throw new core.ArgumentError("Parameter datasetId is required."); | 1076 throw new core.ArgumentError("Parameter datasetId is required."); |
| 1002 } | 1077 } |
| 1003 if (tableId == null) { | 1078 if (tableId == null) { |
| 1004 throw new core.ArgumentError("Parameter tableId is required."); | 1079 throw new core.ArgumentError("Parameter tableId is required."); |
| 1005 } | 1080 } |
| 1006 if (selectedFields != null) { | 1081 if (selectedFields != null) { |
| 1007 _queryParams["selectedFields"] = [selectedFields]; | 1082 _queryParams["selectedFields"] = [selectedFields]; |
| 1008 } | 1083 } |
| 1009 | 1084 |
| 1010 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper
.ecapeVariable('$tableId'); | 1085 _url = 'projects/' + |
| 1086 commons.Escaper.ecapeVariable('$projectId') + |
| 1087 '/datasets/' + |
| 1088 commons.Escaper.ecapeVariable('$datasetId') + |
| 1089 '/tables/' + |
| 1090 commons.Escaper.ecapeVariable('$tableId'); |
| 1011 | 1091 |
| 1012 var _response = _requester.request(_url, | 1092 var _response = _requester.request(_url, "GET", |
| 1013 "GET", | 1093 body: _body, |
| 1014 body: _body, | 1094 queryParams: _queryParams, |
| 1015 queryParams: _queryParams, | 1095 uploadOptions: _uploadOptions, |
| 1016 uploadOptions: _uploadOptions, | 1096 uploadMedia: _uploadMedia, |
| 1017 uploadMedia: _uploadMedia, | 1097 downloadOptions: _downloadOptions); |
| 1018 downloadOptions: _downloadOptions); | |
| 1019 return _response.then((data) => new Table.fromJson(data)); | 1098 return _response.then((data) => new Table.fromJson(data)); |
| 1020 } | 1099 } |
| 1021 | 1100 |
| 1022 /** | 1101 /// Creates a new, empty table in the dataset. |
| 1023 * Creates a new, empty table in the dataset. | 1102 /// |
| 1024 * | 1103 /// [request] - The metadata request object. |
| 1025 * [request] - The metadata request object. | 1104 /// |
| 1026 * | 1105 /// Request parameters: |
| 1027 * Request parameters: | 1106 /// |
| 1028 * | 1107 /// [projectId] - Project ID of the new table |
| 1029 * [projectId] - Project ID of the new table | 1108 /// |
| 1030 * | 1109 /// [datasetId] - Dataset ID of the new table |
| 1031 * [datasetId] - Dataset ID of the new table | 1110 /// |
| 1032 * | 1111 /// Completes with a [Table]. |
| 1033 * Completes with a [Table]. | 1112 /// |
| 1034 * | 1113 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 1035 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 1114 /// an error. |
| 1036 * error. | 1115 /// |
| 1037 * | 1116 /// If the used [http.Client] completes with an error when making a REST |
| 1038 * If the used [http.Client] completes with an error when making a REST call, | 1117 /// call, this method will complete with the same error. |
| 1039 * this method will complete with the same error. | 1118 async.Future<Table> insert( |
| 1040 */ | 1119 Table request, core.String projectId, core.String datasetId) { |
| 1041 async.Future<Table> insert(Table request, core.String projectId, core.String d
atasetId) { | |
| 1042 var _url = null; | 1120 var _url = null; |
| 1043 var _queryParams = new core.Map(); | 1121 var _queryParams = new core.Map(); |
| 1044 var _uploadMedia = null; | 1122 var _uploadMedia = null; |
| 1045 var _uploadOptions = null; | 1123 var _uploadOptions = null; |
| 1046 var _downloadOptions = commons.DownloadOptions.Metadata; | 1124 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 1047 var _body = null; | 1125 var _body = null; |
| 1048 | 1126 |
| 1049 if (request != null) { | 1127 if (request != null) { |
| 1050 _body = convert.JSON.encode((request).toJson()); | 1128 _body = convert.JSON.encode((request).toJson()); |
| 1051 } | 1129 } |
| 1052 if (projectId == null) { | 1130 if (projectId == null) { |
| 1053 throw new core.ArgumentError("Parameter projectId is required."); | 1131 throw new core.ArgumentError("Parameter projectId is required."); |
| 1054 } | 1132 } |
| 1055 if (datasetId == null) { | 1133 if (datasetId == null) { |
| 1056 throw new core.ArgumentError("Parameter datasetId is required."); | 1134 throw new core.ArgumentError("Parameter datasetId is required."); |
| 1057 } | 1135 } |
| 1058 | 1136 |
| 1059 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables'; | 1137 _url = 'projects/' + |
| 1138 commons.Escaper.ecapeVariable('$projectId') + |
| 1139 '/datasets/' + |
| 1140 commons.Escaper.ecapeVariable('$datasetId') + |
| 1141 '/tables'; |
| 1060 | 1142 |
| 1061 var _response = _requester.request(_url, | 1143 var _response = _requester.request(_url, "POST", |
| 1062 "POST", | 1144 body: _body, |
| 1063 body: _body, | 1145 queryParams: _queryParams, |
| 1064 queryParams: _queryParams, | 1146 uploadOptions: _uploadOptions, |
| 1065 uploadOptions: _uploadOptions, | 1147 uploadMedia: _uploadMedia, |
| 1066 uploadMedia: _uploadMedia, | 1148 downloadOptions: _downloadOptions); |
| 1067 downloadOptions: _downloadOptions); | |
| 1068 return _response.then((data) => new Table.fromJson(data)); | 1149 return _response.then((data) => new Table.fromJson(data)); |
| 1069 } | 1150 } |
| 1070 | 1151 |
| 1071 /** | 1152 /// Lists all tables in the specified dataset. Requires the READER dataset |
| 1072 * Lists all tables in the specified dataset. Requires the READER dataset | 1153 /// role. |
| 1073 * role. | 1154 /// |
| 1074 * | 1155 /// Request parameters: |
| 1075 * Request parameters: | 1156 /// |
| 1076 * | 1157 /// [projectId] - Project ID of the tables to list |
| 1077 * [projectId] - Project ID of the tables to list | 1158 /// |
| 1078 * | 1159 /// [datasetId] - Dataset ID of the tables to list |
| 1079 * [datasetId] - Dataset ID of the tables to list | 1160 /// |
| 1080 * | 1161 /// [maxResults] - Maximum number of results to return |
| 1081 * [maxResults] - Maximum number of results to return | 1162 /// |
| 1082 * | 1163 /// [pageToken] - Page token, returned by a previous call, to request the |
| 1083 * [pageToken] - Page token, returned by a previous call, to request the next | 1164 /// next page of results |
| 1084 * page of results | 1165 /// |
| 1085 * | 1166 /// Completes with a [TableList]. |
| 1086 * Completes with a [TableList]. | 1167 /// |
| 1087 * | 1168 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 1088 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 1169 /// an error. |
| 1089 * error. | 1170 /// |
| 1090 * | 1171 /// If the used [http.Client] completes with an error when making a REST |
| 1091 * If the used [http.Client] completes with an error when making a REST call, | 1172 /// call, this method will complete with the same error. |
| 1092 * this method will complete with the same error. | 1173 async.Future<TableList> list(core.String projectId, core.String datasetId, |
| 1093 */ | 1174 {core.int maxResults, core.String pageToken}) { |
| 1094 async.Future<TableList> list(core.String projectId, core.String datasetId, {co
re.int maxResults, core.String pageToken}) { | |
| 1095 var _url = null; | 1175 var _url = null; |
| 1096 var _queryParams = new core.Map(); | 1176 var _queryParams = new core.Map(); |
| 1097 var _uploadMedia = null; | 1177 var _uploadMedia = null; |
| 1098 var _uploadOptions = null; | 1178 var _uploadOptions = null; |
| 1099 var _downloadOptions = commons.DownloadOptions.Metadata; | 1179 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 1100 var _body = null; | 1180 var _body = null; |
| 1101 | 1181 |
| 1102 if (projectId == null) { | 1182 if (projectId == null) { |
| 1103 throw new core.ArgumentError("Parameter projectId is required."); | 1183 throw new core.ArgumentError("Parameter projectId is required."); |
| 1104 } | 1184 } |
| 1105 if (datasetId == null) { | 1185 if (datasetId == null) { |
| 1106 throw new core.ArgumentError("Parameter datasetId is required."); | 1186 throw new core.ArgumentError("Parameter datasetId is required."); |
| 1107 } | 1187 } |
| 1108 if (maxResults != null) { | 1188 if (maxResults != null) { |
| 1109 _queryParams["maxResults"] = ["${maxResults}"]; | 1189 _queryParams["maxResults"] = ["${maxResults}"]; |
| 1110 } | 1190 } |
| 1111 if (pageToken != null) { | 1191 if (pageToken != null) { |
| 1112 _queryParams["pageToken"] = [pageToken]; | 1192 _queryParams["pageToken"] = [pageToken]; |
| 1113 } | 1193 } |
| 1114 | 1194 |
| 1115 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables'; | 1195 _url = 'projects/' + |
| 1196 commons.Escaper.ecapeVariable('$projectId') + |
| 1197 '/datasets/' + |
| 1198 commons.Escaper.ecapeVariable('$datasetId') + |
| 1199 '/tables'; |
| 1116 | 1200 |
| 1117 var _response = _requester.request(_url, | 1201 var _response = _requester.request(_url, "GET", |
| 1118 "GET", | 1202 body: _body, |
| 1119 body: _body, | 1203 queryParams: _queryParams, |
| 1120 queryParams: _queryParams, | 1204 uploadOptions: _uploadOptions, |
| 1121 uploadOptions: _uploadOptions, | 1205 uploadMedia: _uploadMedia, |
| 1122 uploadMedia: _uploadMedia, | 1206 downloadOptions: _downloadOptions); |
| 1123 downloadOptions: _downloadOptions); | |
| 1124 return _response.then((data) => new TableList.fromJson(data)); | 1207 return _response.then((data) => new TableList.fromJson(data)); |
| 1125 } | 1208 } |
| 1126 | 1209 |
| 1127 /** | 1210 /// Updates information in an existing table. The update method replaces the |
| 1128 * Updates information in an existing table. The update method replaces the | 1211 /// entire table resource, whereas the patch method only replaces fields that |
| 1129 * entire table resource, whereas the patch method only replaces fields that | 1212 /// are provided in the submitted table resource. This method supports patch |
| 1130 * are provided in the submitted table resource. This method supports patch | 1213 /// semantics. |
| 1131 * semantics. | 1214 /// |
| 1132 * | 1215 /// [request] - The metadata request object. |
| 1133 * [request] - The metadata request object. | 1216 /// |
| 1134 * | 1217 /// Request parameters: |
| 1135 * Request parameters: | 1218 /// |
| 1136 * | 1219 /// [projectId] - Project ID of the table to update |
| 1137 * [projectId] - Project ID of the table to update | 1220 /// |
| 1138 * | 1221 /// [datasetId] - Dataset ID of the table to update |
| 1139 * [datasetId] - Dataset ID of the table to update | 1222 /// |
| 1140 * | 1223 /// [tableId] - Table ID of the table to update |
| 1141 * [tableId] - Table ID of the table to update | 1224 /// |
| 1142 * | 1225 /// Completes with a [Table]. |
| 1143 * Completes with a [Table]. | 1226 /// |
| 1144 * | 1227 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 1145 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 1228 /// an error. |
| 1146 * error. | 1229 /// |
| 1147 * | 1230 /// If the used [http.Client] completes with an error when making a REST |
| 1148 * If the used [http.Client] completes with an error when making a REST call, | 1231 /// call, this method will complete with the same error. |
| 1149 * this method will complete with the same error. | 1232 async.Future<Table> patch(Table request, core.String projectId, |
| 1150 */ | 1233 core.String datasetId, core.String tableId) { |
| 1151 async.Future<Table> patch(Table request, core.String projectId, core.String da
tasetId, core.String tableId) { | |
| 1152 var _url = null; | 1234 var _url = null; |
| 1153 var _queryParams = new core.Map(); | 1235 var _queryParams = new core.Map(); |
| 1154 var _uploadMedia = null; | 1236 var _uploadMedia = null; |
| 1155 var _uploadOptions = null; | 1237 var _uploadOptions = null; |
| 1156 var _downloadOptions = commons.DownloadOptions.Metadata; | 1238 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 1157 var _body = null; | 1239 var _body = null; |
| 1158 | 1240 |
| 1159 if (request != null) { | 1241 if (request != null) { |
| 1160 _body = convert.JSON.encode((request).toJson()); | 1242 _body = convert.JSON.encode((request).toJson()); |
| 1161 } | 1243 } |
| 1162 if (projectId == null) { | 1244 if (projectId == null) { |
| 1163 throw new core.ArgumentError("Parameter projectId is required."); | 1245 throw new core.ArgumentError("Parameter projectId is required."); |
| 1164 } | 1246 } |
| 1165 if (datasetId == null) { | 1247 if (datasetId == null) { |
| 1166 throw new core.ArgumentError("Parameter datasetId is required."); | 1248 throw new core.ArgumentError("Parameter datasetId is required."); |
| 1167 } | 1249 } |
| 1168 if (tableId == null) { | 1250 if (tableId == null) { |
| 1169 throw new core.ArgumentError("Parameter tableId is required."); | 1251 throw new core.ArgumentError("Parameter tableId is required."); |
| 1170 } | 1252 } |
| 1171 | 1253 |
| 1172 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper
.ecapeVariable('$tableId'); | 1254 _url = 'projects/' + |
| 1255 commons.Escaper.ecapeVariable('$projectId') + |
| 1256 '/datasets/' + |
| 1257 commons.Escaper.ecapeVariable('$datasetId') + |
| 1258 '/tables/' + |
| 1259 commons.Escaper.ecapeVariable('$tableId'); |
| 1173 | 1260 |
| 1174 var _response = _requester.request(_url, | 1261 var _response = _requester.request(_url, "PATCH", |
| 1175 "PATCH", | 1262 body: _body, |
| 1176 body: _body, | 1263 queryParams: _queryParams, |
| 1177 queryParams: _queryParams, | 1264 uploadOptions: _uploadOptions, |
| 1178 uploadOptions: _uploadOptions, | 1265 uploadMedia: _uploadMedia, |
| 1179 uploadMedia: _uploadMedia, | 1266 downloadOptions: _downloadOptions); |
| 1180 downloadOptions: _downloadOptions); | |
| 1181 return _response.then((data) => new Table.fromJson(data)); | 1267 return _response.then((data) => new Table.fromJson(data)); |
| 1182 } | 1268 } |
| 1183 | 1269 |
| 1184 /** | 1270 /// Updates information in an existing table. The update method replaces the |
| 1185 * Updates information in an existing table. The update method replaces the | 1271 /// entire table resource, whereas the patch method only replaces fields that |
| 1186 * entire table resource, whereas the patch method only replaces fields that | 1272 /// are provided in the submitted table resource. |
| 1187 * are provided in the submitted table resource. | 1273 /// |
| 1188 * | 1274 /// [request] - The metadata request object. |
| 1189 * [request] - The metadata request object. | 1275 /// |
| 1190 * | 1276 /// Request parameters: |
| 1191 * Request parameters: | 1277 /// |
| 1192 * | 1278 /// [projectId] - Project ID of the table to update |
| 1193 * [projectId] - Project ID of the table to update | 1279 /// |
| 1194 * | 1280 /// [datasetId] - Dataset ID of the table to update |
| 1195 * [datasetId] - Dataset ID of the table to update | 1281 /// |
| 1196 * | 1282 /// [tableId] - Table ID of the table to update |
| 1197 * [tableId] - Table ID of the table to update | 1283 /// |
| 1198 * | 1284 /// Completes with a [Table]. |
| 1199 * Completes with a [Table]. | 1285 /// |
| 1200 * | 1286 /// Completes with a [commons.ApiRequestError] if the API endpoint returned |
| 1201 * Completes with a [commons.ApiRequestError] if the API endpoint returned an | 1287 /// an error. |
| 1202 * error. | 1288 /// |
| 1203 * | 1289 /// If the used [http.Client] completes with an error when making a REST |
| 1204 * If the used [http.Client] completes with an error when making a REST call, | 1290 /// call, this method will complete with the same error. |
| 1205 * this method will complete with the same error. | 1291 async.Future<Table> update(Table request, core.String projectId, |
| 1206 */ | 1292 core.String datasetId, core.String tableId) { |
| 1207 async.Future<Table> update(Table request, core.String projectId, core.String d
atasetId, core.String tableId) { | |
| 1208 var _url = null; | 1293 var _url = null; |
| 1209 var _queryParams = new core.Map(); | 1294 var _queryParams = new core.Map(); |
| 1210 var _uploadMedia = null; | 1295 var _uploadMedia = null; |
| 1211 var _uploadOptions = null; | 1296 var _uploadOptions = null; |
| 1212 var _downloadOptions = commons.DownloadOptions.Metadata; | 1297 var _downloadOptions = commons.DownloadOptions.Metadata; |
| 1213 var _body = null; | 1298 var _body = null; |
| 1214 | 1299 |
| 1215 if (request != null) { | 1300 if (request != null) { |
| 1216 _body = convert.JSON.encode((request).toJson()); | 1301 _body = convert.JSON.encode((request).toJson()); |
| 1217 } | 1302 } |
| 1218 if (projectId == null) { | 1303 if (projectId == null) { |
| 1219 throw new core.ArgumentError("Parameter projectId is required."); | 1304 throw new core.ArgumentError("Parameter projectId is required."); |
| 1220 } | 1305 } |
| 1221 if (datasetId == null) { | 1306 if (datasetId == null) { |
| 1222 throw new core.ArgumentError("Parameter datasetId is required."); | 1307 throw new core.ArgumentError("Parameter datasetId is required."); |
| 1223 } | 1308 } |
| 1224 if (tableId == null) { | 1309 if (tableId == null) { |
| 1225 throw new core.ArgumentError("Parameter tableId is required."); | 1310 throw new core.ArgumentError("Parameter tableId is required."); |
| 1226 } | 1311 } |
| 1227 | 1312 |
| 1228 _url = 'projects/' + commons.Escaper.ecapeVariable('$projectId') + '/dataset
s/' + commons.Escaper.ecapeVariable('$datasetId') + '/tables/' + commons.Escaper
.ecapeVariable('$tableId'); | 1313 _url = 'projects/' + |
| 1314 commons.Escaper.ecapeVariable('$projectId') + |
| 1315 '/datasets/' + |
| 1316 commons.Escaper.ecapeVariable('$datasetId') + |
| 1317 '/tables/' + |
| 1318 commons.Escaper.ecapeVariable('$tableId'); |
| 1229 | 1319 |
| 1230 var _response = _requester.request(_url, | 1320 var _response = _requester.request(_url, "PUT", |
| 1231 "PUT", | 1321 body: _body, |
| 1232 body: _body, | 1322 queryParams: _queryParams, |
| 1233 queryParams: _queryParams, | 1323 uploadOptions: _uploadOptions, |
| 1234 uploadOptions: _uploadOptions, | 1324 uploadMedia: _uploadMedia, |
| 1235 uploadMedia: _uploadMedia, | 1325 downloadOptions: _downloadOptions); |
| 1236 downloadOptions: _downloadOptions); | |
| 1237 return _response.then((data) => new Table.fromJson(data)); | 1326 return _response.then((data) => new Table.fromJson(data)); |
| 1238 } | 1327 } |
| 1239 | |
| 1240 } | 1328 } |
| 1241 | 1329 |
| 1330 class BigtableColumn { |
| 1331 /// [Optional] The encoding of the values when the type is not STRING. |
| 1332 /// Acceptable encoding values are: TEXT - indicates values are alphanumeric |
| 1333 /// text strings. BINARY - indicates values are encoded using HBase |
| 1334 /// Bytes.toBytes family of functions. 'encoding' can also be set at the |
| 1335 /// column family level. However, the setting at this level takes precedence |
| 1336 /// if 'encoding' is set at both levels. |
| 1337 core.String encoding; |
| 1242 | 1338 |
| 1339 /// [Optional] If the qualifier is not a valid BigQuery field identifier i.e. |
| 1340 /// does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided |
| 1341 /// as the column field name and is used as field name in queries. |
| 1342 core.String fieldName; |
| 1243 | 1343 |
| 1244 class BigtableColumn { | 1344 /// [Optional] If this is set, only the latest version of value in this |
| 1245 /** | 1345 /// column are exposed. 'onlyReadLatest' can also be set at the column family |
| 1246 * [Optional] The encoding of the values when the type is not STRING. | 1346 /// level. However, the setting at this level takes precedence if |
| 1247 * Acceptable encoding values are: TEXT - indicates values are alphanumeric | 1347 /// 'onlyReadLatest' is set at both levels. |
| 1248 * text strings. BINARY - indicates values are encoded using HBase | |
| 1249 * Bytes.toBytes family of functions. 'encoding' can also be set at the column | |
| 1250 * family level. However, the setting at this level takes precedence if | |
| 1251 * 'encoding' is set at both levels. | |
| 1252 */ | |
| 1253 core.String encoding; | |
| 1254 /** | |
| 1255 * [Optional] If the qualifier is not a valid BigQuery field identifier i.e. | |
| 1256 * does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided | |
| 1257 * as the column field name and is used as field name in queries. | |
| 1258 */ | |
| 1259 core.String fieldName; | |
| 1260 /** | |
| 1261 * [Optional] If this is set, only the latest version of value in this column | |
| 1262 * are exposed. 'onlyReadLatest' can also be set at the column family level. | |
| 1263 * However, the setting at this level takes precedence if 'onlyReadLatest' is | |
| 1264 * set at both levels. | |
| 1265 */ | |
| 1266 core.bool onlyReadLatest; | 1348 core.bool onlyReadLatest; |
| 1267 /** | 1349 |
| 1268 * [Required] Qualifier of the column. Columns in the parent column family | 1350 /// [Required] Qualifier of the column. Columns in the parent column family |
| 1269 * that has this exact qualifier are exposed as . field. If the qualifier is | 1351 /// that has this exact qualifier are exposed as . field. If the qualifier is |
| 1270 * valid UTF-8 string, it can be specified in the qualifier_string field. | 1352 /// valid UTF-8 string, it can be specified in the qualifier_string field. |
| 1271 * Otherwise, a base-64 encoded value must be set to qualifier_encoded. The | 1353 /// Otherwise, a base-64 encoded value must be set to qualifier_encoded. The |
| 1272 * column field name is the same as the column qualifier. However, if the | 1354 /// column field name is the same as the column qualifier. However, if the |
| 1273 * qualifier is not a valid BigQuery field identifier i.e. does not match | 1355 /// qualifier is not a valid BigQuery field identifier i.e. does not match |
| 1274 * [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as field_name. | 1356 /// [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as field_name. |
| 1275 */ | |
| 1276 core.String qualifierEncoded; | 1357 core.String qualifierEncoded; |
| 1277 core.List<core.int> get qualifierEncodedAsBytes { | 1358 core.List<core.int> get qualifierEncodedAsBytes { |
| 1278 return convert.BASE64.decode(qualifierEncoded); | 1359 return convert.BASE64.decode(qualifierEncoded); |
| 1279 } | 1360 } |
| 1280 | 1361 |
| 1281 void set qualifierEncodedAsBytes(core.List<core.int> _bytes) { | 1362 void set qualifierEncodedAsBytes(core.List<core.int> _bytes) { |
| 1282 qualifierEncoded = convert.BASE64.encode(_bytes).replaceAll("/", "_").replac
eAll("+", "-"); | 1363 qualifierEncoded = |
| 1364 convert.BASE64.encode(_bytes).replaceAll("/", "_").replaceAll("+", "-"); |
| 1283 } | 1365 } |
| 1366 |
| 1284 core.String qualifierString; | 1367 core.String qualifierString; |
| 1285 /** | 1368 |
| 1286 * [Optional] The type to convert the value in cells of this column. The | 1369 /// [Optional] The type to convert the value in cells of this column. The |
| 1287 * values are expected to be encoded using HBase Bytes.toBytes function when | 1370 /// values are expected to be encoded using HBase Bytes.toBytes function when |
| 1288 * using the BINARY encoding value. Following BigQuery types are allowed | 1371 /// using the BINARY encoding value. Following BigQuery types are allowed |
| 1289 * (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is | 1372 /// (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is |
| 1290 * BYTES. 'type' can also be set at the column family level. However, the | 1373 /// BYTES. 'type' can also be set at the column family level. However, the |
| 1291 * setting at this level takes precedence if 'type' is set at both levels. | 1374 /// setting at this level takes precedence if 'type' is set at both levels. |
| 1292 */ | |
| 1293 core.String type; | 1375 core.String type; |
| 1294 | 1376 |
| 1295 BigtableColumn(); | 1377 BigtableColumn(); |
| 1296 | 1378 |
| 1297 BigtableColumn.fromJson(core.Map _json) { | 1379 BigtableColumn.fromJson(core.Map _json) { |
| 1298 if (_json.containsKey("encoding")) { | 1380 if (_json.containsKey("encoding")) { |
| 1299 encoding = _json["encoding"]; | 1381 encoding = _json["encoding"]; |
| 1300 } | 1382 } |
| 1301 if (_json.containsKey("fieldName")) { | 1383 if (_json.containsKey("fieldName")) { |
| 1302 fieldName = _json["fieldName"]; | 1384 fieldName = _json["fieldName"]; |
| 1303 } | 1385 } |
| 1304 if (_json.containsKey("onlyReadLatest")) { | 1386 if (_json.containsKey("onlyReadLatest")) { |
| 1305 onlyReadLatest = _json["onlyReadLatest"]; | 1387 onlyReadLatest = _json["onlyReadLatest"]; |
| 1306 } | 1388 } |
| 1307 if (_json.containsKey("qualifierEncoded")) { | 1389 if (_json.containsKey("qualifierEncoded")) { |
| 1308 qualifierEncoded = _json["qualifierEncoded"]; | 1390 qualifierEncoded = _json["qualifierEncoded"]; |
| 1309 } | 1391 } |
| 1310 if (_json.containsKey("qualifierString")) { | 1392 if (_json.containsKey("qualifierString")) { |
| 1311 qualifierString = _json["qualifierString"]; | 1393 qualifierString = _json["qualifierString"]; |
| 1312 } | 1394 } |
| 1313 if (_json.containsKey("type")) { | 1395 if (_json.containsKey("type")) { |
| 1314 type = _json["type"]; | 1396 type = _json["type"]; |
| 1315 } | 1397 } |
| 1316 } | 1398 } |
| 1317 | 1399 |
| 1318 core.Map<core.String, core.Object> toJson() { | 1400 core.Map<core.String, core.Object> toJson() { |
| 1319 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1401 final core.Map<core.String, core.Object> _json = |
| 1402 new core.Map<core.String, core.Object>(); |
| 1320 if (encoding != null) { | 1403 if (encoding != null) { |
| 1321 _json["encoding"] = encoding; | 1404 _json["encoding"] = encoding; |
| 1322 } | 1405 } |
| 1323 if (fieldName != null) { | 1406 if (fieldName != null) { |
| 1324 _json["fieldName"] = fieldName; | 1407 _json["fieldName"] = fieldName; |
| 1325 } | 1408 } |
| 1326 if (onlyReadLatest != null) { | 1409 if (onlyReadLatest != null) { |
| 1327 _json["onlyReadLatest"] = onlyReadLatest; | 1410 _json["onlyReadLatest"] = onlyReadLatest; |
| 1328 } | 1411 } |
| 1329 if (qualifierEncoded != null) { | 1412 if (qualifierEncoded != null) { |
| 1330 _json["qualifierEncoded"] = qualifierEncoded; | 1413 _json["qualifierEncoded"] = qualifierEncoded; |
| 1331 } | 1414 } |
| 1332 if (qualifierString != null) { | 1415 if (qualifierString != null) { |
| 1333 _json["qualifierString"] = qualifierString; | 1416 _json["qualifierString"] = qualifierString; |
| 1334 } | 1417 } |
| 1335 if (type != null) { | 1418 if (type != null) { |
| 1336 _json["type"] = type; | 1419 _json["type"] = type; |
| 1337 } | 1420 } |
| 1338 return _json; | 1421 return _json; |
| 1339 } | 1422 } |
| 1340 } | 1423 } |
| 1341 | 1424 |
| 1342 class BigtableColumnFamily { | 1425 class BigtableColumnFamily { |
| 1343 /** | 1426 /// [Optional] Lists of columns that should be exposed as individual fields |
| 1344 * [Optional] Lists of columns that should be exposed as individual fields as | 1427 /// as opposed to a list of (column name, value) pairs. All columns whose |
| 1345 * opposed to a list of (column name, value) pairs. All columns whose | 1428 /// qualifier matches a qualifier in this list can be accessed as .. Other |
| 1346 * qualifier matches a qualifier in this list can be accessed as .. Other | 1429 /// columns can be accessed as a list through .Column field. |
| 1347 * columns can be accessed as a list through .Column field. | |
| 1348 */ | |
| 1349 core.List<BigtableColumn> columns; | 1430 core.List<BigtableColumn> columns; |
| 1350 /** | 1431 |
| 1351 * [Optional] The encoding of the values when the type is not STRING. | 1432 /// [Optional] The encoding of the values when the type is not STRING. |
| 1352 * Acceptable encoding values are: TEXT - indicates values are alphanumeric | 1433 /// Acceptable encoding values are: TEXT - indicates values are alphanumeric |
| 1353 * text strings. BINARY - indicates values are encoded using HBase | 1434 /// text strings. BINARY - indicates values are encoded using HBase |
| 1354 * Bytes.toBytes family of functions. This can be overridden for a specific | 1435 /// Bytes.toBytes family of functions. This can be overridden for a specific |
| 1355 * column by listing that column in 'columns' and specifying an encoding for | 1436 /// column by listing that column in 'columns' and specifying an encoding for |
| 1356 * it. | 1437 /// it. |
| 1357 */ | |
| 1358 core.String encoding; | 1438 core.String encoding; |
| 1359 /** Identifier of the column family. */ | 1439 |
| 1440 /// Identifier of the column family. |
| 1360 core.String familyId; | 1441 core.String familyId; |
| 1361 /** | 1442 |
| 1362 * [Optional] If this is set only the latest version of value are exposed for | 1443 /// [Optional] If this is set only the latest version of value are exposed |
| 1363 * all columns in this column family. This can be overridden for a specific | 1444 /// for all columns in this column family. This can be overridden for a |
| 1364 * column by listing that column in 'columns' and specifying a different | 1445 /// specific column by listing that column in 'columns' and specifying a |
| 1365 * setting for that column. | 1446 /// different setting for that column. |
| 1366 */ | |
| 1367 core.bool onlyReadLatest; | 1447 core.bool onlyReadLatest; |
| 1368 /** | 1448 |
| 1369 * [Optional] The type to convert the value in cells of this column family. | 1449 /// [Optional] The type to convert the value in cells of this column family. |
| 1370 * The values are expected to be encoded using HBase Bytes.toBytes function | 1450 /// The values are expected to be encoded using HBase Bytes.toBytes function |
| 1371 * when using the BINARY encoding value. Following BigQuery types are allowed | 1451 /// when using the BINARY encoding value. Following BigQuery types are |
| 1372 * (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is | 1452 /// allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default |
| 1373 * BYTES. This can be overridden for a specific column by listing that column | 1453 /// type is BYTES. This can be overridden for a specific column by listing |
| 1374 * in 'columns' and specifying a type for it. | 1454 /// that column in 'columns' and specifying a type for it. |
| 1375 */ | |
| 1376 core.String type; | 1455 core.String type; |
| 1377 | 1456 |
| 1378 BigtableColumnFamily(); | 1457 BigtableColumnFamily(); |
| 1379 | 1458 |
| 1380 BigtableColumnFamily.fromJson(core.Map _json) { | 1459 BigtableColumnFamily.fromJson(core.Map _json) { |
| 1381 if (_json.containsKey("columns")) { | 1460 if (_json.containsKey("columns")) { |
| 1382 columns = _json["columns"].map((value) => new BigtableColumn.fromJson(valu
e)).toList(); | 1461 columns = _json["columns"] |
| 1462 .map((value) => new BigtableColumn.fromJson(value)) |
| 1463 .toList(); |
| 1383 } | 1464 } |
| 1384 if (_json.containsKey("encoding")) { | 1465 if (_json.containsKey("encoding")) { |
| 1385 encoding = _json["encoding"]; | 1466 encoding = _json["encoding"]; |
| 1386 } | 1467 } |
| 1387 if (_json.containsKey("familyId")) { | 1468 if (_json.containsKey("familyId")) { |
| 1388 familyId = _json["familyId"]; | 1469 familyId = _json["familyId"]; |
| 1389 } | 1470 } |
| 1390 if (_json.containsKey("onlyReadLatest")) { | 1471 if (_json.containsKey("onlyReadLatest")) { |
| 1391 onlyReadLatest = _json["onlyReadLatest"]; | 1472 onlyReadLatest = _json["onlyReadLatest"]; |
| 1392 } | 1473 } |
| 1393 if (_json.containsKey("type")) { | 1474 if (_json.containsKey("type")) { |
| 1394 type = _json["type"]; | 1475 type = _json["type"]; |
| 1395 } | 1476 } |
| 1396 } | 1477 } |
| 1397 | 1478 |
| 1398 core.Map<core.String, core.Object> toJson() { | 1479 core.Map<core.String, core.Object> toJson() { |
| 1399 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1480 final core.Map<core.String, core.Object> _json = |
| 1481 new core.Map<core.String, core.Object>(); |
| 1400 if (columns != null) { | 1482 if (columns != null) { |
| 1401 _json["columns"] = columns.map((value) => (value).toJson()).toList(); | 1483 _json["columns"] = columns.map((value) => (value).toJson()).toList(); |
| 1402 } | 1484 } |
| 1403 if (encoding != null) { | 1485 if (encoding != null) { |
| 1404 _json["encoding"] = encoding; | 1486 _json["encoding"] = encoding; |
| 1405 } | 1487 } |
| 1406 if (familyId != null) { | 1488 if (familyId != null) { |
| 1407 _json["familyId"] = familyId; | 1489 _json["familyId"] = familyId; |
| 1408 } | 1490 } |
| 1409 if (onlyReadLatest != null) { | 1491 if (onlyReadLatest != null) { |
| 1410 _json["onlyReadLatest"] = onlyReadLatest; | 1492 _json["onlyReadLatest"] = onlyReadLatest; |
| 1411 } | 1493 } |
| 1412 if (type != null) { | 1494 if (type != null) { |
| 1413 _json["type"] = type; | 1495 _json["type"] = type; |
| 1414 } | 1496 } |
| 1415 return _json; | 1497 return _json; |
| 1416 } | 1498 } |
| 1417 } | 1499 } |
| 1418 | 1500 |
| 1419 class BigtableOptions { | 1501 class BigtableOptions { |
| 1420 /** | 1502 /// [Optional] List of column families to expose in the table schema along |
| 1421 * [Optional] List of column families to expose in the table schema along with | 1503 /// with their types. This list restricts the column families that can be |
| 1422 * their types. This list restricts the column families that can be referenced | 1504 /// referenced in queries and specifies their value types. You can use this |
| 1423 * in queries and specifies their value types. You can use this list to do | 1505 /// list to do type conversions - see the 'type' field for more details. If |
| 1424 * type conversions - see the 'type' field for more details. If you leave this | 1506 /// you leave this list empty, all column families are present in the table |
| 1425 * list empty, all column families are present in the table schema and their | 1507 /// schema and their values are read as BYTES. During a query only the column |
| 1426 * values are read as BYTES. During a query only the column families | 1508 /// families referenced in that query are read from Bigtable. |
| 1427 * referenced in that query are read from Bigtable. | |
| 1428 */ | |
| 1429 core.List<BigtableColumnFamily> columnFamilies; | 1509 core.List<BigtableColumnFamily> columnFamilies; |
| 1430 /** | 1510 |
| 1431 * [Optional] If field is true, then the column families that are not | 1511 /// [Optional] If field is true, then the column families that are not |
| 1432 * specified in columnFamilies list are not exposed in the table schema. | 1512 /// specified in columnFamilies list are not exposed in the table schema. |
| 1433 * Otherwise, they are read with BYTES type values. The default value is | 1513 /// Otherwise, they are read with BYTES type values. The default value is |
| 1434 * false. | 1514 /// false. |
| 1435 */ | |
| 1436 core.bool ignoreUnspecifiedColumnFamilies; | 1515 core.bool ignoreUnspecifiedColumnFamilies; |
| 1437 /** | 1516 |
| 1438 * [Optional] If field is true, then the rowkey column families will be read | 1517 /// [Optional] If field is true, then the rowkey column families will be read |
| 1439 * and converted to string. Otherwise they are read with BYTES type values and | 1518 /// and converted to string. Otherwise they are read with BYTES type values |
| 1440 * users need to manually cast them with CAST if necessary. The default value | 1519 /// and users need to manually cast them with CAST if necessary. The default |
| 1441 * is false. | 1520 /// value is false. |
| 1442 */ | |
| 1443 core.bool readRowkeyAsString; | 1521 core.bool readRowkeyAsString; |
| 1444 | 1522 |
| 1445 BigtableOptions(); | 1523 BigtableOptions(); |
| 1446 | 1524 |
| 1447 BigtableOptions.fromJson(core.Map _json) { | 1525 BigtableOptions.fromJson(core.Map _json) { |
| 1448 if (_json.containsKey("columnFamilies")) { | 1526 if (_json.containsKey("columnFamilies")) { |
| 1449 columnFamilies = _json["columnFamilies"].map((value) => new BigtableColumn
Family.fromJson(value)).toList(); | 1527 columnFamilies = _json["columnFamilies"] |
| 1528 .map((value) => new BigtableColumnFamily.fromJson(value)) |
| 1529 .toList(); |
| 1450 } | 1530 } |
| 1451 if (_json.containsKey("ignoreUnspecifiedColumnFamilies")) { | 1531 if (_json.containsKey("ignoreUnspecifiedColumnFamilies")) { |
| 1452 ignoreUnspecifiedColumnFamilies = _json["ignoreUnspecifiedColumnFamilies"]
; | 1532 ignoreUnspecifiedColumnFamilies = |
| 1533 _json["ignoreUnspecifiedColumnFamilies"]; |
| 1453 } | 1534 } |
| 1454 if (_json.containsKey("readRowkeyAsString")) { | 1535 if (_json.containsKey("readRowkeyAsString")) { |
| 1455 readRowkeyAsString = _json["readRowkeyAsString"]; | 1536 readRowkeyAsString = _json["readRowkeyAsString"]; |
| 1456 } | 1537 } |
| 1457 } | 1538 } |
| 1458 | 1539 |
| 1459 core.Map<core.String, core.Object> toJson() { | 1540 core.Map<core.String, core.Object> toJson() { |
| 1460 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1541 final core.Map<core.String, core.Object> _json = |
| 1542 new core.Map<core.String, core.Object>(); |
| 1461 if (columnFamilies != null) { | 1543 if (columnFamilies != null) { |
| 1462 _json["columnFamilies"] = columnFamilies.map((value) => (value).toJson()).
toList(); | 1544 _json["columnFamilies"] = |
| 1545 columnFamilies.map((value) => (value).toJson()).toList(); |
| 1463 } | 1546 } |
| 1464 if (ignoreUnspecifiedColumnFamilies != null) { | 1547 if (ignoreUnspecifiedColumnFamilies != null) { |
| 1465 _json["ignoreUnspecifiedColumnFamilies"] = ignoreUnspecifiedColumnFamilies
; | 1548 _json["ignoreUnspecifiedColumnFamilies"] = |
| 1549 ignoreUnspecifiedColumnFamilies; |
| 1466 } | 1550 } |
| 1467 if (readRowkeyAsString != null) { | 1551 if (readRowkeyAsString != null) { |
| 1468 _json["readRowkeyAsString"] = readRowkeyAsString; | 1552 _json["readRowkeyAsString"] = readRowkeyAsString; |
| 1469 } | 1553 } |
| 1470 return _json; | 1554 return _json; |
| 1471 } | 1555 } |
| 1472 } | 1556 } |
| 1473 | 1557 |
| 1474 class CsvOptions { | 1558 class CsvOptions { |
| 1475 /** | 1559 /// [Optional] Indicates if BigQuery should accept rows that are missing |
| 1476 * [Optional] Indicates if BigQuery should accept rows that are missing | 1560 /// trailing optional columns. If true, BigQuery treats missing trailing |
| 1477 * trailing optional columns. If true, BigQuery treats missing trailing | 1561 /// columns as null values. If false, records with missing trailing columns |
| 1478 * columns as null values. If false, records with missing trailing columns are | 1562 /// are treated as bad records, and if there are too many bad records, an |
| 1479 * treated as bad records, and if there are too many bad records, an invalid | 1563 /// invalid error is returned in the job result. The default value is false. |
| 1480 * error is returned in the job result. The default value is false. | |
| 1481 */ | |
| 1482 core.bool allowJaggedRows; | 1564 core.bool allowJaggedRows; |
| 1483 /** | 1565 |
| 1484 * [Optional] Indicates if BigQuery should allow quoted data sections that | 1566 /// [Optional] Indicates if BigQuery should allow quoted data sections that |
| 1485 * contain newline characters in a CSV file. The default value is false. | 1567 /// contain newline characters in a CSV file. The default value is false. |
| 1486 */ | |
| 1487 core.bool allowQuotedNewlines; | 1568 core.bool allowQuotedNewlines; |
| 1488 /** | 1569 |
| 1489 * [Optional] The character encoding of the data. The supported values are | 1570 /// [Optional] The character encoding of the data. The supported values are |
| 1490 * UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data | 1571 /// UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the |
| 1491 * after the raw, binary data has been split using the values of the quote and | 1572 /// data after the raw, binary data has been split using the values of the |
| 1492 * fieldDelimiter properties. | 1573 /// quote and fieldDelimiter properties. |
| 1493 */ | |
| 1494 core.String encoding; | 1574 core.String encoding; |
| 1495 /** | 1575 |
| 1496 * [Optional] The separator for fields in a CSV file. BigQuery converts the | 1576 /// [Optional] The separator for fields in a CSV file. BigQuery converts the |
| 1497 * string to ISO-8859-1 encoding, and then uses the first byte of the encoded | 1577 /// string to ISO-8859-1 encoding, and then uses the first byte of the |
| 1498 * string to split the data in its raw, binary state. BigQuery also supports | 1578 /// encoded string to split the data in its raw, binary state. BigQuery also |
| 1499 * the escape sequence "\t" to specify a tab separator. The default value is a | 1579 /// supports the escape sequence "\t" to specify a tab separator. The default |
| 1500 * comma (','). | 1580 /// value is a comma (','). |
| 1501 */ | |
| 1502 core.String fieldDelimiter; | 1581 core.String fieldDelimiter; |
| 1503 /** | 1582 |
| 1504 * [Optional] The value that is used to quote data sections in a CSV file. | 1583 /// [Optional] The value that is used to quote data sections in a CSV file. |
| 1505 * BigQuery converts the string to ISO-8859-1 encoding, and then uses the | 1584 /// BigQuery converts the string to ISO-8859-1 encoding, and then uses the |
| 1506 * first byte of the encoded string to split the data in its raw, binary | 1585 /// first byte of the encoded string to split the data in its raw, binary |
| 1507 * state. The default value is a double-quote ('"'). If your data does not | 1586 /// state. The default value is a double-quote ('"'). If your data does not |
| 1508 * contain quoted sections, set the property value to an empty string. If your | 1587 /// contain quoted sections, set the property value to an empty string. If |
| 1509 * data contains quoted newline characters, you must also set the | 1588 /// your data contains quoted newline characters, you must also set the |
| 1510 * allowQuotedNewlines property to true. | 1589 /// allowQuotedNewlines property to true. |
| 1511 */ | |
| 1512 core.String quote; | 1590 core.String quote; |
| 1513 /** | 1591 |
| 1514 * [Optional] The number of rows at the top of a CSV file that BigQuery will | 1592 /// [Optional] The number of rows at the top of a CSV file that BigQuery will |
| 1515 * skip when reading the data. The default value is 0. This property is useful | 1593 /// skip when reading the data. The default value is 0. This property is |
| 1516 * if you have header rows in the file that should be skipped. | 1594 /// useful if you have header rows in the file that should be skipped. |
| 1517 */ | |
| 1518 core.String skipLeadingRows; | 1595 core.String skipLeadingRows; |
| 1519 | 1596 |
| 1520 CsvOptions(); | 1597 CsvOptions(); |
| 1521 | 1598 |
| 1522 CsvOptions.fromJson(core.Map _json) { | 1599 CsvOptions.fromJson(core.Map _json) { |
| 1523 if (_json.containsKey("allowJaggedRows")) { | 1600 if (_json.containsKey("allowJaggedRows")) { |
| 1524 allowJaggedRows = _json["allowJaggedRows"]; | 1601 allowJaggedRows = _json["allowJaggedRows"]; |
| 1525 } | 1602 } |
| 1526 if (_json.containsKey("allowQuotedNewlines")) { | 1603 if (_json.containsKey("allowQuotedNewlines")) { |
| 1527 allowQuotedNewlines = _json["allowQuotedNewlines"]; | 1604 allowQuotedNewlines = _json["allowQuotedNewlines"]; |
| 1528 } | 1605 } |
| 1529 if (_json.containsKey("encoding")) { | 1606 if (_json.containsKey("encoding")) { |
| 1530 encoding = _json["encoding"]; | 1607 encoding = _json["encoding"]; |
| 1531 } | 1608 } |
| 1532 if (_json.containsKey("fieldDelimiter")) { | 1609 if (_json.containsKey("fieldDelimiter")) { |
| 1533 fieldDelimiter = _json["fieldDelimiter"]; | 1610 fieldDelimiter = _json["fieldDelimiter"]; |
| 1534 } | 1611 } |
| 1535 if (_json.containsKey("quote")) { | 1612 if (_json.containsKey("quote")) { |
| 1536 quote = _json["quote"]; | 1613 quote = _json["quote"]; |
| 1537 } | 1614 } |
| 1538 if (_json.containsKey("skipLeadingRows")) { | 1615 if (_json.containsKey("skipLeadingRows")) { |
| 1539 skipLeadingRows = _json["skipLeadingRows"]; | 1616 skipLeadingRows = _json["skipLeadingRows"]; |
| 1540 } | 1617 } |
| 1541 } | 1618 } |
| 1542 | 1619 |
| 1543 core.Map<core.String, core.Object> toJson() { | 1620 core.Map<core.String, core.Object> toJson() { |
| 1544 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1621 final core.Map<core.String, core.Object> _json = |
| 1622 new core.Map<core.String, core.Object>(); |
| 1545 if (allowJaggedRows != null) { | 1623 if (allowJaggedRows != null) { |
| 1546 _json["allowJaggedRows"] = allowJaggedRows; | 1624 _json["allowJaggedRows"] = allowJaggedRows; |
| 1547 } | 1625 } |
| 1548 if (allowQuotedNewlines != null) { | 1626 if (allowQuotedNewlines != null) { |
| 1549 _json["allowQuotedNewlines"] = allowQuotedNewlines; | 1627 _json["allowQuotedNewlines"] = allowQuotedNewlines; |
| 1550 } | 1628 } |
| 1551 if (encoding != null) { | 1629 if (encoding != null) { |
| 1552 _json["encoding"] = encoding; | 1630 _json["encoding"] = encoding; |
| 1553 } | 1631 } |
| 1554 if (fieldDelimiter != null) { | 1632 if (fieldDelimiter != null) { |
| 1555 _json["fieldDelimiter"] = fieldDelimiter; | 1633 _json["fieldDelimiter"] = fieldDelimiter; |
| 1556 } | 1634 } |
| 1557 if (quote != null) { | 1635 if (quote != null) { |
| 1558 _json["quote"] = quote; | 1636 _json["quote"] = quote; |
| 1559 } | 1637 } |
| 1560 if (skipLeadingRows != null) { | 1638 if (skipLeadingRows != null) { |
| 1561 _json["skipLeadingRows"] = skipLeadingRows; | 1639 _json["skipLeadingRows"] = skipLeadingRows; |
| 1562 } | 1640 } |
| 1563 return _json; | 1641 return _json; |
| 1564 } | 1642 } |
| 1565 } | 1643 } |
| 1566 | 1644 |
| 1567 class DatasetAccess { | 1645 class DatasetAccess { |
| 1568 /** | 1646 /// [Pick one] A domain to grant access to. Any users signed in with the |
| 1569 * [Pick one] A domain to grant access to. Any users signed in with the domain | 1647 /// domain specified will be granted the specified access. Example: |
| 1570 * specified will be granted the specified access. Example: "example.com". | 1648 /// "example.com". |
| 1571 */ | |
| 1572 core.String domain; | 1649 core.String domain; |
| 1573 /** [Pick one] An email address of a Google Group to grant access to. */ | 1650 |
| 1651 /// [Pick one] An email address of a Google Group to grant access to. |
| 1574 core.String groupByEmail; | 1652 core.String groupByEmail; |
| 1575 /** | 1653 |
| 1576 * [Required] Describes the rights granted to the user specified by the other | 1654 /// [Required] Describes the rights granted to the user specified by the |
| 1577 * member of the access object. The following string values are supported: | 1655 /// other member of the access object. The following string values are |
| 1578 * READER, WRITER, OWNER. | 1656 /// supported: READER, WRITER, OWNER. |
| 1579 */ | |
| 1580 core.String role; | 1657 core.String role; |
| 1581 /** | 1658 |
| 1582 * [Pick one] A special group to grant access to. Possible values include: | 1659 /// [Pick one] A special group to grant access to. Possible values include: |
| 1583 * projectOwners: Owners of the enclosing project. projectReaders: Readers of | 1660 /// projectOwners: Owners of the enclosing project. projectReaders: Readers |
| 1584 * the enclosing project. projectWriters: Writers of the enclosing project. | 1661 /// of the enclosing project. projectWriters: Writers of the enclosing |
| 1585 * allAuthenticatedUsers: All authenticated BigQuery users. | 1662 /// project. allAuthenticatedUsers: All authenticated BigQuery users. |
| 1586 */ | |
| 1587 core.String specialGroup; | 1663 core.String specialGroup; |
| 1588 /** | 1664 |
| 1589 * [Pick one] An email address of a user to grant access to. For example: | 1665 /// [Pick one] An email address of a user to grant access to. For example: |
| 1590 * fred@example.com. | 1666 /// fred@example.com. |
| 1591 */ | |
| 1592 core.String userByEmail; | 1667 core.String userByEmail; |
| 1593 /** | 1668 |
| 1594 * [Pick one] A view from a different dataset to grant access to. Queries | 1669 /// [Pick one] A view from a different dataset to grant access to. Queries |
| 1595 * executed against that view will have read access to tables in this dataset. | 1670 /// executed against that view will have read access to tables in this |
| 1596 * The role field is not required when this field is set. If that view is | 1671 /// dataset. The role field is not required when this field is set. If that |
| 1597 * updated by any user, access to the view needs to be granted again via an | 1672 /// view is updated by any user, access to the view needs to be granted again |
| 1598 * update operation. | 1673 /// via an update operation. |
| 1599 */ | |
| 1600 TableReference view; | 1674 TableReference view; |
| 1601 | 1675 |
| 1602 DatasetAccess(); | 1676 DatasetAccess(); |
| 1603 | 1677 |
| 1604 DatasetAccess.fromJson(core.Map _json) { | 1678 DatasetAccess.fromJson(core.Map _json) { |
| 1605 if (_json.containsKey("domain")) { | 1679 if (_json.containsKey("domain")) { |
| 1606 domain = _json["domain"]; | 1680 domain = _json["domain"]; |
| 1607 } | 1681 } |
| 1608 if (_json.containsKey("groupByEmail")) { | 1682 if (_json.containsKey("groupByEmail")) { |
| 1609 groupByEmail = _json["groupByEmail"]; | 1683 groupByEmail = _json["groupByEmail"]; |
| 1610 } | 1684 } |
| 1611 if (_json.containsKey("role")) { | 1685 if (_json.containsKey("role")) { |
| 1612 role = _json["role"]; | 1686 role = _json["role"]; |
| 1613 } | 1687 } |
| 1614 if (_json.containsKey("specialGroup")) { | 1688 if (_json.containsKey("specialGroup")) { |
| 1615 specialGroup = _json["specialGroup"]; | 1689 specialGroup = _json["specialGroup"]; |
| 1616 } | 1690 } |
| 1617 if (_json.containsKey("userByEmail")) { | 1691 if (_json.containsKey("userByEmail")) { |
| 1618 userByEmail = _json["userByEmail"]; | 1692 userByEmail = _json["userByEmail"]; |
| 1619 } | 1693 } |
| 1620 if (_json.containsKey("view")) { | 1694 if (_json.containsKey("view")) { |
| 1621 view = new TableReference.fromJson(_json["view"]); | 1695 view = new TableReference.fromJson(_json["view"]); |
| 1622 } | 1696 } |
| 1623 } | 1697 } |
| 1624 | 1698 |
| 1625 core.Map<core.String, core.Object> toJson() { | 1699 core.Map<core.String, core.Object> toJson() { |
| 1626 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1700 final core.Map<core.String, core.Object> _json = |
| 1701 new core.Map<core.String, core.Object>(); |
| 1627 if (domain != null) { | 1702 if (domain != null) { |
| 1628 _json["domain"] = domain; | 1703 _json["domain"] = domain; |
| 1629 } | 1704 } |
| 1630 if (groupByEmail != null) { | 1705 if (groupByEmail != null) { |
| 1631 _json["groupByEmail"] = groupByEmail; | 1706 _json["groupByEmail"] = groupByEmail; |
| 1632 } | 1707 } |
| 1633 if (role != null) { | 1708 if (role != null) { |
| 1634 _json["role"] = role; | 1709 _json["role"] = role; |
| 1635 } | 1710 } |
| 1636 if (specialGroup != null) { | 1711 if (specialGroup != null) { |
| 1637 _json["specialGroup"] = specialGroup; | 1712 _json["specialGroup"] = specialGroup; |
| 1638 } | 1713 } |
| 1639 if (userByEmail != null) { | 1714 if (userByEmail != null) { |
| 1640 _json["userByEmail"] = userByEmail; | 1715 _json["userByEmail"] = userByEmail; |
| 1641 } | 1716 } |
| 1642 if (view != null) { | 1717 if (view != null) { |
| 1643 _json["view"] = (view).toJson(); | 1718 _json["view"] = (view).toJson(); |
| 1644 } | 1719 } |
| 1645 return _json; | 1720 return _json; |
| 1646 } | 1721 } |
| 1647 } | 1722 } |
| 1648 | 1723 |
| 1649 class Dataset { | 1724 class Dataset { |
| 1650 /** | 1725 /// [Optional] An array of objects that define dataset access for one or more |
| 1651 * [Optional] An array of objects that define dataset access for one or more | 1726 /// entities. You can set this property when inserting or updating a dataset |
| 1652 * entities. You can set this property when inserting or updating a dataset in | 1727 /// in order to control who is allowed to access the data. If unspecified at |
| 1653 * order to control who is allowed to access the data. If unspecified at | 1728 /// dataset creation time, BigQuery adds default dataset access for the |
| 1654 * dataset creation time, BigQuery adds default dataset access for the | 1729 /// following entities: access.specialGroup: projectReaders; access.role: |
| 1655 * following entities: access.specialGroup: projectReaders; access.role: | 1730 /// READER; access.specialGroup: projectWriters; access.role: WRITER; |
| 1656 * READER; access.specialGroup: projectWriters; access.role: WRITER; | 1731 /// access.specialGroup: projectOwners; access.role: OWNER; |
| 1657 * access.specialGroup: projectOwners; access.role: OWNER; access.userByEmail: | 1732 /// access.userByEmail: [dataset creator email]; access.role: OWNER; |
| 1658 * [dataset creator email]; access.role: OWNER; | |
| 1659 */ | |
| 1660 core.List<DatasetAccess> access; | 1733 core.List<DatasetAccess> access; |
| 1661 /** | 1734 |
| 1662 * [Output-only] The time when this dataset was created, in milliseconds since | 1735 /// [Output-only] The time when this dataset was created, in milliseconds |
| 1663 * the epoch. | 1736 /// since the epoch. |
| 1664 */ | |
| 1665 core.String creationTime; | 1737 core.String creationTime; |
| 1666 /** [Required] A reference that identifies the dataset. */ | 1738 |
| 1739 /// [Required] A reference that identifies the dataset. |
| 1667 DatasetReference datasetReference; | 1740 DatasetReference datasetReference; |
| 1668 /** | 1741 |
| 1669 * [Optional] The default lifetime of all tables in the dataset, in | 1742 /// [Optional] The default lifetime of all tables in the dataset, in |
| 1670 * milliseconds. The minimum value is 3600000 milliseconds (one hour). Once | 1743 /// milliseconds. The minimum value is 3600000 milliseconds (one hour). Once |
| 1671 * this property is set, all newly-created tables in the dataset will have an | 1744 /// this property is set, all newly-created tables in the dataset will have |
| 1672 * expirationTime property set to the creation time plus the value in this | 1745 /// an expirationTime property set to the creation time plus the value in |
| 1673 * property, and changing the value will only affect new tables, not existing | 1746 /// this property, and changing the value will only affect new tables, not |
| 1674 * ones. When the expirationTime for a given table is reached, that table will | 1747 /// existing ones. When the expirationTime for a given table is reached, that |
| 1675 * be deleted automatically. If a table's expirationTime is modified or | 1748 /// table will be deleted automatically. If a table's expirationTime is |
| 1676 * removed before the table expires, or if you provide an explicit | 1749 /// modified or removed before the table expires, or if you provide an |
| 1677 * expirationTime when creating a table, that value takes precedence over the | 1750 /// explicit expirationTime when creating a table, that value takes |
| 1678 * default expiration time indicated by this property. | 1751 /// precedence over the default expiration time indicated by this property. |
| 1679 */ | |
| 1680 core.String defaultTableExpirationMs; | 1752 core.String defaultTableExpirationMs; |
| 1681 /** [Optional] A user-friendly description of the dataset. */ | 1753 |
| 1754 /// [Optional] A user-friendly description of the dataset. |
| 1682 core.String description; | 1755 core.String description; |
| 1683 /** [Output-only] A hash of the resource. */ | 1756 |
| 1757 /// [Output-only] A hash of the resource. |
| 1684 core.String etag; | 1758 core.String etag; |
| 1685 /** [Optional] A descriptive name for the dataset. */ | 1759 |
| 1760 /// [Optional] A descriptive name for the dataset. |
| 1686 core.String friendlyName; | 1761 core.String friendlyName; |
| 1687 /** | 1762 |
| 1688 * [Output-only] The fully-qualified unique name of the dataset in the format | 1763 /// [Output-only] The fully-qualified unique name of the dataset in the |
| 1689 * projectId:datasetId. The dataset name without the project name is given in | 1764 /// format projectId:datasetId. The dataset name without the project name is |
| 1690 * the datasetId field. When creating a new dataset, leave this field blank, | 1765 /// given in the datasetId field. When creating a new dataset, leave this |
| 1691 * and instead specify the datasetId field. | 1766 /// field blank, and instead specify the datasetId field. |
| 1692 */ | |
| 1693 core.String id; | 1767 core.String id; |
| 1694 /** [Output-only] The resource type. */ | 1768 |
| 1769 /// [Output-only] The resource type. |
| 1695 core.String kind; | 1770 core.String kind; |
| 1696 /** | 1771 |
| 1697 * The labels associated with this dataset. You can use these to organize and | 1772 /// The labels associated with this dataset. You can use these to organize |
| 1698 * group your datasets. You can set this property when inserting or updating a | 1773 /// and group your datasets. You can set this property when inserting or |
| 1699 * dataset. See Labeling Datasets for more information. | 1774 /// updating a dataset. See Labeling Datasets for more information. |
| 1700 */ | |
| 1701 core.Map<core.String, core.String> labels; | 1775 core.Map<core.String, core.String> labels; |
| 1702 /** | 1776 |
| 1703 * [Output-only] The date when this dataset or any of its tables was last | 1777 /// [Output-only] The date when this dataset or any of its tables was last |
| 1704 * modified, in milliseconds since the epoch. | 1778 /// modified, in milliseconds since the epoch. |
| 1705 */ | |
| 1706 core.String lastModifiedTime; | 1779 core.String lastModifiedTime; |
| 1707 /** | 1780 |
| 1708 * The geographic location where the dataset should reside. Possible values | 1781 /// The geographic location where the dataset should reside. Possible values |
| 1709 * include EU and US. The default value is US. | 1782 /// include EU and US. The default value is US. |
| 1710 */ | |
| 1711 core.String location; | 1783 core.String location; |
| 1712 /** | 1784 |
| 1713 * [Output-only] A URL that can be used to access the resource again. You can | 1785 /// [Output-only] A URL that can be used to access the resource again. You |
| 1714 * use this URL in Get or Update requests to the resource. | 1786 /// can use this URL in Get or Update requests to the resource. |
| 1715 */ | |
| 1716 core.String selfLink; | 1787 core.String selfLink; |
| 1717 | 1788 |
| 1718 Dataset(); | 1789 Dataset(); |
| 1719 | 1790 |
| 1720 Dataset.fromJson(core.Map _json) { | 1791 Dataset.fromJson(core.Map _json) { |
| 1721 if (_json.containsKey("access")) { | 1792 if (_json.containsKey("access")) { |
| 1722 access = _json["access"].map((value) => new DatasetAccess.fromJson(value))
.toList(); | 1793 access = _json["access"] |
| 1794 .map((value) => new DatasetAccess.fromJson(value)) |
| 1795 .toList(); |
| 1723 } | 1796 } |
| 1724 if (_json.containsKey("creationTime")) { | 1797 if (_json.containsKey("creationTime")) { |
| 1725 creationTime = _json["creationTime"]; | 1798 creationTime = _json["creationTime"]; |
| 1726 } | 1799 } |
| 1727 if (_json.containsKey("datasetReference")) { | 1800 if (_json.containsKey("datasetReference")) { |
| 1728 datasetReference = new DatasetReference.fromJson(_json["datasetReference"]
); | 1801 datasetReference = |
| 1802 new DatasetReference.fromJson(_json["datasetReference"]); |
| 1729 } | 1803 } |
| 1730 if (_json.containsKey("defaultTableExpirationMs")) { | 1804 if (_json.containsKey("defaultTableExpirationMs")) { |
| 1731 defaultTableExpirationMs = _json["defaultTableExpirationMs"]; | 1805 defaultTableExpirationMs = _json["defaultTableExpirationMs"]; |
| 1732 } | 1806 } |
| 1733 if (_json.containsKey("description")) { | 1807 if (_json.containsKey("description")) { |
| 1734 description = _json["description"]; | 1808 description = _json["description"]; |
| 1735 } | 1809 } |
| 1736 if (_json.containsKey("etag")) { | 1810 if (_json.containsKey("etag")) { |
| 1737 etag = _json["etag"]; | 1811 etag = _json["etag"]; |
| 1738 } | 1812 } |
| (...skipping 14 matching lines...) Expand all Loading... |
| 1753 } | 1827 } |
| 1754 if (_json.containsKey("location")) { | 1828 if (_json.containsKey("location")) { |
| 1755 location = _json["location"]; | 1829 location = _json["location"]; |
| 1756 } | 1830 } |
| 1757 if (_json.containsKey("selfLink")) { | 1831 if (_json.containsKey("selfLink")) { |
| 1758 selfLink = _json["selfLink"]; | 1832 selfLink = _json["selfLink"]; |
| 1759 } | 1833 } |
| 1760 } | 1834 } |
| 1761 | 1835 |
| 1762 core.Map<core.String, core.Object> toJson() { | 1836 core.Map<core.String, core.Object> toJson() { |
| 1763 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1837 final core.Map<core.String, core.Object> _json = |
| 1838 new core.Map<core.String, core.Object>(); |
| 1764 if (access != null) { | 1839 if (access != null) { |
| 1765 _json["access"] = access.map((value) => (value).toJson()).toList(); | 1840 _json["access"] = access.map((value) => (value).toJson()).toList(); |
| 1766 } | 1841 } |
| 1767 if (creationTime != null) { | 1842 if (creationTime != null) { |
| 1768 _json["creationTime"] = creationTime; | 1843 _json["creationTime"] = creationTime; |
| 1769 } | 1844 } |
| 1770 if (datasetReference != null) { | 1845 if (datasetReference != null) { |
| 1771 _json["datasetReference"] = (datasetReference).toJson(); | 1846 _json["datasetReference"] = (datasetReference).toJson(); |
| 1772 } | 1847 } |
| 1773 if (defaultTableExpirationMs != null) { | 1848 if (defaultTableExpirationMs != null) { |
| (...skipping 24 matching lines...) Expand all Loading... |
| 1798 _json["location"] = location; | 1873 _json["location"] = location; |
| 1799 } | 1874 } |
| 1800 if (selfLink != null) { | 1875 if (selfLink != null) { |
| 1801 _json["selfLink"] = selfLink; | 1876 _json["selfLink"] = selfLink; |
| 1802 } | 1877 } |
| 1803 return _json; | 1878 return _json; |
| 1804 } | 1879 } |
| 1805 } | 1880 } |
| 1806 | 1881 |
| 1807 class DatasetListDatasets { | 1882 class DatasetListDatasets { |
| 1808 /** | 1883 /// The dataset reference. Use this property to access specific parts of the |
| 1809 * The dataset reference. Use this property to access specific parts of the | 1884 /// dataset's ID, such as project ID or dataset ID. |
| 1810 * dataset's ID, such as project ID or dataset ID. | |
| 1811 */ | |
| 1812 DatasetReference datasetReference; | 1885 DatasetReference datasetReference; |
| 1813 /** A descriptive name for the dataset, if one exists. */ | 1886 |
| 1887 /// A descriptive name for the dataset, if one exists. |
| 1814 core.String friendlyName; | 1888 core.String friendlyName; |
| 1815 /** The fully-qualified, unique, opaque ID of the dataset. */ | 1889 |
| 1890 /// The fully-qualified, unique, opaque ID of the dataset. |
| 1816 core.String id; | 1891 core.String id; |
| 1817 /** | 1892 |
| 1818 * The resource type. This property always returns the value | 1893 /// The resource type. This property always returns the value |
| 1819 * "bigquery#dataset". | 1894 /// "bigquery#dataset". |
| 1820 */ | |
| 1821 core.String kind; | 1895 core.String kind; |
| 1822 /** | 1896 |
| 1823 * The labels associated with this dataset. You can use these to organize and | 1897 /// The labels associated with this dataset. You can use these to organize |
| 1824 * group your datasets. | 1898 /// and group your datasets. |
| 1825 */ | |
| 1826 core.Map<core.String, core.String> labels; | 1899 core.Map<core.String, core.String> labels; |
| 1827 | 1900 |
| 1828 DatasetListDatasets(); | 1901 DatasetListDatasets(); |
| 1829 | 1902 |
| 1830 DatasetListDatasets.fromJson(core.Map _json) { | 1903 DatasetListDatasets.fromJson(core.Map _json) { |
| 1831 if (_json.containsKey("datasetReference")) { | 1904 if (_json.containsKey("datasetReference")) { |
| 1832 datasetReference = new DatasetReference.fromJson(_json["datasetReference"]
); | 1905 datasetReference = |
| 1906 new DatasetReference.fromJson(_json["datasetReference"]); |
| 1833 } | 1907 } |
| 1834 if (_json.containsKey("friendlyName")) { | 1908 if (_json.containsKey("friendlyName")) { |
| 1835 friendlyName = _json["friendlyName"]; | 1909 friendlyName = _json["friendlyName"]; |
| 1836 } | 1910 } |
| 1837 if (_json.containsKey("id")) { | 1911 if (_json.containsKey("id")) { |
| 1838 id = _json["id"]; | 1912 id = _json["id"]; |
| 1839 } | 1913 } |
| 1840 if (_json.containsKey("kind")) { | 1914 if (_json.containsKey("kind")) { |
| 1841 kind = _json["kind"]; | 1915 kind = _json["kind"]; |
| 1842 } | 1916 } |
| 1843 if (_json.containsKey("labels")) { | 1917 if (_json.containsKey("labels")) { |
| 1844 labels = _json["labels"]; | 1918 labels = _json["labels"]; |
| 1845 } | 1919 } |
| 1846 } | 1920 } |
| 1847 | 1921 |
| 1848 core.Map<core.String, core.Object> toJson() { | 1922 core.Map<core.String, core.Object> toJson() { |
| 1849 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1923 final core.Map<core.String, core.Object> _json = |
| 1924 new core.Map<core.String, core.Object>(); |
| 1850 if (datasetReference != null) { | 1925 if (datasetReference != null) { |
| 1851 _json["datasetReference"] = (datasetReference).toJson(); | 1926 _json["datasetReference"] = (datasetReference).toJson(); |
| 1852 } | 1927 } |
| 1853 if (friendlyName != null) { | 1928 if (friendlyName != null) { |
| 1854 _json["friendlyName"] = friendlyName; | 1929 _json["friendlyName"] = friendlyName; |
| 1855 } | 1930 } |
| 1856 if (id != null) { | 1931 if (id != null) { |
| 1857 _json["id"] = id; | 1932 _json["id"] = id; |
| 1858 } | 1933 } |
| 1859 if (kind != null) { | 1934 if (kind != null) { |
| 1860 _json["kind"] = kind; | 1935 _json["kind"] = kind; |
| 1861 } | 1936 } |
| 1862 if (labels != null) { | 1937 if (labels != null) { |
| 1863 _json["labels"] = labels; | 1938 _json["labels"] = labels; |
| 1864 } | 1939 } |
| 1865 return _json; | 1940 return _json; |
| 1866 } | 1941 } |
| 1867 } | 1942 } |
| 1868 | 1943 |
| 1869 class DatasetList { | 1944 class DatasetList { |
| 1870 /** | 1945 /// An array of the dataset resources in the project. Each resource contains |
| 1871 * An array of the dataset resources in the project. Each resource contains | 1946 /// basic information. For full information about a particular dataset |
| 1872 * basic information. For full information about a particular dataset | 1947 /// resource, use the Datasets: get method. This property is omitted when |
| 1873 * resource, use the Datasets: get method. This property is omitted when there | 1948 /// there are no datasets in the project. |
| 1874 * are no datasets in the project. | |
| 1875 */ | |
| 1876 core.List<DatasetListDatasets> datasets; | 1949 core.List<DatasetListDatasets> datasets; |
| 1877 /** | 1950 |
| 1878 * A hash value of the results page. You can use this property to determine if | 1951 /// A hash value of the results page. You can use this property to determine |
| 1879 * the page has changed since the last request. | 1952 /// if the page has changed since the last request. |
| 1880 */ | |
| 1881 core.String etag; | 1953 core.String etag; |
| 1882 /** | 1954 |
| 1883 * The list type. This property always returns the value | 1955 /// The list type. This property always returns the value |
| 1884 * "bigquery#datasetList". | 1956 /// "bigquery#datasetList". |
| 1885 */ | |
| 1886 core.String kind; | 1957 core.String kind; |
| 1887 /** | 1958 |
| 1888 * A token that can be used to request the next results page. This property is | 1959 /// A token that can be used to request the next results page. This property |
| 1889 * omitted on the final results page. | 1960 /// is omitted on the final results page. |
| 1890 */ | |
| 1891 core.String nextPageToken; | 1961 core.String nextPageToken; |
| 1892 | 1962 |
| 1893 DatasetList(); | 1963 DatasetList(); |
| 1894 | 1964 |
| 1895 DatasetList.fromJson(core.Map _json) { | 1965 DatasetList.fromJson(core.Map _json) { |
| 1896 if (_json.containsKey("datasets")) { | 1966 if (_json.containsKey("datasets")) { |
| 1897 datasets = _json["datasets"].map((value) => new DatasetListDatasets.fromJs
on(value)).toList(); | 1967 datasets = _json["datasets"] |
| 1968 .map((value) => new DatasetListDatasets.fromJson(value)) |
| 1969 .toList(); |
| 1898 } | 1970 } |
| 1899 if (_json.containsKey("etag")) { | 1971 if (_json.containsKey("etag")) { |
| 1900 etag = _json["etag"]; | 1972 etag = _json["etag"]; |
| 1901 } | 1973 } |
| 1902 if (_json.containsKey("kind")) { | 1974 if (_json.containsKey("kind")) { |
| 1903 kind = _json["kind"]; | 1975 kind = _json["kind"]; |
| 1904 } | 1976 } |
| 1905 if (_json.containsKey("nextPageToken")) { | 1977 if (_json.containsKey("nextPageToken")) { |
| 1906 nextPageToken = _json["nextPageToken"]; | 1978 nextPageToken = _json["nextPageToken"]; |
| 1907 } | 1979 } |
| 1908 } | 1980 } |
| 1909 | 1981 |
| 1910 core.Map<core.String, core.Object> toJson() { | 1982 core.Map<core.String, core.Object> toJson() { |
| 1911 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 1983 final core.Map<core.String, core.Object> _json = |
| 1984 new core.Map<core.String, core.Object>(); |
| 1912 if (datasets != null) { | 1985 if (datasets != null) { |
| 1913 _json["datasets"] = datasets.map((value) => (value).toJson()).toList(); | 1986 _json["datasets"] = datasets.map((value) => (value).toJson()).toList(); |
| 1914 } | 1987 } |
| 1915 if (etag != null) { | 1988 if (etag != null) { |
| 1916 _json["etag"] = etag; | 1989 _json["etag"] = etag; |
| 1917 } | 1990 } |
| 1918 if (kind != null) { | 1991 if (kind != null) { |
| 1919 _json["kind"] = kind; | 1992 _json["kind"] = kind; |
| 1920 } | 1993 } |
| 1921 if (nextPageToken != null) { | 1994 if (nextPageToken != null) { |
| 1922 _json["nextPageToken"] = nextPageToken; | 1995 _json["nextPageToken"] = nextPageToken; |
| 1923 } | 1996 } |
| 1924 return _json; | 1997 return _json; |
| 1925 } | 1998 } |
| 1926 } | 1999 } |
| 1927 | 2000 |
| 1928 class DatasetReference { | 2001 class DatasetReference { |
| 1929 /** | 2002 /// [Required] A unique ID for this dataset, without the project name. The ID |
| 1930 * [Required] A unique ID for this dataset, without the project name. The ID | 2003 /// must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). |
| 1931 * must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). | 2004 /// The maximum length is 1,024 characters. |
| 1932 * The maximum length is 1,024 characters. | |
| 1933 */ | |
| 1934 core.String datasetId; | 2005 core.String datasetId; |
| 1935 /** [Optional] The ID of the project containing this dataset. */ | 2006 |
| 2007 /// [Optional] The ID of the project containing this dataset. |
| 1936 core.String projectId; | 2008 core.String projectId; |
| 1937 | 2009 |
| 1938 DatasetReference(); | 2010 DatasetReference(); |
| 1939 | 2011 |
| 1940 DatasetReference.fromJson(core.Map _json) { | 2012 DatasetReference.fromJson(core.Map _json) { |
| 1941 if (_json.containsKey("datasetId")) { | 2013 if (_json.containsKey("datasetId")) { |
| 1942 datasetId = _json["datasetId"]; | 2014 datasetId = _json["datasetId"]; |
| 1943 } | 2015 } |
| 1944 if (_json.containsKey("projectId")) { | 2016 if (_json.containsKey("projectId")) { |
| 1945 projectId = _json["projectId"]; | 2017 projectId = _json["projectId"]; |
| 1946 } | 2018 } |
| 1947 } | 2019 } |
| 1948 | 2020 |
| 1949 core.Map<core.String, core.Object> toJson() { | 2021 core.Map<core.String, core.Object> toJson() { |
| 1950 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2022 final core.Map<core.String, core.Object> _json = |
| 2023 new core.Map<core.String, core.Object>(); |
| 1951 if (datasetId != null) { | 2024 if (datasetId != null) { |
| 1952 _json["datasetId"] = datasetId; | 2025 _json["datasetId"] = datasetId; |
| 1953 } | 2026 } |
| 1954 if (projectId != null) { | 2027 if (projectId != null) { |
| 1955 _json["projectId"] = projectId; | 2028 _json["projectId"] = projectId; |
| 1956 } | 2029 } |
| 1957 return _json; | 2030 return _json; |
| 1958 } | 2031 } |
| 1959 } | 2032 } |
| 1960 | 2033 |
| 2034 class EncryptionConfiguration { |
| 2035 /// [Optional] Describes the Cloud KMS encryption key that will be used to |
| 2036 /// protect destination BigQuery table. The BigQuery Service Account |
| 2037 /// associated with your project requires access to this encryption key. |
| 2038 core.String kmsKeyName; |
| 2039 |
| 2040 EncryptionConfiguration(); |
| 2041 |
| 2042 EncryptionConfiguration.fromJson(core.Map _json) { |
| 2043 if (_json.containsKey("kmsKeyName")) { |
| 2044 kmsKeyName = _json["kmsKeyName"]; |
| 2045 } |
| 2046 } |
| 2047 |
| 2048 core.Map<core.String, core.Object> toJson() { |
| 2049 final core.Map<core.String, core.Object> _json = |
| 2050 new core.Map<core.String, core.Object>(); |
| 2051 if (kmsKeyName != null) { |
| 2052 _json["kmsKeyName"] = kmsKeyName; |
| 2053 } |
| 2054 return _json; |
| 2055 } |
| 2056 } |
| 2057 |
| 1961 class ErrorProto { | 2058 class ErrorProto { |
| 1962 /** | 2059 /// Debugging information. This property is internal to Google and should not |
| 1963 * Debugging information. This property is internal to Google and should not | 2060 /// be used. |
| 1964 * be used. | |
| 1965 */ | |
| 1966 core.String debugInfo; | 2061 core.String debugInfo; |
| 1967 /** Specifies where the error occurred, if present. */ | 2062 |
| 2063 /// Specifies where the error occurred, if present. |
| 1968 core.String location; | 2064 core.String location; |
| 1969 /** A human-readable description of the error. */ | 2065 |
| 2066 /// A human-readable description of the error. |
| 1970 core.String message; | 2067 core.String message; |
| 1971 /** A short error code that summarizes the error. */ | 2068 |
| 2069 /// A short error code that summarizes the error. |
| 1972 core.String reason; | 2070 core.String reason; |
| 1973 | 2071 |
| 1974 ErrorProto(); | 2072 ErrorProto(); |
| 1975 | 2073 |
| 1976 ErrorProto.fromJson(core.Map _json) { | 2074 ErrorProto.fromJson(core.Map _json) { |
| 1977 if (_json.containsKey("debugInfo")) { | 2075 if (_json.containsKey("debugInfo")) { |
| 1978 debugInfo = _json["debugInfo"]; | 2076 debugInfo = _json["debugInfo"]; |
| 1979 } | 2077 } |
| 1980 if (_json.containsKey("location")) { | 2078 if (_json.containsKey("location")) { |
| 1981 location = _json["location"]; | 2079 location = _json["location"]; |
| 1982 } | 2080 } |
| 1983 if (_json.containsKey("message")) { | 2081 if (_json.containsKey("message")) { |
| 1984 message = _json["message"]; | 2082 message = _json["message"]; |
| 1985 } | 2083 } |
| 1986 if (_json.containsKey("reason")) { | 2084 if (_json.containsKey("reason")) { |
| 1987 reason = _json["reason"]; | 2085 reason = _json["reason"]; |
| 1988 } | 2086 } |
| 1989 } | 2087 } |
| 1990 | 2088 |
| 1991 core.Map<core.String, core.Object> toJson() { | 2089 core.Map<core.String, core.Object> toJson() { |
| 1992 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2090 final core.Map<core.String, core.Object> _json = |
| 2091 new core.Map<core.String, core.Object>(); |
| 1993 if (debugInfo != null) { | 2092 if (debugInfo != null) { |
| 1994 _json["debugInfo"] = debugInfo; | 2093 _json["debugInfo"] = debugInfo; |
| 1995 } | 2094 } |
| 1996 if (location != null) { | 2095 if (location != null) { |
| 1997 _json["location"] = location; | 2096 _json["location"] = location; |
| 1998 } | 2097 } |
| 1999 if (message != null) { | 2098 if (message != null) { |
| 2000 _json["message"] = message; | 2099 _json["message"] = message; |
| 2001 } | 2100 } |
| 2002 if (reason != null) { | 2101 if (reason != null) { |
| 2003 _json["reason"] = reason; | 2102 _json["reason"] = reason; |
| 2004 } | 2103 } |
| 2005 return _json; | 2104 return _json; |
| 2006 } | 2105 } |
| 2007 } | 2106 } |
| 2008 | 2107 |
| 2009 class ExplainQueryStage { | 2108 class ExplainQueryStage { |
| 2010 /** Milliseconds the average shard spent on CPU-bound tasks. */ | 2109 /// Milliseconds the average shard spent on CPU-bound tasks. |
| 2011 core.String computeMsAvg; | 2110 core.String computeMsAvg; |
| 2012 /** Milliseconds the slowest shard spent on CPU-bound tasks. */ | 2111 |
| 2112 /// Milliseconds the slowest shard spent on CPU-bound tasks. |
| 2013 core.String computeMsMax; | 2113 core.String computeMsMax; |
| 2014 /** Relative amount of time the average shard spent on CPU-bound tasks. */ | 2114 |
| 2115 /// Relative amount of time the average shard spent on CPU-bound tasks. |
| 2015 core.double computeRatioAvg; | 2116 core.double computeRatioAvg; |
| 2016 /** Relative amount of time the slowest shard spent on CPU-bound tasks. */ | 2117 |
| 2118 /// Relative amount of time the slowest shard spent on CPU-bound tasks. |
| 2017 core.double computeRatioMax; | 2119 core.double computeRatioMax; |
| 2018 /** Unique ID for stage within plan. */ | 2120 |
| 2121 /// Unique ID for stage within plan. |
| 2019 core.String id; | 2122 core.String id; |
| 2020 /** Human-readable name for stage. */ | 2123 |
| 2124 /// Human-readable name for stage. |
| 2021 core.String name; | 2125 core.String name; |
| 2022 /** Milliseconds the average shard spent reading input. */ | 2126 |
| 2127 /// Milliseconds the average shard spent reading input. |
| 2023 core.String readMsAvg; | 2128 core.String readMsAvg; |
| 2024 /** Milliseconds the slowest shard spent reading input. */ | 2129 |
| 2130 /// Milliseconds the slowest shard spent reading input. |
| 2025 core.String readMsMax; | 2131 core.String readMsMax; |
| 2026 /** Relative amount of time the average shard spent reading input. */ | 2132 |
| 2133 /// Relative amount of time the average shard spent reading input. |
| 2027 core.double readRatioAvg; | 2134 core.double readRatioAvg; |
| 2028 /** Relative amount of time the slowest shard spent reading input. */ | 2135 |
| 2136 /// Relative amount of time the slowest shard spent reading input. |
| 2029 core.double readRatioMax; | 2137 core.double readRatioMax; |
| 2030 /** Number of records read into the stage. */ | 2138 |
| 2139 /// Number of records read into the stage. |
| 2031 core.String recordsRead; | 2140 core.String recordsRead; |
| 2032 /** Number of records written by the stage. */ | 2141 |
| 2142 /// Number of records written by the stage. |
| 2033 core.String recordsWritten; | 2143 core.String recordsWritten; |
| 2034 /** Total number of bytes written to shuffle. */ | 2144 |
| 2145 /// Total number of bytes written to shuffle. |
| 2035 core.String shuffleOutputBytes; | 2146 core.String shuffleOutputBytes; |
| 2036 /** Total number of bytes written to shuffle and spilled to disk. */ | 2147 |
| 2148 /// Total number of bytes written to shuffle and spilled to disk. |
| 2037 core.String shuffleOutputBytesSpilled; | 2149 core.String shuffleOutputBytesSpilled; |
| 2038 /** Current status for the stage. */ | 2150 |
| 2151 /// Current status for the stage. |
| 2039 core.String status; | 2152 core.String status; |
| 2040 /** | 2153 |
| 2041 * List of operations within the stage in dependency order (approximately | 2154 /// List of operations within the stage in dependency order (approximately |
| 2042 * chronological). | 2155 /// chronological). |
| 2043 */ | |
| 2044 core.List<ExplainQueryStep> steps; | 2156 core.List<ExplainQueryStep> steps; |
| 2045 /** Milliseconds the average shard spent waiting to be scheduled. */ | 2157 |
| 2158 /// Milliseconds the average shard spent waiting to be scheduled. |
| 2046 core.String waitMsAvg; | 2159 core.String waitMsAvg; |
| 2047 /** Milliseconds the slowest shard spent waiting to be scheduled. */ | 2160 |
| 2161 /// Milliseconds the slowest shard spent waiting to be scheduled. |
| 2048 core.String waitMsMax; | 2162 core.String waitMsMax; |
| 2049 /** | 2163 |
| 2050 * Relative amount of time the average shard spent waiting to be scheduled. | 2164 /// Relative amount of time the average shard spent waiting to be scheduled. |
| 2051 */ | |
| 2052 core.double waitRatioAvg; | 2165 core.double waitRatioAvg; |
| 2053 /** | 2166 |
| 2054 * Relative amount of time the slowest shard spent waiting to be scheduled. | 2167 /// Relative amount of time the slowest shard spent waiting to be scheduled. |
| 2055 */ | |
| 2056 core.double waitRatioMax; | 2168 core.double waitRatioMax; |
| 2057 /** Milliseconds the average shard spent on writing output. */ | 2169 |
| 2170 /// Milliseconds the average shard spent on writing output. |
| 2058 core.String writeMsAvg; | 2171 core.String writeMsAvg; |
| 2059 /** Milliseconds the slowest shard spent on writing output. */ | 2172 |
| 2173 /// Milliseconds the slowest shard spent on writing output. |
| 2060 core.String writeMsMax; | 2174 core.String writeMsMax; |
| 2061 /** Relative amount of time the average shard spent on writing output. */ | 2175 |
| 2176 /// Relative amount of time the average shard spent on writing output. |
| 2062 core.double writeRatioAvg; | 2177 core.double writeRatioAvg; |
| 2063 /** Relative amount of time the slowest shard spent on writing output. */ | 2178 |
| 2179 /// Relative amount of time the slowest shard spent on writing output. |
| 2064 core.double writeRatioMax; | 2180 core.double writeRatioMax; |
| 2065 | 2181 |
| 2066 ExplainQueryStage(); | 2182 ExplainQueryStage(); |
| 2067 | 2183 |
| 2068 ExplainQueryStage.fromJson(core.Map _json) { | 2184 ExplainQueryStage.fromJson(core.Map _json) { |
| 2069 if (_json.containsKey("computeMsAvg")) { | 2185 if (_json.containsKey("computeMsAvg")) { |
| 2070 computeMsAvg = _json["computeMsAvg"]; | 2186 computeMsAvg = _json["computeMsAvg"]; |
| 2071 } | 2187 } |
| 2072 if (_json.containsKey("computeMsMax")) { | 2188 if (_json.containsKey("computeMsMax")) { |
| 2073 computeMsMax = _json["computeMsMax"]; | 2189 computeMsMax = _json["computeMsMax"]; |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2105 if (_json.containsKey("shuffleOutputBytes")) { | 2221 if (_json.containsKey("shuffleOutputBytes")) { |
| 2106 shuffleOutputBytes = _json["shuffleOutputBytes"]; | 2222 shuffleOutputBytes = _json["shuffleOutputBytes"]; |
| 2107 } | 2223 } |
| 2108 if (_json.containsKey("shuffleOutputBytesSpilled")) { | 2224 if (_json.containsKey("shuffleOutputBytesSpilled")) { |
| 2109 shuffleOutputBytesSpilled = _json["shuffleOutputBytesSpilled"]; | 2225 shuffleOutputBytesSpilled = _json["shuffleOutputBytesSpilled"]; |
| 2110 } | 2226 } |
| 2111 if (_json.containsKey("status")) { | 2227 if (_json.containsKey("status")) { |
| 2112 status = _json["status"]; | 2228 status = _json["status"]; |
| 2113 } | 2229 } |
| 2114 if (_json.containsKey("steps")) { | 2230 if (_json.containsKey("steps")) { |
| 2115 steps = _json["steps"].map((value) => new ExplainQueryStep.fromJson(value)
).toList(); | 2231 steps = _json["steps"] |
| 2232 .map((value) => new ExplainQueryStep.fromJson(value)) |
| 2233 .toList(); |
| 2116 } | 2234 } |
| 2117 if (_json.containsKey("waitMsAvg")) { | 2235 if (_json.containsKey("waitMsAvg")) { |
| 2118 waitMsAvg = _json["waitMsAvg"]; | 2236 waitMsAvg = _json["waitMsAvg"]; |
| 2119 } | 2237 } |
| 2120 if (_json.containsKey("waitMsMax")) { | 2238 if (_json.containsKey("waitMsMax")) { |
| 2121 waitMsMax = _json["waitMsMax"]; | 2239 waitMsMax = _json["waitMsMax"]; |
| 2122 } | 2240 } |
| 2123 if (_json.containsKey("waitRatioAvg")) { | 2241 if (_json.containsKey("waitRatioAvg")) { |
| 2124 waitRatioAvg = _json["waitRatioAvg"]; | 2242 waitRatioAvg = _json["waitRatioAvg"]; |
| 2125 } | 2243 } |
| 2126 if (_json.containsKey("waitRatioMax")) { | 2244 if (_json.containsKey("waitRatioMax")) { |
| 2127 waitRatioMax = _json["waitRatioMax"]; | 2245 waitRatioMax = _json["waitRatioMax"]; |
| 2128 } | 2246 } |
| 2129 if (_json.containsKey("writeMsAvg")) { | 2247 if (_json.containsKey("writeMsAvg")) { |
| 2130 writeMsAvg = _json["writeMsAvg"]; | 2248 writeMsAvg = _json["writeMsAvg"]; |
| 2131 } | 2249 } |
| 2132 if (_json.containsKey("writeMsMax")) { | 2250 if (_json.containsKey("writeMsMax")) { |
| 2133 writeMsMax = _json["writeMsMax"]; | 2251 writeMsMax = _json["writeMsMax"]; |
| 2134 } | 2252 } |
| 2135 if (_json.containsKey("writeRatioAvg")) { | 2253 if (_json.containsKey("writeRatioAvg")) { |
| 2136 writeRatioAvg = _json["writeRatioAvg"]; | 2254 writeRatioAvg = _json["writeRatioAvg"]; |
| 2137 } | 2255 } |
| 2138 if (_json.containsKey("writeRatioMax")) { | 2256 if (_json.containsKey("writeRatioMax")) { |
| 2139 writeRatioMax = _json["writeRatioMax"]; | 2257 writeRatioMax = _json["writeRatioMax"]; |
| 2140 } | 2258 } |
| 2141 } | 2259 } |
| 2142 | 2260 |
| 2143 core.Map<core.String, core.Object> toJson() { | 2261 core.Map<core.String, core.Object> toJson() { |
| 2144 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2262 final core.Map<core.String, core.Object> _json = |
| 2263 new core.Map<core.String, core.Object>(); |
| 2145 if (computeMsAvg != null) { | 2264 if (computeMsAvg != null) { |
| 2146 _json["computeMsAvg"] = computeMsAvg; | 2265 _json["computeMsAvg"] = computeMsAvg; |
| 2147 } | 2266 } |
| 2148 if (computeMsMax != null) { | 2267 if (computeMsMax != null) { |
| 2149 _json["computeMsMax"] = computeMsMax; | 2268 _json["computeMsMax"] = computeMsMax; |
| 2150 } | 2269 } |
| 2151 if (computeRatioAvg != null) { | 2270 if (computeRatioAvg != null) { |
| 2152 _json["computeRatioAvg"] = computeRatioAvg; | 2271 _json["computeRatioAvg"] = computeRatioAvg; |
| 2153 } | 2272 } |
| 2154 if (computeRatioMax != null) { | 2273 if (computeRatioMax != null) { |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2212 _json["writeRatioAvg"] = writeRatioAvg; | 2331 _json["writeRatioAvg"] = writeRatioAvg; |
| 2213 } | 2332 } |
| 2214 if (writeRatioMax != null) { | 2333 if (writeRatioMax != null) { |
| 2215 _json["writeRatioMax"] = writeRatioMax; | 2334 _json["writeRatioMax"] = writeRatioMax; |
| 2216 } | 2335 } |
| 2217 return _json; | 2336 return _json; |
| 2218 } | 2337 } |
| 2219 } | 2338 } |
| 2220 | 2339 |
| 2221 class ExplainQueryStep { | 2340 class ExplainQueryStep { |
| 2222 /** Machine-readable operation type. */ | 2341 /// Machine-readable operation type. |
| 2223 core.String kind; | 2342 core.String kind; |
| 2224 /** Human-readable stage descriptions. */ | 2343 |
| 2344 /// Human-readable stage descriptions. |
| 2225 core.List<core.String> substeps; | 2345 core.List<core.String> substeps; |
| 2226 | 2346 |
| 2227 ExplainQueryStep(); | 2347 ExplainQueryStep(); |
| 2228 | 2348 |
| 2229 ExplainQueryStep.fromJson(core.Map _json) { | 2349 ExplainQueryStep.fromJson(core.Map _json) { |
| 2230 if (_json.containsKey("kind")) { | 2350 if (_json.containsKey("kind")) { |
| 2231 kind = _json["kind"]; | 2351 kind = _json["kind"]; |
| 2232 } | 2352 } |
| 2233 if (_json.containsKey("substeps")) { | 2353 if (_json.containsKey("substeps")) { |
| 2234 substeps = _json["substeps"]; | 2354 substeps = _json["substeps"]; |
| 2235 } | 2355 } |
| 2236 } | 2356 } |
| 2237 | 2357 |
| 2238 core.Map<core.String, core.Object> toJson() { | 2358 core.Map<core.String, core.Object> toJson() { |
| 2239 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2359 final core.Map<core.String, core.Object> _json = |
| 2360 new core.Map<core.String, core.Object>(); |
| 2240 if (kind != null) { | 2361 if (kind != null) { |
| 2241 _json["kind"] = kind; | 2362 _json["kind"] = kind; |
| 2242 } | 2363 } |
| 2243 if (substeps != null) { | 2364 if (substeps != null) { |
| 2244 _json["substeps"] = substeps; | 2365 _json["substeps"] = substeps; |
| 2245 } | 2366 } |
| 2246 return _json; | 2367 return _json; |
| 2247 } | 2368 } |
| 2248 } | 2369 } |
| 2249 | 2370 |
| 2250 class ExternalDataConfiguration { | 2371 class ExternalDataConfiguration { |
| 2251 /** | 2372 /// Try to detect schema and format options automatically. Any option |
| 2252 * Try to detect schema and format options automatically. Any option specified | 2373 /// specified explicitly will be honored. |
| 2253 * explicitly will be honored. | |
| 2254 */ | |
| 2255 core.bool autodetect; | 2374 core.bool autodetect; |
| 2256 /** [Optional] Additional options if sourceFormat is set to BIGTABLE. */ | 2375 |
| 2376 /// [Optional] Additional options if sourceFormat is set to BIGTABLE. |
| 2257 BigtableOptions bigtableOptions; | 2377 BigtableOptions bigtableOptions; |
| 2258 /** | 2378 |
| 2259 * [Optional] The compression type of the data source. Possible values include | 2379 /// [Optional] The compression type of the data source. Possible values |
| 2260 * GZIP and NONE. The default value is NONE. This setting is ignored for | 2380 /// include GZIP and NONE. The default value is NONE. This setting is ignored |
| 2261 * Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats. | 2381 /// for Google Cloud Bigtable, Google Cloud Datastore backups and Avro |
| 2262 */ | 2382 /// formats. |
| 2263 core.String compression; | 2383 core.String compression; |
| 2264 /** Additional properties to set if sourceFormat is set to CSV. */ | 2384 |
| 2385 /// Additional properties to set if sourceFormat is set to CSV. |
| 2265 CsvOptions csvOptions; | 2386 CsvOptions csvOptions; |
| 2266 /** [Optional] Additional options if sourceFormat is set to GOOGLE_SHEETS. */ | 2387 |
| 2388 /// [Optional] Additional options if sourceFormat is set to GOOGLE_SHEETS. |
| 2267 GoogleSheetsOptions googleSheetsOptions; | 2389 GoogleSheetsOptions googleSheetsOptions; |
| 2268 /** | 2390 |
| 2269 * [Optional] Indicates if BigQuery should allow extra values that are not | 2391 /// [Optional] Indicates if BigQuery should allow extra values that are not |
| 2270 * represented in the table schema. If true, the extra values are ignored. If | 2392 /// represented in the table schema. If true, the extra values are ignored. |
| 2271 * false, records with extra columns are treated as bad records, and if there | 2393 /// If false, records with extra columns are treated as bad records, and if |
| 2272 * are too many bad records, an invalid error is returned in the job result. | 2394 /// there are too many bad records, an invalid error is returned in the job |
| 2273 * The default value is false. The sourceFormat property determines what | 2395 /// result. The default value is false. The sourceFormat property determines |
| 2274 * BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values | 2396 /// what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named |
| 2275 * that don't match any column names Google Cloud Bigtable: This setting is | 2397 /// values that don't match any column names Google Cloud Bigtable: This |
| 2276 * ignored. Google Cloud Datastore backups: This setting is ignored. Avro: | 2398 /// setting is ignored. Google Cloud Datastore backups: This setting is |
| 2277 * This setting is ignored. | 2399 /// ignored. Avro: This setting is ignored. |
| 2278 */ | |
| 2279 core.bool ignoreUnknownValues; | 2400 core.bool ignoreUnknownValues; |
| 2280 /** | 2401 |
| 2281 * [Optional] The maximum number of bad records that BigQuery can ignore when | 2402 /// [Optional] The maximum number of bad records that BigQuery can ignore |
| 2282 * reading data. If the number of bad records exceeds this value, an invalid | 2403 /// when reading data. If the number of bad records exceeds this value, an |
| 2283 * error is returned in the job result. The default value is 0, which requires | 2404 /// invalid error is returned in the job result. The default value is 0, |
| 2284 * that all records are valid. This setting is ignored for Google Cloud | 2405 /// which requires that all records are valid. This setting is ignored for |
| 2285 * Bigtable, Google Cloud Datastore backups and Avro formats. | 2406 /// Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats. |
| 2286 */ | |
| 2287 core.int maxBadRecords; | 2407 core.int maxBadRecords; |
| 2288 /** | 2408 |
| 2289 * [Optional] The schema for the data. Schema is required for CSV and JSON | 2409 /// [Optional] The schema for the data. Schema is required for CSV and JSON |
| 2290 * formats. Schema is disallowed for Google Cloud Bigtable, Cloud Datastore | 2410 /// formats. Schema is disallowed for Google Cloud Bigtable, Cloud Datastore |
| 2291 * backups, and Avro formats. | 2411 /// backups, and Avro formats. |
| 2292 */ | |
| 2293 TableSchema schema; | 2412 TableSchema schema; |
| 2294 /** | 2413 |
| 2295 * [Required] The data format. For CSV files, specify "CSV". For Google | 2414 /// [Required] The data format. For CSV files, specify "CSV". For Google |
| 2296 * sheets, specify "GOOGLE_SHEETS". For newline-delimited JSON, specify | 2415 /// sheets, specify "GOOGLE_SHEETS". For newline-delimited JSON, specify |
| 2297 * "NEWLINE_DELIMITED_JSON". For Avro files, specify "AVRO". For Google Cloud | 2416 /// "NEWLINE_DELIMITED_JSON". For Avro files, specify "AVRO". For Google |
| 2298 * Datastore backups, specify "DATASTORE_BACKUP". [Beta] For Google Cloud | 2417 /// Cloud Datastore backups, specify "DATASTORE_BACKUP". [Beta] For Google |
| 2299 * Bigtable, specify "BIGTABLE". | 2418 /// Cloud Bigtable, specify "BIGTABLE". |
| 2300 */ | |
| 2301 core.String sourceFormat; | 2419 core.String sourceFormat; |
| 2302 /** | 2420 |
| 2303 * [Required] The fully-qualified URIs that point to your data in Google | 2421 /// [Required] The fully-qualified URIs that point to your data in Google |
| 2304 * Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' wildcard | 2422 /// Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' |
| 2305 * character and it must come after the 'bucket' name. Size limits related to | 2423 /// wildcard character and it must come after the 'bucket' name. Size limits |
| 2306 * load jobs apply to external data sources. For Google Cloud Bigtable URIs: | 2424 /// related to load jobs apply to external data sources. For Google Cloud |
| 2307 * Exactly one URI can be specified and it has be a fully specified and valid | 2425 /// Bigtable URIs: Exactly one URI can be specified and it has be a fully |
| 2308 * HTTPS URL for a Google Cloud Bigtable table. For Google Cloud Datastore | 2426 /// specified and valid HTTPS URL for a Google Cloud Bigtable table. For |
| 2309 * backups, exactly one URI can be specified. Also, the '*' wildcard character | 2427 /// Google Cloud Datastore backups, exactly one URI can be specified. Also, |
| 2310 * is not allowed. | 2428 /// the '*' wildcard character is not allowed. |
| 2311 */ | |
| 2312 core.List<core.String> sourceUris; | 2429 core.List<core.String> sourceUris; |
| 2313 | 2430 |
| 2314 ExternalDataConfiguration(); | 2431 ExternalDataConfiguration(); |
| 2315 | 2432 |
| 2316 ExternalDataConfiguration.fromJson(core.Map _json) { | 2433 ExternalDataConfiguration.fromJson(core.Map _json) { |
| 2317 if (_json.containsKey("autodetect")) { | 2434 if (_json.containsKey("autodetect")) { |
| 2318 autodetect = _json["autodetect"]; | 2435 autodetect = _json["autodetect"]; |
| 2319 } | 2436 } |
| 2320 if (_json.containsKey("bigtableOptions")) { | 2437 if (_json.containsKey("bigtableOptions")) { |
| 2321 bigtableOptions = new BigtableOptions.fromJson(_json["bigtableOptions"]); | 2438 bigtableOptions = new BigtableOptions.fromJson(_json["bigtableOptions"]); |
| 2322 } | 2439 } |
| 2323 if (_json.containsKey("compression")) { | 2440 if (_json.containsKey("compression")) { |
| 2324 compression = _json["compression"]; | 2441 compression = _json["compression"]; |
| 2325 } | 2442 } |
| 2326 if (_json.containsKey("csvOptions")) { | 2443 if (_json.containsKey("csvOptions")) { |
| 2327 csvOptions = new CsvOptions.fromJson(_json["csvOptions"]); | 2444 csvOptions = new CsvOptions.fromJson(_json["csvOptions"]); |
| 2328 } | 2445 } |
| 2329 if (_json.containsKey("googleSheetsOptions")) { | 2446 if (_json.containsKey("googleSheetsOptions")) { |
| 2330 googleSheetsOptions = new GoogleSheetsOptions.fromJson(_json["googleSheets
Options"]); | 2447 googleSheetsOptions = |
| 2448 new GoogleSheetsOptions.fromJson(_json["googleSheetsOptions"]); |
| 2331 } | 2449 } |
| 2332 if (_json.containsKey("ignoreUnknownValues")) { | 2450 if (_json.containsKey("ignoreUnknownValues")) { |
| 2333 ignoreUnknownValues = _json["ignoreUnknownValues"]; | 2451 ignoreUnknownValues = _json["ignoreUnknownValues"]; |
| 2334 } | 2452 } |
| 2335 if (_json.containsKey("maxBadRecords")) { | 2453 if (_json.containsKey("maxBadRecords")) { |
| 2336 maxBadRecords = _json["maxBadRecords"]; | 2454 maxBadRecords = _json["maxBadRecords"]; |
| 2337 } | 2455 } |
| 2338 if (_json.containsKey("schema")) { | 2456 if (_json.containsKey("schema")) { |
| 2339 schema = new TableSchema.fromJson(_json["schema"]); | 2457 schema = new TableSchema.fromJson(_json["schema"]); |
| 2340 } | 2458 } |
| 2341 if (_json.containsKey("sourceFormat")) { | 2459 if (_json.containsKey("sourceFormat")) { |
| 2342 sourceFormat = _json["sourceFormat"]; | 2460 sourceFormat = _json["sourceFormat"]; |
| 2343 } | 2461 } |
| 2344 if (_json.containsKey("sourceUris")) { | 2462 if (_json.containsKey("sourceUris")) { |
| 2345 sourceUris = _json["sourceUris"]; | 2463 sourceUris = _json["sourceUris"]; |
| 2346 } | 2464 } |
| 2347 } | 2465 } |
| 2348 | 2466 |
| 2349 core.Map<core.String, core.Object> toJson() { | 2467 core.Map<core.String, core.Object> toJson() { |
| 2350 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2468 final core.Map<core.String, core.Object> _json = |
| 2469 new core.Map<core.String, core.Object>(); |
| 2351 if (autodetect != null) { | 2470 if (autodetect != null) { |
| 2352 _json["autodetect"] = autodetect; | 2471 _json["autodetect"] = autodetect; |
| 2353 } | 2472 } |
| 2354 if (bigtableOptions != null) { | 2473 if (bigtableOptions != null) { |
| 2355 _json["bigtableOptions"] = (bigtableOptions).toJson(); | 2474 _json["bigtableOptions"] = (bigtableOptions).toJson(); |
| 2356 } | 2475 } |
| 2357 if (compression != null) { | 2476 if (compression != null) { |
| 2358 _json["compression"] = compression; | 2477 _json["compression"] = compression; |
| 2359 } | 2478 } |
| 2360 if (csvOptions != null) { | 2479 if (csvOptions != null) { |
| (...skipping 15 matching lines...) Expand all Loading... |
| 2376 _json["sourceFormat"] = sourceFormat; | 2495 _json["sourceFormat"] = sourceFormat; |
| 2377 } | 2496 } |
| 2378 if (sourceUris != null) { | 2497 if (sourceUris != null) { |
| 2379 _json["sourceUris"] = sourceUris; | 2498 _json["sourceUris"] = sourceUris; |
| 2380 } | 2499 } |
| 2381 return _json; | 2500 return _json; |
| 2382 } | 2501 } |
| 2383 } | 2502 } |
| 2384 | 2503 |
| 2385 class GetQueryResultsResponse { | 2504 class GetQueryResultsResponse { |
| 2386 /** Whether the query result was fetched from the query cache. */ | 2505 /// Whether the query result was fetched from the query cache. |
| 2387 core.bool cacheHit; | 2506 core.bool cacheHit; |
| 2388 /** | 2507 |
| 2389 * [Output-only] The first errors or warnings encountered during the running | 2508 /// [Output-only] The first errors or warnings encountered during the running |
| 2390 * of the job. The final message includes the number of errors that caused the | 2509 /// of the job. The final message includes the number of errors that caused |
| 2391 * process to stop. Errors here do not necessarily mean that the job has | 2510 /// the process to stop. Errors here do not necessarily mean that the job has |
| 2392 * completed or was unsuccessful. | 2511 /// completed or was unsuccessful. |
| 2393 */ | |
| 2394 core.List<ErrorProto> errors; | 2512 core.List<ErrorProto> errors; |
| 2395 /** A hash of this response. */ | 2513 |
| 2514 /// A hash of this response. |
| 2396 core.String etag; | 2515 core.String etag; |
| 2397 /** | 2516 |
| 2398 * Whether the query has completed or not. If rows or totalRows are present, | 2517 /// Whether the query has completed or not. If rows or totalRows are present, |
| 2399 * this will always be true. If this is false, totalRows will not be | 2518 /// this will always be true. If this is false, totalRows will not be |
| 2400 * available. | 2519 /// available. |
| 2401 */ | |
| 2402 core.bool jobComplete; | 2520 core.bool jobComplete; |
| 2403 /** | 2521 |
| 2404 * Reference to the BigQuery Job that was created to run the query. This field | 2522 /// Reference to the BigQuery Job that was created to run the query. This |
| 2405 * will be present even if the original request timed out, in which case | 2523 /// field will be present even if the original request timed out, in which |
| 2406 * GetQueryResults can be used to read the results once the query has | 2524 /// case GetQueryResults can be used to read the results once the query has |
| 2407 * completed. Since this API only returns the first page of results, | 2525 /// completed. Since this API only returns the first page of results, |
| 2408 * subsequent pages can be fetched via the same mechanism (GetQueryResults). | 2526 /// subsequent pages can be fetched via the same mechanism (GetQueryResults). |
| 2409 */ | |
| 2410 JobReference jobReference; | 2527 JobReference jobReference; |
| 2411 /** The resource type of the response. */ | 2528 |
| 2529 /// The resource type of the response. |
| 2412 core.String kind; | 2530 core.String kind; |
| 2413 /** | 2531 |
| 2414 * [Output-only] The number of rows affected by a DML statement. Present only | 2532 /// [Output-only] The number of rows affected by a DML statement. Present |
| 2415 * for DML statements INSERT, UPDATE or DELETE. | 2533 /// only for DML statements INSERT, UPDATE or DELETE. |
| 2416 */ | |
| 2417 core.String numDmlAffectedRows; | 2534 core.String numDmlAffectedRows; |
| 2418 /** A token used for paging results. */ | 2535 |
| 2536 /// A token used for paging results. |
| 2419 core.String pageToken; | 2537 core.String pageToken; |
| 2420 /** | 2538 |
| 2421 * An object with as many results as can be contained within the maximum | 2539 /// An object with as many results as can be contained within the maximum |
| 2422 * permitted reply size. To get any additional rows, you can call | 2540 /// permitted reply size. To get any additional rows, you can call |
| 2423 * GetQueryResults and specify the jobReference returned above. Present only | 2541 /// GetQueryResults and specify the jobReference returned above. Present only |
| 2424 * when the query completes successfully. | 2542 /// when the query completes successfully. |
| 2425 */ | |
| 2426 core.List<TableRow> rows; | 2543 core.List<TableRow> rows; |
| 2427 /** | 2544 |
| 2428 * The schema of the results. Present only when the query completes | 2545 /// The schema of the results. Present only when the query completes |
| 2429 * successfully. | 2546 /// successfully. |
| 2430 */ | |
| 2431 TableSchema schema; | 2547 TableSchema schema; |
| 2432 /** The total number of bytes processed for this query. */ | 2548 |
| 2549 /// The total number of bytes processed for this query. |
| 2433 core.String totalBytesProcessed; | 2550 core.String totalBytesProcessed; |
| 2434 /** | 2551 |
| 2435 * The total number of rows in the complete query result set, which can be | 2552 /// The total number of rows in the complete query result set, which can be |
| 2436 * more than the number of rows in this single page of results. Present only | 2553 /// more than the number of rows in this single page of results. Present only |
| 2437 * when the query completes successfully. | 2554 /// when the query completes successfully. |
| 2438 */ | |
| 2439 core.String totalRows; | 2555 core.String totalRows; |
| 2440 | 2556 |
| 2441 GetQueryResultsResponse(); | 2557 GetQueryResultsResponse(); |
| 2442 | 2558 |
| 2443 GetQueryResultsResponse.fromJson(core.Map _json) { | 2559 GetQueryResultsResponse.fromJson(core.Map _json) { |
| 2444 if (_json.containsKey("cacheHit")) { | 2560 if (_json.containsKey("cacheHit")) { |
| 2445 cacheHit = _json["cacheHit"]; | 2561 cacheHit = _json["cacheHit"]; |
| 2446 } | 2562 } |
| 2447 if (_json.containsKey("errors")) { | 2563 if (_json.containsKey("errors")) { |
| 2448 errors = _json["errors"].map((value) => new ErrorProto.fromJson(value)).to
List(); | 2564 errors = _json["errors"] |
| 2565 .map((value) => new ErrorProto.fromJson(value)) |
| 2566 .toList(); |
| 2449 } | 2567 } |
| 2450 if (_json.containsKey("etag")) { | 2568 if (_json.containsKey("etag")) { |
| 2451 etag = _json["etag"]; | 2569 etag = _json["etag"]; |
| 2452 } | 2570 } |
| 2453 if (_json.containsKey("jobComplete")) { | 2571 if (_json.containsKey("jobComplete")) { |
| 2454 jobComplete = _json["jobComplete"]; | 2572 jobComplete = _json["jobComplete"]; |
| 2455 } | 2573 } |
| 2456 if (_json.containsKey("jobReference")) { | 2574 if (_json.containsKey("jobReference")) { |
| 2457 jobReference = new JobReference.fromJson(_json["jobReference"]); | 2575 jobReference = new JobReference.fromJson(_json["jobReference"]); |
| 2458 } | 2576 } |
| 2459 if (_json.containsKey("kind")) { | 2577 if (_json.containsKey("kind")) { |
| 2460 kind = _json["kind"]; | 2578 kind = _json["kind"]; |
| 2461 } | 2579 } |
| 2462 if (_json.containsKey("numDmlAffectedRows")) { | 2580 if (_json.containsKey("numDmlAffectedRows")) { |
| 2463 numDmlAffectedRows = _json["numDmlAffectedRows"]; | 2581 numDmlAffectedRows = _json["numDmlAffectedRows"]; |
| 2464 } | 2582 } |
| 2465 if (_json.containsKey("pageToken")) { | 2583 if (_json.containsKey("pageToken")) { |
| 2466 pageToken = _json["pageToken"]; | 2584 pageToken = _json["pageToken"]; |
| 2467 } | 2585 } |
| 2468 if (_json.containsKey("rows")) { | 2586 if (_json.containsKey("rows")) { |
| 2469 rows = _json["rows"].map((value) => new TableRow.fromJson(value)).toList()
; | 2587 rows = |
| 2588 _json["rows"].map((value) => new TableRow.fromJson(value)).toList(); |
| 2470 } | 2589 } |
| 2471 if (_json.containsKey("schema")) { | 2590 if (_json.containsKey("schema")) { |
| 2472 schema = new TableSchema.fromJson(_json["schema"]); | 2591 schema = new TableSchema.fromJson(_json["schema"]); |
| 2473 } | 2592 } |
| 2474 if (_json.containsKey("totalBytesProcessed")) { | 2593 if (_json.containsKey("totalBytesProcessed")) { |
| 2475 totalBytesProcessed = _json["totalBytesProcessed"]; | 2594 totalBytesProcessed = _json["totalBytesProcessed"]; |
| 2476 } | 2595 } |
| 2477 if (_json.containsKey("totalRows")) { | 2596 if (_json.containsKey("totalRows")) { |
| 2478 totalRows = _json["totalRows"]; | 2597 totalRows = _json["totalRows"]; |
| 2479 } | 2598 } |
| 2480 } | 2599 } |
| 2481 | 2600 |
| 2482 core.Map<core.String, core.Object> toJson() { | 2601 core.Map<core.String, core.Object> toJson() { |
| 2483 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2602 final core.Map<core.String, core.Object> _json = |
| 2603 new core.Map<core.String, core.Object>(); |
| 2484 if (cacheHit != null) { | 2604 if (cacheHit != null) { |
| 2485 _json["cacheHit"] = cacheHit; | 2605 _json["cacheHit"] = cacheHit; |
| 2486 } | 2606 } |
| 2487 if (errors != null) { | 2607 if (errors != null) { |
| 2488 _json["errors"] = errors.map((value) => (value).toJson()).toList(); | 2608 _json["errors"] = errors.map((value) => (value).toJson()).toList(); |
| 2489 } | 2609 } |
| 2490 if (etag != null) { | 2610 if (etag != null) { |
| 2491 _json["etag"] = etag; | 2611 _json["etag"] = etag; |
| 2492 } | 2612 } |
| 2493 if (jobComplete != null) { | 2613 if (jobComplete != null) { |
| (...skipping 20 matching lines...) Expand all Loading... |
| 2514 if (totalBytesProcessed != null) { | 2634 if (totalBytesProcessed != null) { |
| 2515 _json["totalBytesProcessed"] = totalBytesProcessed; | 2635 _json["totalBytesProcessed"] = totalBytesProcessed; |
| 2516 } | 2636 } |
| 2517 if (totalRows != null) { | 2637 if (totalRows != null) { |
| 2518 _json["totalRows"] = totalRows; | 2638 _json["totalRows"] = totalRows; |
| 2519 } | 2639 } |
| 2520 return _json; | 2640 return _json; |
| 2521 } | 2641 } |
| 2522 } | 2642 } |
| 2523 | 2643 |
| 2644 class GetServiceAccountResponse { |
| 2645 /// The service account email address. |
| 2646 core.String email; |
| 2647 |
| 2648 /// The resource type of the response. |
| 2649 core.String kind; |
| 2650 |
| 2651 GetServiceAccountResponse(); |
| 2652 |
| 2653 GetServiceAccountResponse.fromJson(core.Map _json) { |
| 2654 if (_json.containsKey("email")) { |
| 2655 email = _json["email"]; |
| 2656 } |
| 2657 if (_json.containsKey("kind")) { |
| 2658 kind = _json["kind"]; |
| 2659 } |
| 2660 } |
| 2661 |
| 2662 core.Map<core.String, core.Object> toJson() { |
| 2663 final core.Map<core.String, core.Object> _json = |
| 2664 new core.Map<core.String, core.Object>(); |
| 2665 if (email != null) { |
| 2666 _json["email"] = email; |
| 2667 } |
| 2668 if (kind != null) { |
| 2669 _json["kind"] = kind; |
| 2670 } |
| 2671 return _json; |
| 2672 } |
| 2673 } |
| 2674 |
| 2524 class GoogleSheetsOptions { | 2675 class GoogleSheetsOptions { |
| 2525 /** | 2676 /// [Optional] The number of rows at the top of a sheet that BigQuery will |
| 2526 * [Optional] The number of rows at the top of a sheet that BigQuery will skip | 2677 /// skip when reading the data. The default value is 0. This property is |
| 2527 * when reading the data. The default value is 0. This property is useful if | 2678 /// useful if you have header rows that should be skipped. When autodetect is |
| 2528 * you have header rows that should be skipped. When autodetect is on, | 2679 /// on, behavior is the following: * skipLeadingRows unspecified - Autodetect |
| 2529 * behavior is the following: * skipLeadingRows unspecified - Autodetect tries | 2680 /// tries to detect headers in the first row. If they are not detected, the |
| 2530 * to detect headers in the first row. If they are not detected, the row is | 2681 /// row is read as data. Otherwise data is read starting from the second row. |
| 2531 * read as data. Otherwise data is read starting from the second row. * | 2682 /// * skipLeadingRows is 0 - Instructs autodetect that there are no headers |
| 2532 * skipLeadingRows is 0 - Instructs autodetect that there are no headers and | 2683 /// and data should be read starting from the first row. * skipLeadingRows = |
| 2533 * data should be read starting from the first row. * skipLeadingRows = N > 0 | 2684 /// N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. |
| 2534 * - Autodetect skips N-1 rows and tries to detect headers in row N. If | 2685 /// If headers are not detected, row N is just skipped. Otherwise row N is |
| 2535 * headers are not detected, row N is just skipped. Otherwise row N is used to | 2686 /// used to extract column names for the detected schema. |
| 2536 * extract column names for the detected schema. | |
| 2537 */ | |
| 2538 core.String skipLeadingRows; | 2687 core.String skipLeadingRows; |
| 2539 | 2688 |
| 2540 GoogleSheetsOptions(); | 2689 GoogleSheetsOptions(); |
| 2541 | 2690 |
| 2542 GoogleSheetsOptions.fromJson(core.Map _json) { | 2691 GoogleSheetsOptions.fromJson(core.Map _json) { |
| 2543 if (_json.containsKey("skipLeadingRows")) { | 2692 if (_json.containsKey("skipLeadingRows")) { |
| 2544 skipLeadingRows = _json["skipLeadingRows"]; | 2693 skipLeadingRows = _json["skipLeadingRows"]; |
| 2545 } | 2694 } |
| 2546 } | 2695 } |
| 2547 | 2696 |
| 2548 core.Map<core.String, core.Object> toJson() { | 2697 core.Map<core.String, core.Object> toJson() { |
| 2549 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2698 final core.Map<core.String, core.Object> _json = |
| 2699 new core.Map<core.String, core.Object>(); |
| 2550 if (skipLeadingRows != null) { | 2700 if (skipLeadingRows != null) { |
| 2551 _json["skipLeadingRows"] = skipLeadingRows; | 2701 _json["skipLeadingRows"] = skipLeadingRows; |
| 2552 } | 2702 } |
| 2553 return _json; | 2703 return _json; |
| 2554 } | 2704 } |
| 2555 } | 2705 } |
| 2556 | 2706 |
| 2557 class Job { | 2707 class Job { |
| 2558 /** [Required] Describes the job configuration. */ | 2708 /// [Required] Describes the job configuration. |
| 2559 JobConfiguration configuration; | 2709 JobConfiguration configuration; |
| 2560 /** [Output-only] A hash of this resource. */ | 2710 |
| 2711 /// [Output-only] A hash of this resource. |
| 2561 core.String etag; | 2712 core.String etag; |
| 2562 /** [Output-only] Opaque ID field of the job */ | 2713 |
| 2714 /// [Output-only] Opaque ID field of the job |
| 2563 core.String id; | 2715 core.String id; |
| 2564 /** [Optional] Reference describing the unique-per-user name of the job. */ | 2716 |
| 2717 /// [Optional] Reference describing the unique-per-user name of the job. |
| 2565 JobReference jobReference; | 2718 JobReference jobReference; |
| 2566 /** [Output-only] The type of the resource. */ | 2719 |
| 2720 /// [Output-only] The type of the resource. |
| 2567 core.String kind; | 2721 core.String kind; |
| 2568 /** [Output-only] A URL that can be used to access this resource again. */ | 2722 |
| 2723 /// [Output-only] A URL that can be used to access this resource again. |
| 2569 core.String selfLink; | 2724 core.String selfLink; |
| 2570 /** | 2725 |
| 2571 * [Output-only] Information about the job, including starting time and ending | 2726 /// [Output-only] Information about the job, including starting time and |
| 2572 * time of the job. | 2727 /// ending time of the job. |
| 2573 */ | |
| 2574 JobStatistics statistics; | 2728 JobStatistics statistics; |
| 2575 /** | 2729 |
| 2576 * [Output-only] The status of this job. Examine this value when polling an | 2730 /// [Output-only] The status of this job. Examine this value when polling an |
| 2577 * asynchronous job to see if the job is complete. | 2731 /// asynchronous job to see if the job is complete. |
| 2578 */ | |
| 2579 JobStatus status; | 2732 JobStatus status; |
| 2580 /** [Output-only] Email address of the user who ran the job. */ | 2733 |
| 2734 /// [Output-only] Email address of the user who ran the job. |
| 2581 core.String userEmail; | 2735 core.String userEmail; |
| 2582 | 2736 |
| 2583 Job(); | 2737 Job(); |
| 2584 | 2738 |
| 2585 Job.fromJson(core.Map _json) { | 2739 Job.fromJson(core.Map _json) { |
| 2586 if (_json.containsKey("configuration")) { | 2740 if (_json.containsKey("configuration")) { |
| 2587 configuration = new JobConfiguration.fromJson(_json["configuration"]); | 2741 configuration = new JobConfiguration.fromJson(_json["configuration"]); |
| 2588 } | 2742 } |
| 2589 if (_json.containsKey("etag")) { | 2743 if (_json.containsKey("etag")) { |
| 2590 etag = _json["etag"]; | 2744 etag = _json["etag"]; |
| (...skipping 15 matching lines...) Expand all Loading... |
| 2606 } | 2760 } |
| 2607 if (_json.containsKey("status")) { | 2761 if (_json.containsKey("status")) { |
| 2608 status = new JobStatus.fromJson(_json["status"]); | 2762 status = new JobStatus.fromJson(_json["status"]); |
| 2609 } | 2763 } |
| 2610 if (_json.containsKey("user_email")) { | 2764 if (_json.containsKey("user_email")) { |
| 2611 userEmail = _json["user_email"]; | 2765 userEmail = _json["user_email"]; |
| 2612 } | 2766 } |
| 2613 } | 2767 } |
| 2614 | 2768 |
| 2615 core.Map<core.String, core.Object> toJson() { | 2769 core.Map<core.String, core.Object> toJson() { |
| 2616 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2770 final core.Map<core.String, core.Object> _json = |
| 2771 new core.Map<core.String, core.Object>(); |
| 2617 if (configuration != null) { | 2772 if (configuration != null) { |
| 2618 _json["configuration"] = (configuration).toJson(); | 2773 _json["configuration"] = (configuration).toJson(); |
| 2619 } | 2774 } |
| 2620 if (etag != null) { | 2775 if (etag != null) { |
| 2621 _json["etag"] = etag; | 2776 _json["etag"] = etag; |
| 2622 } | 2777 } |
| 2623 if (id != null) { | 2778 if (id != null) { |
| 2624 _json["id"] = id; | 2779 _json["id"] = id; |
| 2625 } | 2780 } |
| 2626 if (jobReference != null) { | 2781 if (jobReference != null) { |
| (...skipping 12 matching lines...) Expand all Loading... |
| 2639 _json["status"] = (status).toJson(); | 2794 _json["status"] = (status).toJson(); |
| 2640 } | 2795 } |
| 2641 if (userEmail != null) { | 2796 if (userEmail != null) { |
| 2642 _json["user_email"] = userEmail; | 2797 _json["user_email"] = userEmail; |
| 2643 } | 2798 } |
| 2644 return _json; | 2799 return _json; |
| 2645 } | 2800 } |
| 2646 } | 2801 } |
| 2647 | 2802 |
| 2648 class JobCancelResponse { | 2803 class JobCancelResponse { |
| 2649 /** The final state of the job. */ | 2804 /// The final state of the job. |
| 2650 Job job; | 2805 Job job; |
| 2651 /** The resource type of the response. */ | 2806 |
| 2807 /// The resource type of the response. |
| 2652 core.String kind; | 2808 core.String kind; |
| 2653 | 2809 |
| 2654 JobCancelResponse(); | 2810 JobCancelResponse(); |
| 2655 | 2811 |
| 2656 JobCancelResponse.fromJson(core.Map _json) { | 2812 JobCancelResponse.fromJson(core.Map _json) { |
| 2657 if (_json.containsKey("job")) { | 2813 if (_json.containsKey("job")) { |
| 2658 job = new Job.fromJson(_json["job"]); | 2814 job = new Job.fromJson(_json["job"]); |
| 2659 } | 2815 } |
| 2660 if (_json.containsKey("kind")) { | 2816 if (_json.containsKey("kind")) { |
| 2661 kind = _json["kind"]; | 2817 kind = _json["kind"]; |
| 2662 } | 2818 } |
| 2663 } | 2819 } |
| 2664 | 2820 |
| 2665 core.Map<core.String, core.Object> toJson() { | 2821 core.Map<core.String, core.Object> toJson() { |
| 2666 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2822 final core.Map<core.String, core.Object> _json = |
| 2823 new core.Map<core.String, core.Object>(); |
| 2667 if (job != null) { | 2824 if (job != null) { |
| 2668 _json["job"] = (job).toJson(); | 2825 _json["job"] = (job).toJson(); |
| 2669 } | 2826 } |
| 2670 if (kind != null) { | 2827 if (kind != null) { |
| 2671 _json["kind"] = kind; | 2828 _json["kind"] = kind; |
| 2672 } | 2829 } |
| 2673 return _json; | 2830 return _json; |
| 2674 } | 2831 } |
| 2675 } | 2832 } |
| 2676 | 2833 |
| 2677 class JobConfiguration { | 2834 class JobConfiguration { |
| 2678 /** [Pick one] Copies a table. */ | 2835 /// [Pick one] Copies a table. |
| 2679 JobConfigurationTableCopy copy; | 2836 JobConfigurationTableCopy copy; |
| 2680 /** | 2837 |
| 2681 * [Optional] If set, don't actually run this job. A valid query will return a | 2838 /// [Optional] If set, don't actually run this job. A valid query will return |
| 2682 * mostly empty response with some processing statistics, while an invalid | 2839 /// a mostly empty response with some processing statistics, while an invalid |
| 2683 * query will return the same error it would if it wasn't a dry run. Behavior | 2840 /// query will return the same error it would if it wasn't a dry run. |
| 2684 * of non-query jobs is undefined. | 2841 /// Behavior of non-query jobs is undefined. |
| 2685 */ | |
| 2686 core.bool dryRun; | 2842 core.bool dryRun; |
| 2687 /** [Pick one] Configures an extract job. */ | 2843 |
| 2844 /// [Pick one] Configures an extract job. |
| 2688 JobConfigurationExtract extract; | 2845 JobConfigurationExtract extract; |
| 2689 /** | 2846 |
| 2690 * [Experimental] The labels associated with this job. You can use these to | 2847 /// [Experimental] The labels associated with this job. You can use these to |
| 2691 * organize and group your jobs. Label keys and values can be no longer than | 2848 /// organize and group your jobs. Label keys and values can be no longer than |
| 2692 * 63 characters, can only contain lowercase letters, numeric characters, | 2849 /// 63 characters, can only contain lowercase letters, numeric characters, |
| 2693 * underscores and dashes. International characters are allowed. Label values | 2850 /// underscores and dashes. International characters are allowed. Label |
| 2694 * are optional. Label keys must start with a letter and each label in the | 2851 /// values are optional. Label keys must start with a letter and each label |
| 2695 * list must have a different key. | 2852 /// in the list must have a different key. |
| 2696 */ | |
| 2697 core.Map<core.String, core.String> labels; | 2853 core.Map<core.String, core.String> labels; |
| 2698 /** [Pick one] Configures a load job. */ | 2854 |
| 2855 /// [Pick one] Configures a load job. |
| 2699 JobConfigurationLoad load; | 2856 JobConfigurationLoad load; |
| 2700 /** [Pick one] Configures a query job. */ | 2857 |
| 2858 /// [Pick one] Configures a query job. |
| 2701 JobConfigurationQuery query; | 2859 JobConfigurationQuery query; |
| 2702 | 2860 |
| 2703 JobConfiguration(); | 2861 JobConfiguration(); |
| 2704 | 2862 |
| 2705 JobConfiguration.fromJson(core.Map _json) { | 2863 JobConfiguration.fromJson(core.Map _json) { |
| 2706 if (_json.containsKey("copy")) { | 2864 if (_json.containsKey("copy")) { |
| 2707 copy = new JobConfigurationTableCopy.fromJson(_json["copy"]); | 2865 copy = new JobConfigurationTableCopy.fromJson(_json["copy"]); |
| 2708 } | 2866 } |
| 2709 if (_json.containsKey("dryRun")) { | 2867 if (_json.containsKey("dryRun")) { |
| 2710 dryRun = _json["dryRun"]; | 2868 dryRun = _json["dryRun"]; |
| 2711 } | 2869 } |
| 2712 if (_json.containsKey("extract")) { | 2870 if (_json.containsKey("extract")) { |
| 2713 extract = new JobConfigurationExtract.fromJson(_json["extract"]); | 2871 extract = new JobConfigurationExtract.fromJson(_json["extract"]); |
| 2714 } | 2872 } |
| 2715 if (_json.containsKey("labels")) { | 2873 if (_json.containsKey("labels")) { |
| 2716 labels = _json["labels"]; | 2874 labels = _json["labels"]; |
| 2717 } | 2875 } |
| 2718 if (_json.containsKey("load")) { | 2876 if (_json.containsKey("load")) { |
| 2719 load = new JobConfigurationLoad.fromJson(_json["load"]); | 2877 load = new JobConfigurationLoad.fromJson(_json["load"]); |
| 2720 } | 2878 } |
| 2721 if (_json.containsKey("query")) { | 2879 if (_json.containsKey("query")) { |
| 2722 query = new JobConfigurationQuery.fromJson(_json["query"]); | 2880 query = new JobConfigurationQuery.fromJson(_json["query"]); |
| 2723 } | 2881 } |
| 2724 } | 2882 } |
| 2725 | 2883 |
| 2726 core.Map<core.String, core.Object> toJson() { | 2884 core.Map<core.String, core.Object> toJson() { |
| 2727 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2885 final core.Map<core.String, core.Object> _json = |
| 2886 new core.Map<core.String, core.Object>(); |
| 2728 if (copy != null) { | 2887 if (copy != null) { |
| 2729 _json["copy"] = (copy).toJson(); | 2888 _json["copy"] = (copy).toJson(); |
| 2730 } | 2889 } |
| 2731 if (dryRun != null) { | 2890 if (dryRun != null) { |
| 2732 _json["dryRun"] = dryRun; | 2891 _json["dryRun"] = dryRun; |
| 2733 } | 2892 } |
| 2734 if (extract != null) { | 2893 if (extract != null) { |
| 2735 _json["extract"] = (extract).toJson(); | 2894 _json["extract"] = (extract).toJson(); |
| 2736 } | 2895 } |
| 2737 if (labels != null) { | 2896 if (labels != null) { |
| 2738 _json["labels"] = labels; | 2897 _json["labels"] = labels; |
| 2739 } | 2898 } |
| 2740 if (load != null) { | 2899 if (load != null) { |
| 2741 _json["load"] = (load).toJson(); | 2900 _json["load"] = (load).toJson(); |
| 2742 } | 2901 } |
| 2743 if (query != null) { | 2902 if (query != null) { |
| 2744 _json["query"] = (query).toJson(); | 2903 _json["query"] = (query).toJson(); |
| 2745 } | 2904 } |
| 2746 return _json; | 2905 return _json; |
| 2747 } | 2906 } |
| 2748 } | 2907 } |
| 2749 | 2908 |
| 2750 class JobConfigurationExtract { | 2909 class JobConfigurationExtract { |
| 2751 /** | 2910 /// [Optional] The compression type to use for exported files. Possible |
| 2752 * [Optional] The compression type to use for exported files. Possible values | 2911 /// values include GZIP and NONE. The default value is NONE. |
| 2753 * include GZIP and NONE. The default value is NONE. | |
| 2754 */ | |
| 2755 core.String compression; | 2912 core.String compression; |
| 2756 /** | 2913 |
| 2757 * [Optional] The exported file format. Possible values include CSV, | 2914 /// [Optional] The exported file format. Possible values include CSV, |
| 2758 * NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV. Tables with | 2915 /// NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV. Tables with |
| 2759 * nested or repeated fields cannot be exported as CSV. | 2916 /// nested or repeated fields cannot be exported as CSV. |
| 2760 */ | |
| 2761 core.String destinationFormat; | 2917 core.String destinationFormat; |
| 2762 /** | 2918 |
| 2763 * [Pick one] DEPRECATED: Use destinationUris instead, passing only one URI as | 2919 /// [Pick one] DEPRECATED: Use destinationUris instead, passing only one URI |
| 2764 * necessary. The fully-qualified Google Cloud Storage URI where the extracted | 2920 /// as necessary. The fully-qualified Google Cloud Storage URI where the |
| 2765 * table should be written. | 2921 /// extracted table should be written. |
| 2766 */ | |
| 2767 core.String destinationUri; | 2922 core.String destinationUri; |
| 2768 /** | 2923 |
| 2769 * [Pick one] A list of fully-qualified Google Cloud Storage URIs where the | 2924 /// [Pick one] A list of fully-qualified Google Cloud Storage URIs where the |
| 2770 * extracted table should be written. | 2925 /// extracted table should be written. |
| 2771 */ | |
| 2772 core.List<core.String> destinationUris; | 2926 core.List<core.String> destinationUris; |
| 2773 /** | 2927 |
| 2774 * [Optional] Delimiter to use between fields in the exported data. Default is | 2928 /// [Optional] Delimiter to use between fields in the exported data. Default |
| 2775 * ',' | 2929 /// is ',' |
| 2776 */ | |
| 2777 core.String fieldDelimiter; | 2930 core.String fieldDelimiter; |
| 2778 /** | 2931 |
| 2779 * [Optional] Whether to print out a header row in the results. Default is | 2932 /// [Optional] Whether to print out a header row in the results. Default is |
| 2780 * true. | 2933 /// true. |
| 2781 */ | |
| 2782 core.bool printHeader; | 2934 core.bool printHeader; |
| 2783 /** [Required] A reference to the table being exported. */ | 2935 |
| 2936 /// [Required] A reference to the table being exported. |
| 2784 TableReference sourceTable; | 2937 TableReference sourceTable; |
| 2785 | 2938 |
| 2786 JobConfigurationExtract(); | 2939 JobConfigurationExtract(); |
| 2787 | 2940 |
| 2788 JobConfigurationExtract.fromJson(core.Map _json) { | 2941 JobConfigurationExtract.fromJson(core.Map _json) { |
| 2789 if (_json.containsKey("compression")) { | 2942 if (_json.containsKey("compression")) { |
| 2790 compression = _json["compression"]; | 2943 compression = _json["compression"]; |
| 2791 } | 2944 } |
| 2792 if (_json.containsKey("destinationFormat")) { | 2945 if (_json.containsKey("destinationFormat")) { |
| 2793 destinationFormat = _json["destinationFormat"]; | 2946 destinationFormat = _json["destinationFormat"]; |
| 2794 } | 2947 } |
| 2795 if (_json.containsKey("destinationUri")) { | 2948 if (_json.containsKey("destinationUri")) { |
| 2796 destinationUri = _json["destinationUri"]; | 2949 destinationUri = _json["destinationUri"]; |
| 2797 } | 2950 } |
| 2798 if (_json.containsKey("destinationUris")) { | 2951 if (_json.containsKey("destinationUris")) { |
| 2799 destinationUris = _json["destinationUris"]; | 2952 destinationUris = _json["destinationUris"]; |
| 2800 } | 2953 } |
| 2801 if (_json.containsKey("fieldDelimiter")) { | 2954 if (_json.containsKey("fieldDelimiter")) { |
| 2802 fieldDelimiter = _json["fieldDelimiter"]; | 2955 fieldDelimiter = _json["fieldDelimiter"]; |
| 2803 } | 2956 } |
| 2804 if (_json.containsKey("printHeader")) { | 2957 if (_json.containsKey("printHeader")) { |
| 2805 printHeader = _json["printHeader"]; | 2958 printHeader = _json["printHeader"]; |
| 2806 } | 2959 } |
| 2807 if (_json.containsKey("sourceTable")) { | 2960 if (_json.containsKey("sourceTable")) { |
| 2808 sourceTable = new TableReference.fromJson(_json["sourceTable"]); | 2961 sourceTable = new TableReference.fromJson(_json["sourceTable"]); |
| 2809 } | 2962 } |
| 2810 } | 2963 } |
| 2811 | 2964 |
| 2812 core.Map<core.String, core.Object> toJson() { | 2965 core.Map<core.String, core.Object> toJson() { |
| 2813 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 2966 final core.Map<core.String, core.Object> _json = |
| 2967 new core.Map<core.String, core.Object>(); |
| 2814 if (compression != null) { | 2968 if (compression != null) { |
| 2815 _json["compression"] = compression; | 2969 _json["compression"] = compression; |
| 2816 } | 2970 } |
| 2817 if (destinationFormat != null) { | 2971 if (destinationFormat != null) { |
| 2818 _json["destinationFormat"] = destinationFormat; | 2972 _json["destinationFormat"] = destinationFormat; |
| 2819 } | 2973 } |
| 2820 if (destinationUri != null) { | 2974 if (destinationUri != null) { |
| 2821 _json["destinationUri"] = destinationUri; | 2975 _json["destinationUri"] = destinationUri; |
| 2822 } | 2976 } |
| 2823 if (destinationUris != null) { | 2977 if (destinationUris != null) { |
| 2824 _json["destinationUris"] = destinationUris; | 2978 _json["destinationUris"] = destinationUris; |
| 2825 } | 2979 } |
| 2826 if (fieldDelimiter != null) { | 2980 if (fieldDelimiter != null) { |
| 2827 _json["fieldDelimiter"] = fieldDelimiter; | 2981 _json["fieldDelimiter"] = fieldDelimiter; |
| 2828 } | 2982 } |
| 2829 if (printHeader != null) { | 2983 if (printHeader != null) { |
| 2830 _json["printHeader"] = printHeader; | 2984 _json["printHeader"] = printHeader; |
| 2831 } | 2985 } |
| 2832 if (sourceTable != null) { | 2986 if (sourceTable != null) { |
| 2833 _json["sourceTable"] = (sourceTable).toJson(); | 2987 _json["sourceTable"] = (sourceTable).toJson(); |
| 2834 } | 2988 } |
| 2835 return _json; | 2989 return _json; |
| 2836 } | 2990 } |
| 2837 } | 2991 } |
| 2838 | 2992 |
| 2839 class JobConfigurationLoad { | 2993 class JobConfigurationLoad { |
| 2840 /** | 2994 /// [Optional] Accept rows that are missing trailing optional columns. The |
| 2841 * [Optional] Accept rows that are missing trailing optional columns. The | 2995 /// missing values are treated as nulls. If false, records with missing |
| 2842 * missing values are treated as nulls. If false, records with missing | 2996 /// trailing columns are treated as bad records, and if there are too many |
| 2843 * trailing columns are treated as bad records, and if there are too many bad | 2997 /// bad records, an invalid error is returned in the job result. The default |
| 2844 * records, an invalid error is returned in the job result. The default value | 2998 /// value is false. Only applicable to CSV, ignored for other formats. |
| 2845 * is false. Only applicable to CSV, ignored for other formats. | |
| 2846 */ | |
| 2847 core.bool allowJaggedRows; | 2999 core.bool allowJaggedRows; |
| 2848 /** | 3000 |
| 2849 * Indicates if BigQuery should allow quoted data sections that contain | 3001 /// Indicates if BigQuery should allow quoted data sections that contain |
| 2850 * newline characters in a CSV file. The default value is false. | 3002 /// newline characters in a CSV file. The default value is false. |
| 2851 */ | |
| 2852 core.bool allowQuotedNewlines; | 3003 core.bool allowQuotedNewlines; |
| 2853 /** | 3004 |
| 2854 * Indicates if we should automatically infer the options and schema for CSV | 3005 /// Indicates if we should automatically infer the options and schema for CSV |
| 2855 * and JSON sources. | 3006 /// and JSON sources. |
| 2856 */ | |
| 2857 core.bool autodetect; | 3007 core.bool autodetect; |
| 2858 /** | 3008 |
| 2859 * [Optional] Specifies whether the job is allowed to create new tables. The | 3009 /// [Optional] Specifies whether the job is allowed to create new tables. The |
| 2860 * following values are supported: CREATE_IF_NEEDED: If the table does not | 3010 /// following values are supported: CREATE_IF_NEEDED: If the table does not |
| 2861 * exist, BigQuery creates the table. CREATE_NEVER: The table must already | 3011 /// exist, BigQuery creates the table. CREATE_NEVER: The table must already |
| 2862 * exist. If it does not, a 'notFound' error is returned in the job result. | 3012 /// exist. If it does not, a 'notFound' error is returned in the job result. |
| 2863 * The default value is CREATE_IF_NEEDED. Creation, truncation and append | 3013 /// The default value is CREATE_IF_NEEDED. Creation, truncation and append |
| 2864 * actions occur as one atomic update upon job completion. | 3014 /// actions occur as one atomic update upon job completion. |
| 2865 */ | |
| 2866 core.String createDisposition; | 3015 core.String createDisposition; |
| 2867 /** [Required] The destination table to load the data into. */ | 3016 |
| 3017 /// [Experimental] Custom encryption configuration (e.g., Cloud KMS keys). |
| 3018 EncryptionConfiguration destinationEncryptionConfiguration; |
| 3019 |
| 3020 /// [Required] The destination table to load the data into. |
| 2868 TableReference destinationTable; | 3021 TableReference destinationTable; |
| 2869 /** | 3022 |
| 2870 * [Optional] The character encoding of the data. The supported values are | 3023 /// [Optional] The character encoding of the data. The supported values are |
| 2871 * UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data | 3024 /// UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the |
| 2872 * after the raw, binary data has been split using the values of the quote and | 3025 /// data after the raw, binary data has been split using the values of the |
| 2873 * fieldDelimiter properties. | 3026 /// quote and fieldDelimiter properties. |
| 2874 */ | |
| 2875 core.String encoding; | 3027 core.String encoding; |
| 2876 /** | 3028 |
| 2877 * [Optional] The separator for fields in a CSV file. The separator can be any | 3029 /// [Optional] The separator for fields in a CSV file. The separator can be |
| 2878 * ISO-8859-1 single-byte character. To use a character in the range 128-255, | 3030 /// any ISO-8859-1 single-byte character. To use a character in the range |
| 2879 * you must encode the character as UTF8. BigQuery converts the string to | 3031 /// 128-255, you must encode the character as UTF8. BigQuery converts the |
| 2880 * ISO-8859-1 encoding, and then uses the first byte of the encoded string to | 3032 /// string to ISO-8859-1 encoding, and then uses the first byte of the |
| 2881 * split the data in its raw, binary state. BigQuery also supports the escape | 3033 /// encoded string to split the data in its raw, binary state. BigQuery also |
| 2882 * sequence "\t" to specify a tab separator. The default value is a comma | 3034 /// supports the escape sequence "\t" to specify a tab separator. The default |
| 2883 * (','). | 3035 /// value is a comma (','). |
| 2884 */ | |
| 2885 core.String fieldDelimiter; | 3036 core.String fieldDelimiter; |
| 2886 /** | 3037 |
| 2887 * [Optional] Indicates if BigQuery should allow extra values that are not | 3038 /// [Optional] Indicates if BigQuery should allow extra values that are not |
| 2888 * represented in the table schema. If true, the extra values are ignored. If | 3039 /// represented in the table schema. If true, the extra values are ignored. |
| 2889 * false, records with extra columns are treated as bad records, and if there | 3040 /// If false, records with extra columns are treated as bad records, and if |
| 2890 * are too many bad records, an invalid error is returned in the job result. | 3041 /// there are too many bad records, an invalid error is returned in the job |
| 2891 * The default value is false. The sourceFormat property determines what | 3042 /// result. The default value is false. The sourceFormat property determines |
| 2892 * BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values | 3043 /// what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named |
| 2893 * that don't match any column names | 3044 /// values that don't match any column names |
| 2894 */ | |
| 2895 core.bool ignoreUnknownValues; | 3045 core.bool ignoreUnknownValues; |
| 2896 /** | 3046 |
| 2897 * [Optional] The maximum number of bad records that BigQuery can ignore when | 3047 /// [Optional] The maximum number of bad records that BigQuery can ignore |
| 2898 * running the job. If the number of bad records exceeds this value, an | 3048 /// when running the job. If the number of bad records exceeds this value, an |
| 2899 * invalid error is returned in the job result. The default value is 0, which | 3049 /// invalid error is returned in the job result. The default value is 0, |
| 2900 * requires that all records are valid. | 3050 /// which requires that all records are valid. |
| 2901 */ | |
| 2902 core.int maxBadRecords; | 3051 core.int maxBadRecords; |
| 2903 /** | 3052 |
| 2904 * [Optional] Specifies a string that represents a null value in a CSV file. | 3053 /// [Optional] Specifies a string that represents a null value in a CSV file. |
| 2905 * For example, if you specify "\N", BigQuery interprets "\N" as a null value | 3054 /// For example, if you specify "\N", BigQuery interprets "\N" as a null |
| 2906 * when loading a CSV file. The default value is the empty string. If you set | 3055 /// value when loading a CSV file. The default value is the empty string. If |
| 2907 * this property to a custom value, BigQuery throws an error if an empty | 3056 /// you set this property to a custom value, BigQuery throws an error if an |
| 2908 * string is present for all data types except for STRING and BYTE. For STRING | 3057 /// empty string is present for all data types except for STRING and BYTE. |
| 2909 * and BYTE columns, BigQuery interprets the empty string as an empty value. | 3058 /// For STRING and BYTE columns, BigQuery interprets the empty string as an |
| 2910 */ | 3059 /// empty value. |
| 2911 core.String nullMarker; | 3060 core.String nullMarker; |
| 2912 /** | 3061 |
| 2913 * If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity | 3062 /// If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity |
| 2914 * properties to load into BigQuery from a Cloud Datastore backup. Property | 3063 /// properties to load into BigQuery from a Cloud Datastore backup. Property |
| 2915 * names are case sensitive and must be top-level properties. If no properties | 3064 /// names are case sensitive and must be top-level properties. If no |
| 2916 * are specified, BigQuery loads all properties. If any named property isn't | 3065 /// properties are specified, BigQuery loads all properties. If any named |
| 2917 * found in the Cloud Datastore backup, an invalid error is returned in the | 3066 /// property isn't found in the Cloud Datastore backup, an invalid error is |
| 2918 * job result. | 3067 /// returned in the job result. |
| 2919 */ | |
| 2920 core.List<core.String> projectionFields; | 3068 core.List<core.String> projectionFields; |
| 2921 /** | 3069 |
| 2922 * [Optional] The value that is used to quote data sections in a CSV file. | 3070 /// [Optional] The value that is used to quote data sections in a CSV file. |
| 2923 * BigQuery converts the string to ISO-8859-1 encoding, and then uses the | 3071 /// BigQuery converts the string to ISO-8859-1 encoding, and then uses the |
| 2924 * first byte of the encoded string to split the data in its raw, binary | 3072 /// first byte of the encoded string to split the data in its raw, binary |
| 2925 * state. The default value is a double-quote ('"'). If your data does not | 3073 /// state. The default value is a double-quote ('"'). If your data does not |
| 2926 * contain quoted sections, set the property value to an empty string. If your | 3074 /// contain quoted sections, set the property value to an empty string. If |
| 2927 * data contains quoted newline characters, you must also set the | 3075 /// your data contains quoted newline characters, you must also set the |
| 2928 * allowQuotedNewlines property to true. | 3076 /// allowQuotedNewlines property to true. |
| 2929 */ | |
| 2930 core.String quote; | 3077 core.String quote; |
| 2931 /** | 3078 |
| 2932 * [Optional] The schema for the destination table. The schema can be omitted | 3079 /// [Optional] The schema for the destination table. The schema can be |
| 2933 * if the destination table already exists, or if you're loading data from | 3080 /// omitted if the destination table already exists, or if you're loading |
| 2934 * Google Cloud Datastore. | 3081 /// data from Google Cloud Datastore. |
| 2935 */ | |
| 2936 TableSchema schema; | 3082 TableSchema schema; |
| 2937 /** | 3083 |
| 2938 * [Deprecated] The inline schema. For CSV schemas, specify as | 3084 /// [Deprecated] The inline schema. For CSV schemas, specify as |
| 2939 * "Field1:Type1[,Field2:Type2]*". For example, "foo:STRING, bar:INTEGER, | 3085 /// "Field1:Type1[,Field2:Type2]*". For example, "foo:STRING, bar:INTEGER, |
| 2940 * baz:FLOAT". | 3086 /// baz:FLOAT". |
| 2941 */ | |
| 2942 core.String schemaInline; | 3087 core.String schemaInline; |
| 2943 /** [Deprecated] The format of the schemaInline property. */ | 3088 |
| 3089 /// [Deprecated] The format of the schemaInline property. |
| 2944 core.String schemaInlineFormat; | 3090 core.String schemaInlineFormat; |
| 2945 /** | 3091 |
| 2946 * [Experimental] Allows the schema of the desitination table to be updated as | 3092 /// [Experimental] Allows the schema of the desitination table to be updated |
| 2947 * a side effect of the load job if a schema is autodetected or supplied in | 3093 /// as a side effect of the load job if a schema is autodetected or supplied |
| 2948 * the job configuration. Schema update options are supported in two cases: | 3094 /// in the job configuration. Schema update options are supported in two |
| 2949 * when writeDisposition is WRITE_APPEND; when writeDisposition is | 3095 /// cases: when writeDisposition is WRITE_APPEND; when writeDisposition is |
| 2950 * WRITE_TRUNCATE and the destination table is a partition of a table, | 3096 /// WRITE_TRUNCATE and the destination table is a partition of a table, |
| 2951 * specified by partition decorators. For normal tables, WRITE_TRUNCATE will | 3097 /// specified by partition decorators. For normal tables, WRITE_TRUNCATE will |
| 2952 * always overwrite the schema. One or more of the following values are | 3098 /// always overwrite the schema. One or more of the following values are |
| 2953 * specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the | 3099 /// specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the |
| 2954 * schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the | 3100 /// schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the |
| 2955 * original schema to nullable. | 3101 /// original schema to nullable. |
| 2956 */ | |
| 2957 core.List<core.String> schemaUpdateOptions; | 3102 core.List<core.String> schemaUpdateOptions; |
| 2958 /** | 3103 |
| 2959 * [Optional] The number of rows at the top of a CSV file that BigQuery will | 3104 /// [Optional] The number of rows at the top of a CSV file that BigQuery will |
| 2960 * skip when loading the data. The default value is 0. This property is useful | 3105 /// skip when loading the data. The default value is 0. This property is |
| 2961 * if you have header rows in the file that should be skipped. | 3106 /// useful if you have header rows in the file that should be skipped. |
| 2962 */ | |
| 2963 core.int skipLeadingRows; | 3107 core.int skipLeadingRows; |
| 2964 /** | 3108 |
| 2965 * [Optional] The format of the data files. For CSV files, specify "CSV". For | 3109 /// [Optional] The format of the data files. For CSV files, specify "CSV". |
| 2966 * datastore backups, specify "DATASTORE_BACKUP". For newline-delimited JSON, | 3110 /// For datastore backups, specify "DATASTORE_BACKUP". For newline-delimited |
| 2967 * specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". The default | 3111 /// JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". The |
| 2968 * value is CSV. | 3112 /// default value is CSV. |
| 2969 */ | |
| 2970 core.String sourceFormat; | 3113 core.String sourceFormat; |
| 2971 /** | 3114 |
| 2972 * [Required] The fully-qualified URIs that point to your data in Google | 3115 /// [Required] The fully-qualified URIs that point to your data in Google |
| 2973 * Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' wildcard | 3116 /// Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' |
| 2974 * character and it must come after the 'bucket' name. Size limits related to | 3117 /// wildcard character and it must come after the 'bucket' name. Size limits |
| 2975 * load jobs apply to external data sources. For Google Cloud Bigtable URIs: | 3118 /// related to load jobs apply to external data sources. For Google Cloud |
| 2976 * Exactly one URI can be specified and it has be a fully specified and valid | 3119 /// Bigtable URIs: Exactly one URI can be specified and it has be a fully |
| 2977 * HTTPS URL for a Google Cloud Bigtable table. For Google Cloud Datastore | 3120 /// specified and valid HTTPS URL for a Google Cloud Bigtable table. For |
| 2978 * backups: Exactly one URI can be specified. Also, the '*' wildcard character | 3121 /// Google Cloud Datastore backups: Exactly one URI can be specified. Also, |
| 2979 * is not allowed. | 3122 /// the '*' wildcard character is not allowed. |
| 2980 */ | |
| 2981 core.List<core.String> sourceUris; | 3123 core.List<core.String> sourceUris; |
| 2982 /** | 3124 |
| 2983 * [Experimental] If specified, configures time-based partitioning for the | 3125 /// [Experimental] If specified, configures time-based partitioning for the |
| 2984 * destination table. | 3126 /// destination table. |
| 2985 */ | |
| 2986 TimePartitioning timePartitioning; | 3127 TimePartitioning timePartitioning; |
| 2987 /** | 3128 |
| 2988 * [Optional] Specifies the action that occurs if the destination table | 3129 /// [Optional] Specifies the action that occurs if the destination table |
| 2989 * already exists. The following values are supported: WRITE_TRUNCATE: If the | 3130 /// already exists. The following values are supported: WRITE_TRUNCATE: If |
| 2990 * table already exists, BigQuery overwrites the table data. WRITE_APPEND: If | 3131 /// the table already exists, BigQuery overwrites the table data. |
| 2991 * the table already exists, BigQuery appends the data to the table. | 3132 /// WRITE_APPEND: If the table already exists, BigQuery appends the data to |
| 2992 * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' | 3133 /// the table. WRITE_EMPTY: If the table already exists and contains data, a |
| 2993 * error is returned in the job result. The default value is WRITE_APPEND. | 3134 /// 'duplicate' error is returned in the job result. The default value is |
| 2994 * Each action is atomic and only occurs if BigQuery is able to complete the | 3135 /// WRITE_APPEND. Each action is atomic and only occurs if BigQuery is able |
| 2995 * job successfully. Creation, truncation and append actions occur as one | 3136 /// to complete the job successfully. Creation, truncation and append actions |
| 2996 * atomic update upon job completion. | 3137 /// occur as one atomic update upon job completion. |
| 2997 */ | |
| 2998 core.String writeDisposition; | 3138 core.String writeDisposition; |
| 2999 | 3139 |
| 3000 JobConfigurationLoad(); | 3140 JobConfigurationLoad(); |
| 3001 | 3141 |
| 3002 JobConfigurationLoad.fromJson(core.Map _json) { | 3142 JobConfigurationLoad.fromJson(core.Map _json) { |
| 3003 if (_json.containsKey("allowJaggedRows")) { | 3143 if (_json.containsKey("allowJaggedRows")) { |
| 3004 allowJaggedRows = _json["allowJaggedRows"]; | 3144 allowJaggedRows = _json["allowJaggedRows"]; |
| 3005 } | 3145 } |
| 3006 if (_json.containsKey("allowQuotedNewlines")) { | 3146 if (_json.containsKey("allowQuotedNewlines")) { |
| 3007 allowQuotedNewlines = _json["allowQuotedNewlines"]; | 3147 allowQuotedNewlines = _json["allowQuotedNewlines"]; |
| 3008 } | 3148 } |
| 3009 if (_json.containsKey("autodetect")) { | 3149 if (_json.containsKey("autodetect")) { |
| 3010 autodetect = _json["autodetect"]; | 3150 autodetect = _json["autodetect"]; |
| 3011 } | 3151 } |
| 3012 if (_json.containsKey("createDisposition")) { | 3152 if (_json.containsKey("createDisposition")) { |
| 3013 createDisposition = _json["createDisposition"]; | 3153 createDisposition = _json["createDisposition"]; |
| 3014 } | 3154 } |
| 3155 if (_json.containsKey("destinationEncryptionConfiguration")) { |
| 3156 destinationEncryptionConfiguration = new EncryptionConfiguration.fromJson( |
| 3157 _json["destinationEncryptionConfiguration"]); |
| 3158 } |
| 3015 if (_json.containsKey("destinationTable")) { | 3159 if (_json.containsKey("destinationTable")) { |
| 3016 destinationTable = new TableReference.fromJson(_json["destinationTable"]); | 3160 destinationTable = new TableReference.fromJson(_json["destinationTable"]); |
| 3017 } | 3161 } |
| 3018 if (_json.containsKey("encoding")) { | 3162 if (_json.containsKey("encoding")) { |
| 3019 encoding = _json["encoding"]; | 3163 encoding = _json["encoding"]; |
| 3020 } | 3164 } |
| 3021 if (_json.containsKey("fieldDelimiter")) { | 3165 if (_json.containsKey("fieldDelimiter")) { |
| 3022 fieldDelimiter = _json["fieldDelimiter"]; | 3166 fieldDelimiter = _json["fieldDelimiter"]; |
| 3023 } | 3167 } |
| 3024 if (_json.containsKey("ignoreUnknownValues")) { | 3168 if (_json.containsKey("ignoreUnknownValues")) { |
| (...skipping 26 matching lines...) Expand all Loading... |
| 3051 if (_json.containsKey("skipLeadingRows")) { | 3195 if (_json.containsKey("skipLeadingRows")) { |
| 3052 skipLeadingRows = _json["skipLeadingRows"]; | 3196 skipLeadingRows = _json["skipLeadingRows"]; |
| 3053 } | 3197 } |
| 3054 if (_json.containsKey("sourceFormat")) { | 3198 if (_json.containsKey("sourceFormat")) { |
| 3055 sourceFormat = _json["sourceFormat"]; | 3199 sourceFormat = _json["sourceFormat"]; |
| 3056 } | 3200 } |
| 3057 if (_json.containsKey("sourceUris")) { | 3201 if (_json.containsKey("sourceUris")) { |
| 3058 sourceUris = _json["sourceUris"]; | 3202 sourceUris = _json["sourceUris"]; |
| 3059 } | 3203 } |
| 3060 if (_json.containsKey("timePartitioning")) { | 3204 if (_json.containsKey("timePartitioning")) { |
| 3061 timePartitioning = new TimePartitioning.fromJson(_json["timePartitioning"]
); | 3205 timePartitioning = |
| 3206 new TimePartitioning.fromJson(_json["timePartitioning"]); |
| 3062 } | 3207 } |
| 3063 if (_json.containsKey("writeDisposition")) { | 3208 if (_json.containsKey("writeDisposition")) { |
| 3064 writeDisposition = _json["writeDisposition"]; | 3209 writeDisposition = _json["writeDisposition"]; |
| 3065 } | 3210 } |
| 3066 } | 3211 } |
| 3067 | 3212 |
| 3068 core.Map<core.String, core.Object> toJson() { | 3213 core.Map<core.String, core.Object> toJson() { |
| 3069 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 3214 final core.Map<core.String, core.Object> _json = |
| 3215 new core.Map<core.String, core.Object>(); |
| 3070 if (allowJaggedRows != null) { | 3216 if (allowJaggedRows != null) { |
| 3071 _json["allowJaggedRows"] = allowJaggedRows; | 3217 _json["allowJaggedRows"] = allowJaggedRows; |
| 3072 } | 3218 } |
| 3073 if (allowQuotedNewlines != null) { | 3219 if (allowQuotedNewlines != null) { |
| 3074 _json["allowQuotedNewlines"] = allowQuotedNewlines; | 3220 _json["allowQuotedNewlines"] = allowQuotedNewlines; |
| 3075 } | 3221 } |
| 3076 if (autodetect != null) { | 3222 if (autodetect != null) { |
| 3077 _json["autodetect"] = autodetect; | 3223 _json["autodetect"] = autodetect; |
| 3078 } | 3224 } |
| 3079 if (createDisposition != null) { | 3225 if (createDisposition != null) { |
| 3080 _json["createDisposition"] = createDisposition; | 3226 _json["createDisposition"] = createDisposition; |
| 3081 } | 3227 } |
| 3228 if (destinationEncryptionConfiguration != null) { |
| 3229 _json["destinationEncryptionConfiguration"] = |
| 3230 (destinationEncryptionConfiguration).toJson(); |
| 3231 } |
| 3082 if (destinationTable != null) { | 3232 if (destinationTable != null) { |
| 3083 _json["destinationTable"] = (destinationTable).toJson(); | 3233 _json["destinationTable"] = (destinationTable).toJson(); |
| 3084 } | 3234 } |
| 3085 if (encoding != null) { | 3235 if (encoding != null) { |
| 3086 _json["encoding"] = encoding; | 3236 _json["encoding"] = encoding; |
| 3087 } | 3237 } |
| 3088 if (fieldDelimiter != null) { | 3238 if (fieldDelimiter != null) { |
| 3089 _json["fieldDelimiter"] = fieldDelimiter; | 3239 _json["fieldDelimiter"] = fieldDelimiter; |
| 3090 } | 3240 } |
| 3091 if (ignoreUnknownValues != null) { | 3241 if (ignoreUnknownValues != null) { |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3128 _json["timePartitioning"] = (timePartitioning).toJson(); | 3278 _json["timePartitioning"] = (timePartitioning).toJson(); |
| 3129 } | 3279 } |
| 3130 if (writeDisposition != null) { | 3280 if (writeDisposition != null) { |
| 3131 _json["writeDisposition"] = writeDisposition; | 3281 _json["writeDisposition"] = writeDisposition; |
| 3132 } | 3282 } |
| 3133 return _json; | 3283 return _json; |
| 3134 } | 3284 } |
| 3135 } | 3285 } |
| 3136 | 3286 |
| 3137 class JobConfigurationQuery { | 3287 class JobConfigurationQuery { |
| 3138 /** | 3288 /// [Optional] If true and query uses legacy SQL dialect, allows the query to |
| 3139 * [Optional] If true and query uses legacy SQL dialect, allows the query to | 3289 /// produce arbitrarily large result tables at a slight cost in performance. |
| 3140 * produce arbitrarily large result tables at a slight cost in performance. | 3290 /// Requires destinationTable to be set. For standard SQL queries, this flag |
| 3141 * Requires destinationTable to be set. For standard SQL queries, this flag is | 3291 /// is ignored and large results are always allowed. However, you must still |
| 3142 * ignored and large results are always allowed. However, you must still set | 3292 /// set destinationTable when result size exceeds the allowed maximum |
| 3143 * destinationTable when result size exceeds the allowed maximum response | 3293 /// response size. |
| 3144 * size. | |
| 3145 */ | |
| 3146 core.bool allowLargeResults; | 3294 core.bool allowLargeResults; |
| 3147 /** | 3295 |
| 3148 * [Optional] Specifies whether the job is allowed to create new tables. The | 3296 /// [Optional] Specifies whether the job is allowed to create new tables. The |
| 3149 * following values are supported: CREATE_IF_NEEDED: If the table does not | 3297 /// following values are supported: CREATE_IF_NEEDED: If the table does not |
| 3150 * exist, BigQuery creates the table. CREATE_NEVER: The table must already | 3298 /// exist, BigQuery creates the table. CREATE_NEVER: The table must already |
| 3151 * exist. If it does not, a 'notFound' error is returned in the job result. | 3299 /// exist. If it does not, a 'notFound' error is returned in the job result. |
| 3152 * The default value is CREATE_IF_NEEDED. Creation, truncation and append | 3300 /// The default value is CREATE_IF_NEEDED. Creation, truncation and append |
| 3153 * actions occur as one atomic update upon job completion. | 3301 /// actions occur as one atomic update upon job completion. |
| 3154 */ | |
| 3155 core.String createDisposition; | 3302 core.String createDisposition; |
| 3156 /** | 3303 |
| 3157 * [Optional] Specifies the default dataset to use for unqualified table names | 3304 /// [Optional] Specifies the default dataset to use for unqualified table |
| 3158 * in the query. | 3305 /// names in the query. |
| 3159 */ | |
| 3160 DatasetReference defaultDataset; | 3306 DatasetReference defaultDataset; |
| 3161 /** | 3307 |
| 3162 * [Optional] Describes the table where the query results should be stored. If | 3308 /// [Experimental] Custom encryption configuration (e.g., Cloud KMS keys). |
| 3163 * not present, a new table will be created to store the results. This | 3309 EncryptionConfiguration destinationEncryptionConfiguration; |
| 3164 * property must be set for large results that exceed the maximum response | 3310 |
| 3165 * size. | 3311 /// [Optional] Describes the table where the query results should be stored. |
| 3166 */ | 3312 /// If not present, a new table will be created to store the results. This |
| 3313 /// property must be set for large results that exceed the maximum response |
| 3314 /// size. |
| 3167 TableReference destinationTable; | 3315 TableReference destinationTable; |
| 3168 /** | 3316 |
| 3169 * [Optional] If true and query uses legacy SQL dialect, flattens all nested | 3317 /// [Optional] If true and query uses legacy SQL dialect, flattens all nested |
| 3170 * and repeated fields in the query results. allowLargeResults must be true if | 3318 /// and repeated fields in the query results. allowLargeResults must be true |
| 3171 * this is set to false. For standard SQL queries, this flag is ignored and | 3319 /// if this is set to false. For standard SQL queries, this flag is ignored |
| 3172 * results are never flattened. | 3320 /// and results are never flattened. |
| 3173 */ | |
| 3174 core.bool flattenResults; | 3321 core.bool flattenResults; |
| 3175 /** | 3322 |
| 3176 * [Optional] Limits the billing tier for this job. Queries that have resource | 3323 /// [Optional] Limits the billing tier for this job. Queries that have |
| 3177 * usage beyond this tier will fail (without incurring a charge). If | 3324 /// resource usage beyond this tier will fail (without incurring a charge). |
| 3178 * unspecified, this will be set to your project default. | 3325 /// If unspecified, this will be set to your project default. |
| 3179 */ | |
| 3180 core.int maximumBillingTier; | 3326 core.int maximumBillingTier; |
| 3181 /** | 3327 |
| 3182 * [Optional] Limits the bytes billed for this job. Queries that will have | 3328 /// [Optional] Limits the bytes billed for this job. Queries that will have |
| 3183 * bytes billed beyond this limit will fail (without incurring a charge). If | 3329 /// bytes billed beyond this limit will fail (without incurring a charge). If |
| 3184 * unspecified, this will be set to your project default. | 3330 /// unspecified, this will be set to your project default. |
| 3185 */ | |
| 3186 core.String maximumBytesBilled; | 3331 core.String maximumBytesBilled; |
| 3187 /** | 3332 |
| 3188 * Standard SQL only. Set to POSITIONAL to use positional (?) query parameters | 3333 /// Standard SQL only. Set to POSITIONAL to use positional (?) query |
| 3189 * or to NAMED to use named (@myparam) query parameters in this query. | 3334 /// parameters or to NAMED to use named (@myparam) query parameters in this |
| 3190 */ | 3335 /// query. |
| 3191 core.String parameterMode; | 3336 core.String parameterMode; |
| 3192 /** [Deprecated] This property is deprecated. */ | 3337 |
| 3338 /// [Deprecated] This property is deprecated. |
| 3193 core.bool preserveNulls; | 3339 core.bool preserveNulls; |
| 3194 /** | 3340 |
| 3195 * [Optional] Specifies a priority for the query. Possible values include | 3341 /// [Optional] Specifies a priority for the query. Possible values include |
| 3196 * INTERACTIVE and BATCH. The default value is INTERACTIVE. | 3342 /// INTERACTIVE and BATCH. The default value is INTERACTIVE. |
| 3197 */ | |
| 3198 core.String priority; | 3343 core.String priority; |
| 3199 /** | 3344 |
| 3200 * [Required] SQL query text to execute. The useLegacySql field can be used to | 3345 /// [Required] SQL query text to execute. The useLegacySql field can be used |
| 3201 * indicate whether the query uses legacy SQL or standard SQL. | 3346 /// to indicate whether the query uses legacy SQL or standard SQL. |
| 3202 */ | |
| 3203 core.String query; | 3347 core.String query; |
| 3204 /** Query parameters for standard SQL queries. */ | 3348 |
| 3349 /// Query parameters for standard SQL queries. |
| 3205 core.List<QueryParameter> queryParameters; | 3350 core.List<QueryParameter> queryParameters; |
| 3206 /** | 3351 |
| 3207 * [Experimental] Allows the schema of the destination table to be updated as | 3352 /// [Experimental] Allows the schema of the destination table to be updated |
| 3208 * a side effect of the query job. Schema update options are supported in two | 3353 /// as a side effect of the query job. Schema update options are supported in |
| 3209 * cases: when writeDisposition is WRITE_APPEND; when writeDisposition is | 3354 /// two cases: when writeDisposition is WRITE_APPEND; when writeDisposition |
| 3210 * WRITE_TRUNCATE and the destination table is a partition of a table, | 3355 /// is WRITE_TRUNCATE and the destination table is a partition of a table, |
| 3211 * specified by partition decorators. For normal tables, WRITE_TRUNCATE will | 3356 /// specified by partition decorators. For normal tables, WRITE_TRUNCATE will |
| 3212 * always overwrite the schema. One or more of the following values are | 3357 /// always overwrite the schema. One or more of the following values are |
| 3213 * specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the | 3358 /// specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the |
| 3214 * schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the | 3359 /// schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the |
| 3215 * original schema to nullable. | 3360 /// original schema to nullable. |
| 3216 */ | |
| 3217 core.List<core.String> schemaUpdateOptions; | 3361 core.List<core.String> schemaUpdateOptions; |
| 3218 /** | 3362 |
| 3219 * [Optional] If querying an external data source outside of BigQuery, | 3363 /// [Optional] If querying an external data source outside of BigQuery, |
| 3220 * describes the data format, location and other properties of the data | 3364 /// describes the data format, location and other properties of the data |
| 3221 * source. By defining these properties, the data source can then be queried | 3365 /// source. By defining these properties, the data source can then be queried |
| 3222 * as if it were a standard BigQuery table. | 3366 /// as if it were a standard BigQuery table. |
| 3223 */ | |
| 3224 core.Map<core.String, ExternalDataConfiguration> tableDefinitions; | 3367 core.Map<core.String, ExternalDataConfiguration> tableDefinitions; |
| 3225 /** | 3368 |
| 3226 * [Experimental] If specified, configures time-based partitioning for the | 3369 /// [Experimental] If specified, configures time-based partitioning for the |
| 3227 * destination table. | 3370 /// destination table. |
| 3228 */ | |
| 3229 TimePartitioning timePartitioning; | 3371 TimePartitioning timePartitioning; |
| 3230 /** | 3372 |
| 3231 * Specifies whether to use BigQuery's legacy SQL dialect for this query. The | 3373 /// Specifies whether to use BigQuery's legacy SQL dialect for this query. |
| 3232 * default value is true. If set to false, the query will use BigQuery's | 3374 /// The default value is true. If set to false, the query will use BigQuery's |
| 3233 * standard SQL: https://cloud.google.com/bigquery/sql-reference/ When | 3375 /// standard SQL: https://cloud.google.com/bigquery/sql-reference/ When |
| 3234 * useLegacySql is set to false, the value of flattenResults is ignored; query | 3376 /// useLegacySql is set to false, the value of flattenResults is ignored; |
| 3235 * will be run as if flattenResults is false. | 3377 /// query will be run as if flattenResults is false. |
| 3236 */ | |
| 3237 core.bool useLegacySql; | 3378 core.bool useLegacySql; |
| 3238 /** | 3379 |
| 3239 * [Optional] Whether to look for the result in the query cache. The query | 3380 /// [Optional] Whether to look for the result in the query cache. The query |
| 3240 * cache is a best-effort cache that will be flushed whenever tables in the | 3381 /// cache is a best-effort cache that will be flushed whenever tables in the |
| 3241 * query are modified. Moreover, the query cache is only available when a | 3382 /// query are modified. Moreover, the query cache is only available when a |
| 3242 * query does not have a destination table specified. The default value is | 3383 /// query does not have a destination table specified. The default value is |
| 3243 * true. | 3384 /// true. |
| 3244 */ | |
| 3245 core.bool useQueryCache; | 3385 core.bool useQueryCache; |
| 3246 /** Describes user-defined function resources used in the query. */ | 3386 |
| 3387 /// Describes user-defined function resources used in the query. |
| 3247 core.List<UserDefinedFunctionResource> userDefinedFunctionResources; | 3388 core.List<UserDefinedFunctionResource> userDefinedFunctionResources; |
| 3248 /** | 3389 |
| 3249 * [Optional] Specifies the action that occurs if the destination table | 3390 /// [Optional] Specifies the action that occurs if the destination table |
| 3250 * already exists. The following values are supported: WRITE_TRUNCATE: If the | 3391 /// already exists. The following values are supported: WRITE_TRUNCATE: If |
| 3251 * table already exists, BigQuery overwrites the table data and uses the | 3392 /// the table already exists, BigQuery overwrites the table data and uses the |
| 3252 * schema from the query result. WRITE_APPEND: If the table already exists, | 3393 /// schema from the query result. WRITE_APPEND: If the table already exists, |
| 3253 * BigQuery appends the data to the table. WRITE_EMPTY: If the table already | 3394 /// BigQuery appends the data to the table. WRITE_EMPTY: If the table already |
| 3254 * exists and contains data, a 'duplicate' error is returned in the job | 3395 /// exists and contains data, a 'duplicate' error is returned in the job |
| 3255 * result. The default value is WRITE_EMPTY. Each action is atomic and only | 3396 /// result. The default value is WRITE_EMPTY. Each action is atomic and only |
| 3256 * occurs if BigQuery is able to complete the job successfully. Creation, | 3397 /// occurs if BigQuery is able to complete the job successfully. Creation, |
| 3257 * truncation and append actions occur as one atomic update upon job | 3398 /// truncation and append actions occur as one atomic update upon job |
| 3258 * completion. | 3399 /// completion. |
| 3259 */ | |
| 3260 core.String writeDisposition; | 3400 core.String writeDisposition; |
| 3261 | 3401 |
| 3262 JobConfigurationQuery(); | 3402 JobConfigurationQuery(); |
| 3263 | 3403 |
| 3264 JobConfigurationQuery.fromJson(core.Map _json) { | 3404 JobConfigurationQuery.fromJson(core.Map _json) { |
| 3265 if (_json.containsKey("allowLargeResults")) { | 3405 if (_json.containsKey("allowLargeResults")) { |
| 3266 allowLargeResults = _json["allowLargeResults"]; | 3406 allowLargeResults = _json["allowLargeResults"]; |
| 3267 } | 3407 } |
| 3268 if (_json.containsKey("createDisposition")) { | 3408 if (_json.containsKey("createDisposition")) { |
| 3269 createDisposition = _json["createDisposition"]; | 3409 createDisposition = _json["createDisposition"]; |
| 3270 } | 3410 } |
| 3271 if (_json.containsKey("defaultDataset")) { | 3411 if (_json.containsKey("defaultDataset")) { |
| 3272 defaultDataset = new DatasetReference.fromJson(_json["defaultDataset"]); | 3412 defaultDataset = new DatasetReference.fromJson(_json["defaultDataset"]); |
| 3273 } | 3413 } |
| 3414 if (_json.containsKey("destinationEncryptionConfiguration")) { |
| 3415 destinationEncryptionConfiguration = new EncryptionConfiguration.fromJson( |
| 3416 _json["destinationEncryptionConfiguration"]); |
| 3417 } |
| 3274 if (_json.containsKey("destinationTable")) { | 3418 if (_json.containsKey("destinationTable")) { |
| 3275 destinationTable = new TableReference.fromJson(_json["destinationTable"]); | 3419 destinationTable = new TableReference.fromJson(_json["destinationTable"]); |
| 3276 } | 3420 } |
| 3277 if (_json.containsKey("flattenResults")) { | 3421 if (_json.containsKey("flattenResults")) { |
| 3278 flattenResults = _json["flattenResults"]; | 3422 flattenResults = _json["flattenResults"]; |
| 3279 } | 3423 } |
| 3280 if (_json.containsKey("maximumBillingTier")) { | 3424 if (_json.containsKey("maximumBillingTier")) { |
| 3281 maximumBillingTier = _json["maximumBillingTier"]; | 3425 maximumBillingTier = _json["maximumBillingTier"]; |
| 3282 } | 3426 } |
| 3283 if (_json.containsKey("maximumBytesBilled")) { | 3427 if (_json.containsKey("maximumBytesBilled")) { |
| 3284 maximumBytesBilled = _json["maximumBytesBilled"]; | 3428 maximumBytesBilled = _json["maximumBytesBilled"]; |
| 3285 } | 3429 } |
| 3286 if (_json.containsKey("parameterMode")) { | 3430 if (_json.containsKey("parameterMode")) { |
| 3287 parameterMode = _json["parameterMode"]; | 3431 parameterMode = _json["parameterMode"]; |
| 3288 } | 3432 } |
| 3289 if (_json.containsKey("preserveNulls")) { | 3433 if (_json.containsKey("preserveNulls")) { |
| 3290 preserveNulls = _json["preserveNulls"]; | 3434 preserveNulls = _json["preserveNulls"]; |
| 3291 } | 3435 } |
| 3292 if (_json.containsKey("priority")) { | 3436 if (_json.containsKey("priority")) { |
| 3293 priority = _json["priority"]; | 3437 priority = _json["priority"]; |
| 3294 } | 3438 } |
| 3295 if (_json.containsKey("query")) { | 3439 if (_json.containsKey("query")) { |
| 3296 query = _json["query"]; | 3440 query = _json["query"]; |
| 3297 } | 3441 } |
| 3298 if (_json.containsKey("queryParameters")) { | 3442 if (_json.containsKey("queryParameters")) { |
| 3299 queryParameters = _json["queryParameters"].map((value) => new QueryParamet
er.fromJson(value)).toList(); | 3443 queryParameters = _json["queryParameters"] |
| 3444 .map((value) => new QueryParameter.fromJson(value)) |
| 3445 .toList(); |
| 3300 } | 3446 } |
| 3301 if (_json.containsKey("schemaUpdateOptions")) { | 3447 if (_json.containsKey("schemaUpdateOptions")) { |
| 3302 schemaUpdateOptions = _json["schemaUpdateOptions"]; | 3448 schemaUpdateOptions = _json["schemaUpdateOptions"]; |
| 3303 } | 3449 } |
| 3304 if (_json.containsKey("tableDefinitions")) { | 3450 if (_json.containsKey("tableDefinitions")) { |
| 3305 tableDefinitions = commons.mapMap<core.Map<core.String, core.Object>, Exte
rnalDataConfiguration>(_json["tableDefinitions"], (core.Map<core.String, core.Ob
ject> item) => new ExternalDataConfiguration.fromJson(item)); | 3451 tableDefinitions = commons.mapMap<core.Map<core.String, core.Object>, |
| 3452 ExternalDataConfiguration>( |
| 3453 _json["tableDefinitions"], |
| 3454 (core.Map<core.String, core.Object> item) => |
| 3455 new ExternalDataConfiguration.fromJson(item)); |
| 3306 } | 3456 } |
| 3307 if (_json.containsKey("timePartitioning")) { | 3457 if (_json.containsKey("timePartitioning")) { |
| 3308 timePartitioning = new TimePartitioning.fromJson(_json["timePartitioning"]
); | 3458 timePartitioning = |
| 3459 new TimePartitioning.fromJson(_json["timePartitioning"]); |
| 3309 } | 3460 } |
| 3310 if (_json.containsKey("useLegacySql")) { | 3461 if (_json.containsKey("useLegacySql")) { |
| 3311 useLegacySql = _json["useLegacySql"]; | 3462 useLegacySql = _json["useLegacySql"]; |
| 3312 } | 3463 } |
| 3313 if (_json.containsKey("useQueryCache")) { | 3464 if (_json.containsKey("useQueryCache")) { |
| 3314 useQueryCache = _json["useQueryCache"]; | 3465 useQueryCache = _json["useQueryCache"]; |
| 3315 } | 3466 } |
| 3316 if (_json.containsKey("userDefinedFunctionResources")) { | 3467 if (_json.containsKey("userDefinedFunctionResources")) { |
| 3317 userDefinedFunctionResources = _json["userDefinedFunctionResources"].map((
value) => new UserDefinedFunctionResource.fromJson(value)).toList(); | 3468 userDefinedFunctionResources = _json["userDefinedFunctionResources"] |
| 3469 .map((value) => new UserDefinedFunctionResource.fromJson(value)) |
| 3470 .toList(); |
| 3318 } | 3471 } |
| 3319 if (_json.containsKey("writeDisposition")) { | 3472 if (_json.containsKey("writeDisposition")) { |
| 3320 writeDisposition = _json["writeDisposition"]; | 3473 writeDisposition = _json["writeDisposition"]; |
| 3321 } | 3474 } |
| 3322 } | 3475 } |
| 3323 | 3476 |
| 3324 core.Map<core.String, core.Object> toJson() { | 3477 core.Map<core.String, core.Object> toJson() { |
| 3325 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 3478 final core.Map<core.String, core.Object> _json = |
| 3479 new core.Map<core.String, core.Object>(); |
| 3326 if (allowLargeResults != null) { | 3480 if (allowLargeResults != null) { |
| 3327 _json["allowLargeResults"] = allowLargeResults; | 3481 _json["allowLargeResults"] = allowLargeResults; |
| 3328 } | 3482 } |
| 3329 if (createDisposition != null) { | 3483 if (createDisposition != null) { |
| 3330 _json["createDisposition"] = createDisposition; | 3484 _json["createDisposition"] = createDisposition; |
| 3331 } | 3485 } |
| 3332 if (defaultDataset != null) { | 3486 if (defaultDataset != null) { |
| 3333 _json["defaultDataset"] = (defaultDataset).toJson(); | 3487 _json["defaultDataset"] = (defaultDataset).toJson(); |
| 3334 } | 3488 } |
| 3489 if (destinationEncryptionConfiguration != null) { |
| 3490 _json["destinationEncryptionConfiguration"] = |
| 3491 (destinationEncryptionConfiguration).toJson(); |
| 3492 } |
| 3335 if (destinationTable != null) { | 3493 if (destinationTable != null) { |
| 3336 _json["destinationTable"] = (destinationTable).toJson(); | 3494 _json["destinationTable"] = (destinationTable).toJson(); |
| 3337 } | 3495 } |
| 3338 if (flattenResults != null) { | 3496 if (flattenResults != null) { |
| 3339 _json["flattenResults"] = flattenResults; | 3497 _json["flattenResults"] = flattenResults; |
| 3340 } | 3498 } |
| 3341 if (maximumBillingTier != null) { | 3499 if (maximumBillingTier != null) { |
| 3342 _json["maximumBillingTier"] = maximumBillingTier; | 3500 _json["maximumBillingTier"] = maximumBillingTier; |
| 3343 } | 3501 } |
| 3344 if (maximumBytesBilled != null) { | 3502 if (maximumBytesBilled != null) { |
| 3345 _json["maximumBytesBilled"] = maximumBytesBilled; | 3503 _json["maximumBytesBilled"] = maximumBytesBilled; |
| 3346 } | 3504 } |
| 3347 if (parameterMode != null) { | 3505 if (parameterMode != null) { |
| 3348 _json["parameterMode"] = parameterMode; | 3506 _json["parameterMode"] = parameterMode; |
| 3349 } | 3507 } |
| 3350 if (preserveNulls != null) { | 3508 if (preserveNulls != null) { |
| 3351 _json["preserveNulls"] = preserveNulls; | 3509 _json["preserveNulls"] = preserveNulls; |
| 3352 } | 3510 } |
| 3353 if (priority != null) { | 3511 if (priority != null) { |
| 3354 _json["priority"] = priority; | 3512 _json["priority"] = priority; |
| 3355 } | 3513 } |
| 3356 if (query != null) { | 3514 if (query != null) { |
| 3357 _json["query"] = query; | 3515 _json["query"] = query; |
| 3358 } | 3516 } |
| 3359 if (queryParameters != null) { | 3517 if (queryParameters != null) { |
| 3360 _json["queryParameters"] = queryParameters.map((value) => (value).toJson()
).toList(); | 3518 _json["queryParameters"] = |
| 3519 queryParameters.map((value) => (value).toJson()).toList(); |
| 3361 } | 3520 } |
| 3362 if (schemaUpdateOptions != null) { | 3521 if (schemaUpdateOptions != null) { |
| 3363 _json["schemaUpdateOptions"] = schemaUpdateOptions; | 3522 _json["schemaUpdateOptions"] = schemaUpdateOptions; |
| 3364 } | 3523 } |
| 3365 if (tableDefinitions != null) { | 3524 if (tableDefinitions != null) { |
| 3366 _json["tableDefinitions"] = commons.mapMap<ExternalDataConfiguration, core
.Map<core.String, core.Object>>(tableDefinitions, (ExternalDataConfiguration ite
m) => (item).toJson()); | 3525 _json["tableDefinitions"] = commons.mapMap<ExternalDataConfiguration, |
| 3526 core.Map<core.String, core.Object>>(tableDefinitions, |
| 3527 (ExternalDataConfiguration item) => (item).toJson()); |
| 3367 } | 3528 } |
| 3368 if (timePartitioning != null) { | 3529 if (timePartitioning != null) { |
| 3369 _json["timePartitioning"] = (timePartitioning).toJson(); | 3530 _json["timePartitioning"] = (timePartitioning).toJson(); |
| 3370 } | 3531 } |
| 3371 if (useLegacySql != null) { | 3532 if (useLegacySql != null) { |
| 3372 _json["useLegacySql"] = useLegacySql; | 3533 _json["useLegacySql"] = useLegacySql; |
| 3373 } | 3534 } |
| 3374 if (useQueryCache != null) { | 3535 if (useQueryCache != null) { |
| 3375 _json["useQueryCache"] = useQueryCache; | 3536 _json["useQueryCache"] = useQueryCache; |
| 3376 } | 3537 } |
| 3377 if (userDefinedFunctionResources != null) { | 3538 if (userDefinedFunctionResources != null) { |
| 3378 _json["userDefinedFunctionResources"] = userDefinedFunctionResources.map((
value) => (value).toJson()).toList(); | 3539 _json["userDefinedFunctionResources"] = userDefinedFunctionResources |
| 3540 .map((value) => (value).toJson()) |
| 3541 .toList(); |
| 3379 } | 3542 } |
| 3380 if (writeDisposition != null) { | 3543 if (writeDisposition != null) { |
| 3381 _json["writeDisposition"] = writeDisposition; | 3544 _json["writeDisposition"] = writeDisposition; |
| 3382 } | 3545 } |
| 3383 return _json; | 3546 return _json; |
| 3384 } | 3547 } |
| 3385 } | 3548 } |
| 3386 | 3549 |
/// Configuration for a table-copy job.
class JobConfigurationTableCopy {
  /// [Optional] Specifies whether the job is allowed to create new tables. The
  /// following values are supported: CREATE_IF_NEEDED: If the table does not
  /// exist, BigQuery creates the table. CREATE_NEVER: The table must already
  /// exist. If it does not, a 'notFound' error is returned in the job result.
  /// The default value is CREATE_IF_NEEDED. Creation, truncation and append
  /// actions occur as one atomic update upon job completion.
  core.String createDisposition;

  /// [Experimental] Custom encryption configuration (e.g., Cloud KMS keys).
  EncryptionConfiguration destinationEncryptionConfiguration;

  /// [Required] The destination table
  TableReference destinationTable;

  /// [Pick one] Source table to copy.
  TableReference sourceTable;

  /// [Pick one] Source tables to copy.
  core.List<TableReference> sourceTables;

  /// [Optional] Specifies the action that occurs if the destination table
  /// already exists. The following values are supported: WRITE_TRUNCATE: If
  /// the table already exists, BigQuery overwrites the table data.
  /// WRITE_APPEND: If the table already exists, BigQuery appends the data to
  /// the table. WRITE_EMPTY: If the table already exists and contains data, a
  /// 'duplicate' error is returned in the job result. The default value is
  /// WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to
  /// complete the job successfully. Creation, truncation and append actions
  /// occur as one atomic update upon job completion.
  core.String writeDisposition;

  JobConfigurationTableCopy();

  /// Populates the configuration from a decoded JSON [json] map; keys that
  /// are absent leave the corresponding field null.
  JobConfigurationTableCopy.fromJson(core.Map json) {
    if (json.containsKey("createDisposition")) {
      createDisposition = json["createDisposition"];
    }
    if (json.containsKey("destinationEncryptionConfiguration")) {
      destinationEncryptionConfiguration = new EncryptionConfiguration.fromJson(
          json["destinationEncryptionConfiguration"]);
    }
    if (json.containsKey("destinationTable")) {
      destinationTable = new TableReference.fromJson(json["destinationTable"]);
    }
    if (json.containsKey("sourceTable")) {
      sourceTable = new TableReference.fromJson(json["sourceTable"]);
    }
    if (json.containsKey("sourceTables")) {
      sourceTables = json["sourceTables"]
          .map((item) => new TableReference.fromJson(item))
          .toList();
    }
    if (json.containsKey("writeDisposition")) {
      writeDisposition = json["writeDisposition"];
    }
  }

  /// Serializes this configuration to a JSON-compatible map, omitting any
  /// field that is still null.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (createDisposition != null) {
      result["createDisposition"] = createDisposition;
    }
    if (destinationEncryptionConfiguration != null) {
      result["destinationEncryptionConfiguration"] =
          (destinationEncryptionConfiguration).toJson();
    }
    if (destinationTable != null) {
      result["destinationTable"] = (destinationTable).toJson();
    }
    if (sourceTable != null) {
      result["sourceTable"] = (sourceTable).toJson();
    }
    if (sourceTables != null) {
      result["sourceTables"] =
          sourceTables.map((table) => (table).toJson()).toList();
    }
    if (writeDisposition != null) {
      result["writeDisposition"] = writeDisposition;
    }
    return result;
  }
}
| 3456 | 3634 |
| 3457 class JobListJobs { | 3635 class JobListJobs { |
| 3458 /** [Full-projection-only] Specifies the job configuration. */ | 3636 /// [Full-projection-only] Specifies the job configuration. |
| 3459 JobConfiguration configuration; | 3637 JobConfiguration configuration; |
| 3460 /** A result object that will be present only if the job has failed. */ | 3638 |
| 3639 /// A result object that will be present only if the job has failed. |
| 3461 ErrorProto errorResult; | 3640 ErrorProto errorResult; |
| 3462 /** Unique opaque ID of the job. */ | 3641 |
| 3642 /// Unique opaque ID of the job. |
| 3463 core.String id; | 3643 core.String id; |
| 3464 /** Job reference uniquely identifying the job. */ | 3644 |
| 3645 /// Job reference uniquely identifying the job. |
| 3465 JobReference jobReference; | 3646 JobReference jobReference; |
| 3466 /** The resource type. */ | 3647 |
| 3648 /// The resource type. |
| 3467 core.String kind; | 3649 core.String kind; |
| 3468 /** | 3650 |
| 3469 * Running state of the job. When the state is DONE, errorResult can be | 3651 /// Running state of the job. When the state is DONE, errorResult can be |
| 3470 * checked to determine whether the job succeeded or failed. | 3652 /// checked to determine whether the job succeeded or failed. |
| 3471 */ | |
| 3472 core.String state; | 3653 core.String state; |
| 3473 /** | 3654 |
| 3474 * [Output-only] Information about the job, including starting time and ending | 3655 /// [Output-only] Information about the job, including starting time and |
| 3475 * time of the job. | 3656 /// ending time of the job. |
| 3476 */ | |
| 3477 JobStatistics statistics; | 3657 JobStatistics statistics; |
| 3478 /** [Full-projection-only] Describes the state of the job. */ | 3658 |
| 3659 /// [Full-projection-only] Describes the state of the job. |
| 3479 JobStatus status; | 3660 JobStatus status; |
| 3480 /** [Full-projection-only] Email address of the user who ran the job. */ | 3661 |
| 3662 /// [Full-projection-only] Email address of the user who ran the job. |
| 3481 core.String userEmail; | 3663 core.String userEmail; |
| 3482 | 3664 |
| 3483 JobListJobs(); | 3665 JobListJobs(); |
| 3484 | 3666 |
| 3485 JobListJobs.fromJson(core.Map _json) { | 3667 JobListJobs.fromJson(core.Map _json) { |
| 3486 if (_json.containsKey("configuration")) { | 3668 if (_json.containsKey("configuration")) { |
| 3487 configuration = new JobConfiguration.fromJson(_json["configuration"]); | 3669 configuration = new JobConfiguration.fromJson(_json["configuration"]); |
| 3488 } | 3670 } |
| 3489 if (_json.containsKey("errorResult")) { | 3671 if (_json.containsKey("errorResult")) { |
| 3490 errorResult = new ErrorProto.fromJson(_json["errorResult"]); | 3672 errorResult = new ErrorProto.fromJson(_json["errorResult"]); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 3506 } | 3688 } |
| 3507 if (_json.containsKey("status")) { | 3689 if (_json.containsKey("status")) { |
| 3508 status = new JobStatus.fromJson(_json["status"]); | 3690 status = new JobStatus.fromJson(_json["status"]); |
| 3509 } | 3691 } |
| 3510 if (_json.containsKey("user_email")) { | 3692 if (_json.containsKey("user_email")) { |
| 3511 userEmail = _json["user_email"]; | 3693 userEmail = _json["user_email"]; |
| 3512 } | 3694 } |
| 3513 } | 3695 } |
| 3514 | 3696 |
| 3515 core.Map<core.String, core.Object> toJson() { | 3697 core.Map<core.String, core.Object> toJson() { |
| 3516 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 3698 final core.Map<core.String, core.Object> _json = |
| 3699 new core.Map<core.String, core.Object>(); |
| 3517 if (configuration != null) { | 3700 if (configuration != null) { |
| 3518 _json["configuration"] = (configuration).toJson(); | 3701 _json["configuration"] = (configuration).toJson(); |
| 3519 } | 3702 } |
| 3520 if (errorResult != null) { | 3703 if (errorResult != null) { |
| 3521 _json["errorResult"] = (errorResult).toJson(); | 3704 _json["errorResult"] = (errorResult).toJson(); |
| 3522 } | 3705 } |
| 3523 if (id != null) { | 3706 if (id != null) { |
| 3524 _json["id"] = id; | 3707 _json["id"] = id; |
| 3525 } | 3708 } |
| 3526 if (jobReference != null) { | 3709 if (jobReference != null) { |
| (...skipping 12 matching lines...) Expand all Loading... |
| 3539 _json["status"] = (status).toJson(); | 3722 _json["status"] = (status).toJson(); |
| 3540 } | 3723 } |
| 3541 if (userEmail != null) { | 3724 if (userEmail != null) { |
| 3542 _json["user_email"] = userEmail; | 3725 _json["user_email"] = userEmail; |
| 3543 } | 3726 } |
| 3544 return _json; | 3727 return _json; |
| 3545 } | 3728 } |
| 3546 } | 3729 } |
| 3547 | 3730 |
/// A page of job entries returned from a list call.
class JobList {
  /// A hash of this page of results.
  core.String etag;

  /// List of jobs that were requested.
  core.List<JobListJobs> jobs;

  /// The resource type of the response.
  core.String kind;

  /// A token to request the next page of results.
  core.String nextPageToken;

  JobList();

  /// Populates this list from a decoded JSON [json] map; absent keys leave
  /// the corresponding field null.
  JobList.fromJson(core.Map json) {
    if (json.containsKey("etag")) {
      etag = json["etag"];
    }
    if (json.containsKey("jobs")) {
      jobs = json["jobs"]
          .map((item) => new JobListJobs.fromJson(item))
          .toList();
    }
    if (json.containsKey("kind")) {
      kind = json["kind"];
    }
    if (json.containsKey("nextPageToken")) {
      nextPageToken = json["nextPageToken"];
    }
  }

  /// Serializes this page to a JSON-compatible map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (etag != null) {
      result["etag"] = etag;
    }
    if (jobs != null) {
      result["jobs"] = jobs.map((job) => (job).toJson()).toList();
    }
    if (kind != null) {
      result["kind"] = kind;
    }
    if (nextPageToken != null) {
      result["nextPageToken"] = nextPageToken;
    }
    return result;
  }
}
| 3592 | 3781 |
/// A fully-qualified reference identifying a single job.
class JobReference {
  /// [Required] The ID of the job. The ID must contain only letters (a-z,
  /// A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length
  /// is 1,024 characters.
  core.String jobId;

  /// [Required] The ID of the project containing this job.
  core.String projectId;

  JobReference();

  /// Populates this reference from a decoded JSON [json] map; absent keys
  /// leave the corresponding field null.
  JobReference.fromJson(core.Map json) {
    if (json.containsKey("jobId")) {
      jobId = json["jobId"];
    }
    if (json.containsKey("projectId")) {
      projectId = json["projectId"];
    }
  }

  /// Serializes this reference to a JSON-compatible map, omitting null
  /// fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (jobId != null) {
      result["jobId"] = jobId;
    }
    if (projectId != null) {
      result["projectId"] = projectId;
    }
    return result;
  }
}
| 3625 | 3814 |
/// Statistics common to all job types, plus per-type sub-statistics.
class JobStatistics {
  /// [Output-only] Creation time of this job, in milliseconds since the epoch.
  /// This field will be present on all jobs.
  core.String creationTime;

  /// [Output-only] End time of this job, in milliseconds since the epoch. This
  /// field will be present whenever a job is in the DONE state.
  core.String endTime;

  /// [Output-only] Statistics for an extract job.
  JobStatistics4 extract;

  /// [Output-only] Statistics for a load job.
  JobStatistics3 load;

  /// [Output-only] Statistics for a query job.
  JobStatistics2 query;

  /// [Output-only] Start time of this job, in milliseconds since the epoch.
  /// This field will be present when the job transitions from the PENDING
  /// state to either RUNNING or DONE.
  core.String startTime;

  /// [Output-only] [Deprecated] Use the bytes processed in the query
  /// statistics instead.
  core.String totalBytesProcessed;

  JobStatistics();

  /// Populates these statistics from a decoded JSON [json] map; absent keys
  /// leave the corresponding field null.
  JobStatistics.fromJson(core.Map json) {
    if (json.containsKey("creationTime")) {
      creationTime = json["creationTime"];
    }
    if (json.containsKey("endTime")) {
      endTime = json["endTime"];
    }
    if (json.containsKey("extract")) {
      extract = new JobStatistics4.fromJson(json["extract"]);
    }
    if (json.containsKey("load")) {
      load = new JobStatistics3.fromJson(json["load"]);
    }
    if (json.containsKey("query")) {
      query = new JobStatistics2.fromJson(json["query"]);
    }
    if (json.containsKey("startTime")) {
      startTime = json["startTime"];
    }
    if (json.containsKey("totalBytesProcessed")) {
      totalBytesProcessed = json["totalBytesProcessed"];
    }
  }

  /// Serializes these statistics to a JSON-compatible map, omitting null
  /// fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (creationTime != null) {
      result["creationTime"] = creationTime;
    }
    if (endTime != null) {
      result["endTime"] = endTime;
    }
    if (extract != null) {
      result["extract"] = (extract).toJson();
    }
    if (load != null) {
      result["load"] = (load).toJson();
    }
    if (query != null) {
      result["query"] = (query).toJson();
    }
    if (startTime != null) {
      result["startTime"] = startTime;
    }
    if (totalBytesProcessed != null) {
      result["totalBytesProcessed"] = totalBytesProcessed;
    }
    return result;
  }
}
| 3707 | 3895 |
/// Statistics specific to query jobs.
class JobStatistics2 {
  /// [Output-only] Billing tier for the job.
  core.int billingTier;

  /// [Output-only] Whether the query result was fetched from the query cache.
  core.bool cacheHit;

  /// [Output-only] The number of rows affected by a DML statement. Present
  /// only for DML statements INSERT, UPDATE or DELETE.
  core.String numDmlAffectedRows;

  /// [Output-only] Describes execution plan for the query.
  core.List<ExplainQueryStage> queryPlan;

  /// [Output-only, Experimental] Referenced tables for the job. Queries that
  /// reference more than 50 tables will not have a complete list.
  core.List<TableReference> referencedTables;

  /// [Output-only, Experimental] The schema of the results. Present only for
  /// successful dry run of non-legacy SQL queries.
  TableSchema schema;

  /// [Output-only, Experimental] The type of query statement, if valid.
  core.String statementType;

  /// [Output-only] Total bytes billed for the job.
  core.String totalBytesBilled;

  /// [Output-only] Total bytes processed for the job.
  core.String totalBytesProcessed;

  /// [Output-only, Experimental] Standard SQL only: list of undeclared query
  /// parameters detected during a dry run validation.
  core.List<QueryParameter> undeclaredQueryParameters;

  JobStatistics2();

  /// Populates these statistics from a decoded JSON [json] map; absent keys
  /// leave the corresponding field null.
  JobStatistics2.fromJson(core.Map json) {
    if (json.containsKey("billingTier")) {
      billingTier = json["billingTier"];
    }
    if (json.containsKey("cacheHit")) {
      cacheHit = json["cacheHit"];
    }
    if (json.containsKey("numDmlAffectedRows")) {
      numDmlAffectedRows = json["numDmlAffectedRows"];
    }
    if (json.containsKey("queryPlan")) {
      queryPlan = json["queryPlan"]
          .map((item) => new ExplainQueryStage.fromJson(item))
          .toList();
    }
    if (json.containsKey("referencedTables")) {
      referencedTables = json["referencedTables"]
          .map((item) => new TableReference.fromJson(item))
          .toList();
    }
    if (json.containsKey("schema")) {
      schema = new TableSchema.fromJson(json["schema"]);
    }
    if (json.containsKey("statementType")) {
      statementType = json["statementType"];
    }
    if (json.containsKey("totalBytesBilled")) {
      totalBytesBilled = json["totalBytesBilled"];
    }
    if (json.containsKey("totalBytesProcessed")) {
      totalBytesProcessed = json["totalBytesProcessed"];
    }
    if (json.containsKey("undeclaredQueryParameters")) {
      undeclaredQueryParameters = json["undeclaredQueryParameters"]
          .map((item) => new QueryParameter.fromJson(item))
          .toList();
    }
  }

  /// Serializes these statistics to a JSON-compatible map, omitting null
  /// fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (billingTier != null) {
      result["billingTier"] = billingTier;
    }
    if (cacheHit != null) {
      result["cacheHit"] = cacheHit;
    }
    if (numDmlAffectedRows != null) {
      result["numDmlAffectedRows"] = numDmlAffectedRows;
    }
    if (queryPlan != null) {
      result["queryPlan"] = queryPlan.map((stage) => (stage).toJson()).toList();
    }
    if (referencedTables != null) {
      result["referencedTables"] =
          referencedTables.map((table) => (table).toJson()).toList();
    }
    if (schema != null) {
      result["schema"] = (schema).toJson();
    }
    if (statementType != null) {
      result["statementType"] = statementType;
    }
    if (totalBytesBilled != null) {
      result["totalBytesBilled"] = totalBytesBilled;
    }
    if (totalBytesProcessed != null) {
      result["totalBytesProcessed"] = totalBytesProcessed;
    }
    if (undeclaredQueryParameters != null) {
      result["undeclaredQueryParameters"] =
          undeclaredQueryParameters.map((param) => (param).toJson()).toList();
    }
    return result;
  }
}
| 3814 | 4010 |
| 3815 class JobStatistics3 { | 4011 class JobStatistics3 { |
| 3816 /** | 4012 /// [Output-only] The number of bad records encountered. Note that if the job |
| 3817 * [Output-only] The number of bad records encountered. Note that if the job | 4013 /// has failed because of more bad records encountered than the maximum |
| 3818 * has failed because of more bad records encountered than the maximum allowed | 4014 /// allowed in the load job configuration, then this number can be less than |
| 3819 * in the load job configuration, then this number can be less than the total | 4015 /// the total number of bad records present in the input data. |
| 3820 * number of bad records present in the input data. | |
| 3821 */ | |
| 3822 core.String badRecords; | 4016 core.String badRecords; |
| 3823 /** [Output-only] Number of bytes of source data in a load job. */ | 4017 |
| 4018 /// [Output-only] Number of bytes of source data in a load job. |
| 3824 core.String inputFileBytes; | 4019 core.String inputFileBytes; |
| 3825 /** [Output-only] Number of source files in a load job. */ | 4020 |
| 4021 /// [Output-only] Number of source files in a load job. |
| 3826 core.String inputFiles; | 4022 core.String inputFiles; |
| 3827 /** | 4023 |
| 3828 * [Output-only] Size of the loaded data in bytes. Note that while a load job | 4024 /// [Output-only] Size of the loaded data in bytes. Note that while a load |
| 3829 * is in the running state, this value may change. | 4025 /// job is in the running state, this value may change. |
| 3830 */ | |
| 3831 core.String outputBytes; | 4026 core.String outputBytes; |
| 3832 /** | 4027 |
| 3833 * [Output-only] Number of rows imported in a load job. Note that while an | 4028 /// [Output-only] Number of rows imported in a load job. Note that while an |
| 3834 * import job is in the running state, this value may change. | 4029 /// import job is in the running state, this value may change. |
| 3835 */ | |
| 3836 core.String outputRows; | 4030 core.String outputRows; |
| 3837 | 4031 |
| 3838 JobStatistics3(); | 4032 JobStatistics3(); |
| 3839 | 4033 |
/// Populates the load-job statistics from [json].
///
/// Keys that are absent from [json] leave the corresponding field unset
/// (null); no type conversion is performed beyond the raw JSON values.
JobStatistics3.fromJson(core.Map json) {
  if (json.containsKey("badRecords")) {
    badRecords = json["badRecords"];
  }
  if (json.containsKey("inputFileBytes")) {
    inputFileBytes = json["inputFileBytes"];
  }
  if (json.containsKey("inputFiles")) {
    inputFiles = json["inputFiles"];
  }
  if (json.containsKey("outputBytes")) {
    outputBytes = json["outputBytes"];
  }
  if (json.containsKey("outputRows")) {
    outputRows = json["outputRows"];
  }
}
| 3857 | 4051 |
/// Serializes the load-job statistics to a JSON map.
///
/// Fields that are unset (null) are omitted from the result.
core.Map<core.String, core.Object> toJson() {
  final json = <core.String, core.Object>{};
  if (badRecords != null) {
    json["badRecords"] = badRecords;
  }
  if (inputFileBytes != null) {
    json["inputFileBytes"] = inputFileBytes;
  }
  if (inputFiles != null) {
    json["inputFiles"] = inputFiles;
  }
  if (outputBytes != null) {
    json["outputBytes"] = outputBytes;
  }
  if (outputRows != null) {
    json["outputRows"] = outputRows;
  }
  return json;
}
| 3877 } | 4072 } |
| 3878 | 4073 |
/// Statistics for an extract job.
class JobStatistics4 {
  /// [Output-only] Number of files per destination URI or URI pattern
  /// specified in the extract configuration. These values will be in the same
  /// order as the URIs specified in the 'destinationUris' field.
  core.List<core.String> destinationUriFileCounts;

  JobStatistics4();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  JobStatistics4.fromJson(core.Map json) {
    if (json.containsKey("destinationUriFileCounts")) {
      destinationUriFileCounts = json["destinationUriFileCounts"];
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (destinationUriFileCounts != null) {
      json["destinationUriFileCounts"] = destinationUriFileCounts;
    }
    return json;
  }
}
| 3903 | 4097 |
/// The running state and error information of a job.
class JobStatus {
  /// [Output-only] Final error result of the job. If present, indicates that
  /// the job has completed and was unsuccessful.
  ErrorProto errorResult;

  /// [Output-only] The first errors encountered during the running of the
  /// job. The final message includes the number of errors that caused the
  /// process to stop. Errors here do not necessarily mean that the job has
  /// completed or was unsuccessful.
  core.List<ErrorProto> errors;

  /// [Output-only] Running state of the job.
  core.String state;

  JobStatus();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  JobStatus.fromJson(core.Map json) {
    if (json.containsKey("errorResult")) {
      errorResult = new ErrorProto.fromJson(json["errorResult"]);
    }
    if (json.containsKey("errors")) {
      errors =
          json["errors"].map((e) => new ErrorProto.fromJson(e)).toList();
    }
    if (json.containsKey("state")) {
      state = json["state"];
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (errorResult != null) {
      json["errorResult"] = errorResult.toJson();
    }
    if (errors != null) {
      json["errors"] = errors.map((e) => e.toJson()).toList();
    }
    if (state != null) {
      json["state"] = state;
    }
    return json;
  }
}
| 3948 | 4143 |
/// Represents a single JSON object.
class JsonObject extends collection.MapBase<core.String, core.Object> {
  // Backing storage for all entries of this map.
  final core.Map _innerMap = {};

  JsonObject();

  /// Builds a [JsonObject] by copying every entry of [_json].
  JsonObject.fromJson(core.Map _json) {
    _json.forEach((core.String key, value) {
      this[key] = value;
    });
  }

  /// Returns a plain map copy of this object's entries.
  core.Map<core.String, core.Object> toJson() {
    final copy = <core.String, core.Object>{};
    forEach((core.String key, value) {
      copy[key] = value;
    });
    return copy;
  }

  core.Object operator [](core.Object key) => _innerMap[key];

  operator []=(core.String key, core.Object value) {
    _innerMap[key] = value;
  }

  void clear() => _innerMap.clear();

  core.Iterable<core.String> get keys => _innerMap.keys;

  core.Object remove(core.Object key) => _innerMap.remove(key);
}
| 3985 | 4179 |
/// A single project entry in a [ProjectList].
class ProjectListProjects {
  /// A descriptive name for this project.
  core.String friendlyName;

  /// An opaque ID of this project.
  core.String id;

  /// The resource type.
  core.String kind;

  /// The numeric ID of this project.
  core.String numericId;

  /// A unique reference to this project.
  ProjectReference projectReference;

  ProjectListProjects();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  ProjectListProjects.fromJson(core.Map json) {
    if (json.containsKey("friendlyName")) {
      friendlyName = json["friendlyName"];
    }
    if (json.containsKey("id")) {
      id = json["id"];
    }
    if (json.containsKey("kind")) {
      kind = json["kind"];
    }
    if (json.containsKey("numericId")) {
      numericId = json["numericId"];
    }
    if (json.containsKey("projectReference")) {
      projectReference =
          new ProjectReference.fromJson(json["projectReference"]);
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (friendlyName != null) {
      json["friendlyName"] = friendlyName;
    }
    if (id != null) {
      json["id"] = id;
    }
    if (kind != null) {
      json["kind"] = kind;
    }
    if (numericId != null) {
      json["numericId"] = numericId;
    }
    if (projectReference != null) {
      json["projectReference"] = projectReference.toJson();
    }
    return json;
  }
}
| 4038 | 4238 |
/// A page of projects to which the caller has at least READ access.
class ProjectList {
  /// A hash of the page of results
  core.String etag;

  /// The type of list.
  core.String kind;

  /// A token to request the next page of results.
  core.String nextPageToken;

  /// Projects to which you have at least READ access.
  core.List<ProjectListProjects> projects;

  /// The total number of projects in the list.
  core.int totalItems;

  ProjectList();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  ProjectList.fromJson(core.Map json) {
    if (json.containsKey("etag")) {
      etag = json["etag"];
    }
    if (json.containsKey("kind")) {
      kind = json["kind"];
    }
    if (json.containsKey("nextPageToken")) {
      nextPageToken = json["nextPageToken"];
    }
    if (json.containsKey("projects")) {
      projects = json["projects"]
          .map((e) => new ProjectListProjects.fromJson(e))
          .toList();
    }
    if (json.containsKey("totalItems")) {
      totalItems = json["totalItems"];
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (etag != null) {
      json["etag"] = etag;
    }
    if (kind != null) {
      json["kind"] = kind;
    }
    if (nextPageToken != null) {
      json["nextPageToken"] = nextPageToken;
    }
    if (projects != null) {
      json["projects"] = projects.map((p) => p.toJson()).toList();
    }
    if (totalItems != null) {
      json["totalItems"] = totalItems;
    }
    return json;
  }
}
| 4091 | 4298 |
/// A reference that uniquely identifies a project.
class ProjectReference {
  /// [Required] ID of the project. Can be either the numeric ID or the
  /// assigned ID of the project.
  core.String projectId;

  ProjectReference();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  ProjectReference.fromJson(core.Map json) {
    if (json.containsKey("projectId")) {
      projectId = json["projectId"];
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (projectId != null) {
      json["projectId"] = projectId;
    }
    return json;
  }
}
| 4115 | 4321 |
/// A single named or positional query parameter for Standard SQL queries.
class QueryParameter {
  /// [Optional] If unset, this is a positional parameter. Otherwise, should
  /// be unique within a query.
  core.String name;

  /// [Required] The type of this parameter.
  QueryParameterType parameterType;

  /// [Required] The value of this parameter.
  QueryParameterValue parameterValue;

  QueryParameter();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  QueryParameter.fromJson(core.Map json) {
    if (json.containsKey("name")) {
      name = json["name"];
    }
    if (json.containsKey("parameterType")) {
      parameterType = new QueryParameterType.fromJson(json["parameterType"]);
    }
    if (json.containsKey("parameterValue")) {
      parameterValue =
          new QueryParameterValue.fromJson(json["parameterValue"]);
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (name != null) {
      json["name"] = name;
    }
    if (parameterType != null) {
      json["parameterType"] = parameterType.toJson();
    }
    if (parameterValue != null) {
      json["parameterValue"] = parameterValue.toJson();
    }
    return json;
  }
}
| 4155 | 4363 |
/// Describes one field of a STRUCT-typed query parameter.
class QueryParameterTypeStructTypes {
  /// [Optional] Human-oriented description of the field.
  core.String description;

  /// [Optional] The name of this field.
  core.String name;

  /// [Required] The type of this field.
  QueryParameterType type;

  QueryParameterTypeStructTypes();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  QueryParameterTypeStructTypes.fromJson(core.Map json) {
    if (json.containsKey("description")) {
      description = json["description"];
    }
    if (json.containsKey("name")) {
      name = json["name"];
    }
    if (json.containsKey("type")) {
      type = new QueryParameterType.fromJson(json["type"]);
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (description != null) {
      json["description"] = description;
    }
    if (name != null) {
      json["name"] = name;
    }
    if (type != null) {
      json["type"] = type.toJson();
    }
    return json;
  }
}
| 4192 | 4403 |
/// The type of a query parameter: a scalar, an array, or a struct.
class QueryParameterType {
  /// [Optional] The type of the array's elements, if this is an array.
  QueryParameterType arrayType;

  /// [Optional] The types of the fields of this struct, in order, if this is
  /// a struct.
  core.List<QueryParameterTypeStructTypes> structTypes;

  /// [Required] The top level type of this field.
  core.String type;

  QueryParameterType();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  QueryParameterType.fromJson(core.Map json) {
    if (json.containsKey("arrayType")) {
      // Array element types are themselves full QueryParameterTypes.
      arrayType = new QueryParameterType.fromJson(json["arrayType"]);
    }
    if (json.containsKey("structTypes")) {
      structTypes = json["structTypes"]
          .map((e) => new QueryParameterTypeStructTypes.fromJson(e))
          .toList();
    }
    if (json.containsKey("type")) {
      type = json["type"];
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (arrayType != null) {
      json["arrayType"] = arrayType.toJson();
    }
    if (structTypes != null) {
      json["structTypes"] = structTypes.map((t) => t.toJson()).toList();
    }
    if (type != null) {
      json["type"] = type;
    }
    return json;
  }
}
| 4232 | 4447 |
/// The value of a query parameter: a scalar, an array, or a struct.
class QueryParameterValue {
  /// [Optional] The array values, if this is an array type.
  core.List<QueryParameterValue> arrayValues;

  /// [Optional] The struct field values, in order of the struct type's
  /// declaration.
  core.Map<core.String, QueryParameterValue> structValues;

  /// [Optional] The value of this value, if a simple scalar type.
  core.String value;

  QueryParameterValue();

  /// Populates the fields from [json]; an absent key leaves the field unset.
  QueryParameterValue.fromJson(core.Map json) {
    if (json.containsKey("arrayValues")) {
      arrayValues = json["arrayValues"]
          .map((e) => new QueryParameterValue.fromJson(e))
          .toList();
    }
    if (json.containsKey("structValues")) {
      // Recursively decode each struct field value.
      structValues = commons
          .mapMap<core.Map<core.String, core.Object>, QueryParameterValue>(
              json["structValues"],
              (core.Map<core.String, core.Object> e) =>
                  new QueryParameterValue.fromJson(e));
    }
    if (json.containsKey("value")) {
      value = json["value"];
    }
  }

  /// Serializes this object to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.Object> toJson() {
    final json = <core.String, core.Object>{};
    if (arrayValues != null) {
      json["arrayValues"] = arrayValues.map((v) => v.toJson()).toList();
    }
    if (structValues != null) {
      json["structValues"] = commons
          .mapMap<QueryParameterValue, core.Map<core.String, core.Object>>(
              structValues, (QueryParameterValue v) => v.toJson());
    }
    if (value != null) {
      json["value"] = value;
    }
    return json;
  }
}
| 4272 | 4497 |
| 4273 class QueryRequest { | 4498 class QueryRequest { |
| 4274 /** | 4499 /// [Optional] Specifies the default datasetId and projectId to assume for |
| 4275 * [Optional] Specifies the default datasetId and projectId to assume for any | 4500 /// any unqualified table names in the query. If not set, all table names in |
| 4276 * unqualified table names in the query. If not set, all table names in the | 4501 /// the query string must be qualified in the format 'datasetId.tableId'. |
| 4277 * query string must be qualified in the format 'datasetId.tableId'. | |
| 4278 */ | |
| 4279 DatasetReference defaultDataset; | 4502 DatasetReference defaultDataset; |
| 4280 /** | 4503 |
| 4281 * [Optional] If set to true, BigQuery doesn't run the job. Instead, if the | 4504 /// [Optional] If set to true, BigQuery doesn't run the job. Instead, if the |
| 4282 * query is valid, BigQuery returns statistics about the job such as how many | 4505 /// query is valid, BigQuery returns statistics about the job such as how |
| 4283 * bytes would be processed. If the query is invalid, an error returns. The | 4506 /// many bytes would be processed. If the query is invalid, an error returns. |
| 4284 * default value is false. | 4507 /// The default value is false. |
| 4285 */ | |
| 4286 core.bool dryRun; | 4508 core.bool dryRun; |
| 4287 /** The resource type of the request. */ | 4509 |
| 4510 /// The resource type of the request. |
| 4288 core.String kind; | 4511 core.String kind; |
| 4289 /** | 4512 |
| 4290 * [Optional] The maximum number of rows of data to return per page of | 4513 /// [Optional] The maximum number of rows of data to return per page of |
| 4291 * results. Setting this flag to a small value such as 1000 and then paging | 4514 /// results. Setting this flag to a small value such as 1000 and then paging |
| 4292 * through results might improve reliability when the query result set is | 4515 /// through results might improve reliability when the query result set is |
| 4293 * large. In addition to this limit, responses are also limited to 10 MB. By | 4516 /// large. In addition to this limit, responses are also limited to 10 MB. By |
| 4294 * default, there is no maximum row count, and only the byte limit applies. | 4517 /// default, there is no maximum row count, and only the byte limit applies. |
| 4295 */ | |
| 4296 core.int maxResults; | 4518 core.int maxResults; |
| 4297 /** | 4519 |
| 4298 * Standard SQL only. Set to POSITIONAL to use positional (?) query parameters | 4520 /// Standard SQL only. Set to POSITIONAL to use positional (?) query |
| 4299 * or to NAMED to use named (@myparam) query parameters in this query. | 4521 /// parameters or to NAMED to use named (@myparam) query parameters in this |
| 4300 */ | 4522 /// query. |
| 4301 core.String parameterMode; | 4523 core.String parameterMode; |
| 4302 /** [Deprecated] This property is deprecated. */ | 4524 |
| 4525 /// [Deprecated] This property is deprecated. |
| 4303 core.bool preserveNulls; | 4526 core.bool preserveNulls; |
| 4304 /** | 4527 |
| 4305 * [Required] A query string, following the BigQuery query syntax, of the | 4528 /// [Required] A query string, following the BigQuery query syntax, of the |
| 4306 * query to execute. Example: "SELECT count(f1) FROM | 4529 /// query to execute. Example: "SELECT count(f1) FROM |
| 4307 * [myProjectId:myDatasetId.myTableId]". | 4530 /// [myProjectId:myDatasetId.myTableId]". |
| 4308 */ | |
| 4309 core.String query; | 4531 core.String query; |
| 4310 /** Query parameters for Standard SQL queries. */ | 4532 |
| 4533 /// Query parameters for Standard SQL queries. |
| 4311 core.List<QueryParameter> queryParameters; | 4534 core.List<QueryParameter> queryParameters; |
| 4312 /** | 4535 |
| 4313 * [Optional] How long to wait for the query to complete, in milliseconds, | 4536 /// [Optional] How long to wait for the query to complete, in milliseconds, |
| 4314 * before the request times out and returns. Note that this is only a timeout | 4537 /// before the request times out and returns. Note that this is only a |
| 4315 * for the request, not the query. If the query takes longer to run than the | 4538 /// timeout for the request, not the query. If the query takes longer to run |
| 4316 * timeout value, the call returns without any results and with the | 4539 /// than the timeout value, the call returns without any results and with the |
| 4317 * 'jobComplete' flag set to false. You can call GetQueryResults() to wait for | 4540 /// 'jobComplete' flag set to false. You can call GetQueryResults() to wait |
| 4318 * the query to complete and read the results. The default value is 10000 | 4541 /// for the query to complete and read the results. The default value is |
| 4319 * milliseconds (10 seconds). | 4542 /// 10000 milliseconds (10 seconds). |
| 4320 */ | |
| 4321 core.int timeoutMs; | 4543 core.int timeoutMs; |
| 4322 /** | 4544 |
| 4323 * Specifies whether to use BigQuery's legacy SQL dialect for this query. The | 4545 /// Specifies whether to use BigQuery's legacy SQL dialect for this query. |
| 4324 * default value is true. If set to false, the query will use BigQuery's | 4546 /// The default value is true. If set to false, the query will use BigQuery's |
| 4325 * standard SQL: https://cloud.google.com/bigquery/sql-reference/ When | 4547 /// standard SQL: https://cloud.google.com/bigquery/sql-reference/ When |
| 4326 * useLegacySql is set to false, the value of flattenResults is ignored; query | 4548 /// useLegacySql is set to false, the value of flattenResults is ignored; |
| 4327 * will be run as if flattenResults is false. | 4549 /// query will be run as if flattenResults is false. |
| 4328 */ | |
| 4329 core.bool useLegacySql; | 4550 core.bool useLegacySql; |
| 4330 /** | 4551 |
| 4331 * [Optional] Whether to look for the result in the query cache. The query | 4552 /// [Optional] Whether to look for the result in the query cache. The query |
| 4332 * cache is a best-effort cache that will be flushed whenever tables in the | 4553 /// cache is a best-effort cache that will be flushed whenever tables in the |
| 4333 * query are modified. The default value is true. | 4554 /// query are modified. The default value is true. |
| 4334 */ | |
| 4335 core.bool useQueryCache; | 4555 core.bool useQueryCache; |
| 4336 | 4556 |
| 4337 QueryRequest(); | 4557 QueryRequest(); |
| 4338 | 4558 |
| 4339 QueryRequest.fromJson(core.Map _json) { | 4559 QueryRequest.fromJson(core.Map _json) { |
| 4340 if (_json.containsKey("defaultDataset")) { | 4560 if (_json.containsKey("defaultDataset")) { |
| 4341 defaultDataset = new DatasetReference.fromJson(_json["defaultDataset"]); | 4561 defaultDataset = new DatasetReference.fromJson(_json["defaultDataset"]); |
| 4342 } | 4562 } |
| 4343 if (_json.containsKey("dryRun")) { | 4563 if (_json.containsKey("dryRun")) { |
| 4344 dryRun = _json["dryRun"]; | 4564 dryRun = _json["dryRun"]; |
| 4345 } | 4565 } |
| 4346 if (_json.containsKey("kind")) { | 4566 if (_json.containsKey("kind")) { |
| 4347 kind = _json["kind"]; | 4567 kind = _json["kind"]; |
| 4348 } | 4568 } |
| 4349 if (_json.containsKey("maxResults")) { | 4569 if (_json.containsKey("maxResults")) { |
| 4350 maxResults = _json["maxResults"]; | 4570 maxResults = _json["maxResults"]; |
| 4351 } | 4571 } |
| 4352 if (_json.containsKey("parameterMode")) { | 4572 if (_json.containsKey("parameterMode")) { |
| 4353 parameterMode = _json["parameterMode"]; | 4573 parameterMode = _json["parameterMode"]; |
| 4354 } | 4574 } |
| 4355 if (_json.containsKey("preserveNulls")) { | 4575 if (_json.containsKey("preserveNulls")) { |
| 4356 preserveNulls = _json["preserveNulls"]; | 4576 preserveNulls = _json["preserveNulls"]; |
| 4357 } | 4577 } |
| 4358 if (_json.containsKey("query")) { | 4578 if (_json.containsKey("query")) { |
| 4359 query = _json["query"]; | 4579 query = _json["query"]; |
| 4360 } | 4580 } |
| 4361 if (_json.containsKey("queryParameters")) { | 4581 if (_json.containsKey("queryParameters")) { |
| 4362 queryParameters = _json["queryParameters"].map((value) => new QueryParamet
er.fromJson(value)).toList(); | 4582 queryParameters = _json["queryParameters"] |
| 4583 .map((value) => new QueryParameter.fromJson(value)) |
| 4584 .toList(); |
| 4363 } | 4585 } |
| 4364 if (_json.containsKey("timeoutMs")) { | 4586 if (_json.containsKey("timeoutMs")) { |
| 4365 timeoutMs = _json["timeoutMs"]; | 4587 timeoutMs = _json["timeoutMs"]; |
| 4366 } | 4588 } |
| 4367 if (_json.containsKey("useLegacySql")) { | 4589 if (_json.containsKey("useLegacySql")) { |
| 4368 useLegacySql = _json["useLegacySql"]; | 4590 useLegacySql = _json["useLegacySql"]; |
| 4369 } | 4591 } |
| 4370 if (_json.containsKey("useQueryCache")) { | 4592 if (_json.containsKey("useQueryCache")) { |
| 4371 useQueryCache = _json["useQueryCache"]; | 4593 useQueryCache = _json["useQueryCache"]; |
| 4372 } | 4594 } |
| 4373 } | 4595 } |
| 4374 | 4596 |
| 4375 core.Map<core.String, core.Object> toJson() { | 4597 core.Map<core.String, core.Object> toJson() { |
| 4376 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 4598 final core.Map<core.String, core.Object> _json = |
| 4599 new core.Map<core.String, core.Object>(); |
| 4377 if (defaultDataset != null) { | 4600 if (defaultDataset != null) { |
| 4378 _json["defaultDataset"] = (defaultDataset).toJson(); | 4601 _json["defaultDataset"] = (defaultDataset).toJson(); |
| 4379 } | 4602 } |
| 4380 if (dryRun != null) { | 4603 if (dryRun != null) { |
| 4381 _json["dryRun"] = dryRun; | 4604 _json["dryRun"] = dryRun; |
| 4382 } | 4605 } |
| 4383 if (kind != null) { | 4606 if (kind != null) { |
| 4384 _json["kind"] = kind; | 4607 _json["kind"] = kind; |
| 4385 } | 4608 } |
| 4386 if (maxResults != null) { | 4609 if (maxResults != null) { |
| 4387 _json["maxResults"] = maxResults; | 4610 _json["maxResults"] = maxResults; |
| 4388 } | 4611 } |
| 4389 if (parameterMode != null) { | 4612 if (parameterMode != null) { |
| 4390 _json["parameterMode"] = parameterMode; | 4613 _json["parameterMode"] = parameterMode; |
| 4391 } | 4614 } |
| 4392 if (preserveNulls != null) { | 4615 if (preserveNulls != null) { |
| 4393 _json["preserveNulls"] = preserveNulls; | 4616 _json["preserveNulls"] = preserveNulls; |
| 4394 } | 4617 } |
| 4395 if (query != null) { | 4618 if (query != null) { |
| 4396 _json["query"] = query; | 4619 _json["query"] = query; |
| 4397 } | 4620 } |
| 4398 if (queryParameters != null) { | 4621 if (queryParameters != null) { |
| 4399 _json["queryParameters"] = queryParameters.map((value) => (value).toJson()
).toList(); | 4622 _json["queryParameters"] = |
| 4623 queryParameters.map((value) => (value).toJson()).toList(); |
| 4400 } | 4624 } |
| 4401 if (timeoutMs != null) { | 4625 if (timeoutMs != null) { |
| 4402 _json["timeoutMs"] = timeoutMs; | 4626 _json["timeoutMs"] = timeoutMs; |
| 4403 } | 4627 } |
| 4404 if (useLegacySql != null) { | 4628 if (useLegacySql != null) { |
| 4405 _json["useLegacySql"] = useLegacySql; | 4629 _json["useLegacySql"] = useLegacySql; |
| 4406 } | 4630 } |
| 4407 if (useQueryCache != null) { | 4631 if (useQueryCache != null) { |
| 4408 _json["useQueryCache"] = useQueryCache; | 4632 _json["useQueryCache"] = useQueryCache; |
| 4409 } | 4633 } |
| 4410 return _json; | 4634 return _json; |
| 4411 } | 4635 } |
| 4412 } | 4636 } |
| 4413 | 4637 |
| 4414 class QueryResponse { | 4638 class QueryResponse { |
| 4415 /** Whether the query result was fetched from the query cache. */ | 4639 /// Whether the query result was fetched from the query cache. |
| 4416 core.bool cacheHit; | 4640 core.bool cacheHit; |
| 4417 /** | 4641 |
| 4418 * [Output-only] The first errors or warnings encountered during the running | 4642 /// [Output-only] The first errors or warnings encountered during the running |
| 4419 * of the job. The final message includes the number of errors that caused the | 4643 /// of the job. The final message includes the number of errors that caused |
| 4420 * process to stop. Errors here do not necessarily mean that the job has | 4644 /// the process to stop. Errors here do not necessarily mean that the job has |
| 4421 * completed or was unsuccessful. | 4645 /// completed or was unsuccessful. |
| 4422 */ | |
| 4423 core.List<ErrorProto> errors; | 4646 core.List<ErrorProto> errors; |
| 4424 /** | 4647 |
| 4425 * Whether the query has completed or not. If rows or totalRows are present, | 4648 /// Whether the query has completed or not. If rows or totalRows are present, |
| 4426 * this will always be true. If this is false, totalRows will not be | 4649 /// this will always be true. If this is false, totalRows will not be |
| 4427 * available. | 4650 /// available. |
| 4428 */ | |
| 4429 core.bool jobComplete; | 4651 core.bool jobComplete; |
| 4430 /** | 4652 |
| 4431 * Reference to the Job that was created to run the query. This field will be | 4653 /// Reference to the Job that was created to run the query. This field will |
| 4432 * present even if the original request timed out, in which case | 4654 /// be present even if the original request timed out, in which case |
| 4433 * GetQueryResults can be used to read the results once the query has | 4655 /// GetQueryResults can be used to read the results once the query has |
| 4434 * completed. Since this API only returns the first page of results, | 4656 /// completed. Since this API only returns the first page of results, |
| 4435 * subsequent pages can be fetched via the same mechanism (GetQueryResults). | 4657 /// subsequent pages can be fetched via the same mechanism (GetQueryResults). |
| 4436 */ | |
| 4437 JobReference jobReference; | 4658 JobReference jobReference; |
| 4438 /** The resource type. */ | 4659 |
| 4660 /// The resource type. |
| 4439 core.String kind; | 4661 core.String kind; |
| 4440 /** | 4662 |
| 4441 * [Output-only] The number of rows affected by a DML statement. Present only | 4663 /// [Output-only] The number of rows affected by a DML statement. Present |
| 4442 * for DML statements INSERT, UPDATE or DELETE. | 4664 /// only for DML statements INSERT, UPDATE or DELETE. |
| 4443 */ | |
| 4444 core.String numDmlAffectedRows; | 4665 core.String numDmlAffectedRows; |
| 4445 /** A token used for paging results. */ | 4666 |
| 4667 /// A token used for paging results. |
| 4446 core.String pageToken; | 4668 core.String pageToken; |
| 4447 /** | 4669 |
| 4448 * An object with as many results as can be contained within the maximum | 4670 /// An object with as many results as can be contained within the maximum |
| 4449 * permitted reply size. To get any additional rows, you can call | 4671 /// permitted reply size. To get any additional rows, you can call |
| 4450 * GetQueryResults and specify the jobReference returned above. | 4672 /// GetQueryResults and specify the jobReference returned above. |
| 4451 */ | |
| 4452 core.List<TableRow> rows; | 4673 core.List<TableRow> rows; |
| 4453 /** | 4674 |
| 4454 * The schema of the results. Present only when the query completes | 4675 /// The schema of the results. Present only when the query completes |
| 4455 * successfully. | 4676 /// successfully. |
| 4456 */ | |
| 4457 TableSchema schema; | 4677 TableSchema schema; |
| 4458 /** | 4678 |
| 4459 * The total number of bytes processed for this query. If this query was a dry | 4679 /// The total number of bytes processed for this query. If this query was a |
| 4460 * run, this is the number of bytes that would be processed if the query were | 4680 /// dry run, this is the number of bytes that would be processed if the query |
| 4461 * run. | 4681 /// were run. |
| 4462 */ | |
| 4463 core.String totalBytesProcessed; | 4682 core.String totalBytesProcessed; |
| 4464 /** | 4683 |
| 4465 * The total number of rows in the complete query result set, which can be | 4684 /// The total number of rows in the complete query result set, which can be |
| 4466 * more than the number of rows in this single page of results. | 4685 /// more than the number of rows in this single page of results. |
| 4467 */ | |
| 4468 core.String totalRows; | 4686 core.String totalRows; |
| 4469 | 4687 |
| 4470 QueryResponse(); | 4688 QueryResponse(); |
| 4471 | 4689 |
| 4472 QueryResponse.fromJson(core.Map _json) { | 4690 QueryResponse.fromJson(core.Map _json) { |
| 4473 if (_json.containsKey("cacheHit")) { | 4691 if (_json.containsKey("cacheHit")) { |
| 4474 cacheHit = _json["cacheHit"]; | 4692 cacheHit = _json["cacheHit"]; |
| 4475 } | 4693 } |
| 4476 if (_json.containsKey("errors")) { | 4694 if (_json.containsKey("errors")) { |
| 4477 errors = _json["errors"].map((value) => new ErrorProto.fromJson(value)).to
List(); | 4695 errors = _json["errors"] |
| 4696 .map((value) => new ErrorProto.fromJson(value)) |
| 4697 .toList(); |
| 4478 } | 4698 } |
| 4479 if (_json.containsKey("jobComplete")) { | 4699 if (_json.containsKey("jobComplete")) { |
| 4480 jobComplete = _json["jobComplete"]; | 4700 jobComplete = _json["jobComplete"]; |
| 4481 } | 4701 } |
| 4482 if (_json.containsKey("jobReference")) { | 4702 if (_json.containsKey("jobReference")) { |
| 4483 jobReference = new JobReference.fromJson(_json["jobReference"]); | 4703 jobReference = new JobReference.fromJson(_json["jobReference"]); |
| 4484 } | 4704 } |
| 4485 if (_json.containsKey("kind")) { | 4705 if (_json.containsKey("kind")) { |
| 4486 kind = _json["kind"]; | 4706 kind = _json["kind"]; |
| 4487 } | 4707 } |
| 4488 if (_json.containsKey("numDmlAffectedRows")) { | 4708 if (_json.containsKey("numDmlAffectedRows")) { |
| 4489 numDmlAffectedRows = _json["numDmlAffectedRows"]; | 4709 numDmlAffectedRows = _json["numDmlAffectedRows"]; |
| 4490 } | 4710 } |
| 4491 if (_json.containsKey("pageToken")) { | 4711 if (_json.containsKey("pageToken")) { |
| 4492 pageToken = _json["pageToken"]; | 4712 pageToken = _json["pageToken"]; |
| 4493 } | 4713 } |
| 4494 if (_json.containsKey("rows")) { | 4714 if (_json.containsKey("rows")) { |
| 4495 rows = _json["rows"].map((value) => new TableRow.fromJson(value)).toList()
; | 4715 rows = |
| 4716 _json["rows"].map((value) => new TableRow.fromJson(value)).toList(); |
| 4496 } | 4717 } |
| 4497 if (_json.containsKey("schema")) { | 4718 if (_json.containsKey("schema")) { |
| 4498 schema = new TableSchema.fromJson(_json["schema"]); | 4719 schema = new TableSchema.fromJson(_json["schema"]); |
| 4499 } | 4720 } |
| 4500 if (_json.containsKey("totalBytesProcessed")) { | 4721 if (_json.containsKey("totalBytesProcessed")) { |
| 4501 totalBytesProcessed = _json["totalBytesProcessed"]; | 4722 totalBytesProcessed = _json["totalBytesProcessed"]; |
| 4502 } | 4723 } |
| 4503 if (_json.containsKey("totalRows")) { | 4724 if (_json.containsKey("totalRows")) { |
| 4504 totalRows = _json["totalRows"]; | 4725 totalRows = _json["totalRows"]; |
| 4505 } | 4726 } |
| 4506 } | 4727 } |
| 4507 | 4728 |
| 4508 core.Map<core.String, core.Object> toJson() { | 4729 core.Map<core.String, core.Object> toJson() { |
| 4509 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 4730 final core.Map<core.String, core.Object> _json = |
| 4731 new core.Map<core.String, core.Object>(); |
| 4510 if (cacheHit != null) { | 4732 if (cacheHit != null) { |
| 4511 _json["cacheHit"] = cacheHit; | 4733 _json["cacheHit"] = cacheHit; |
| 4512 } | 4734 } |
| 4513 if (errors != null) { | 4735 if (errors != null) { |
| 4514 _json["errors"] = errors.map((value) => (value).toJson()).toList(); | 4736 _json["errors"] = errors.map((value) => (value).toJson()).toList(); |
| 4515 } | 4737 } |
| 4516 if (jobComplete != null) { | 4738 if (jobComplete != null) { |
| 4517 _json["jobComplete"] = jobComplete; | 4739 _json["jobComplete"] = jobComplete; |
| 4518 } | 4740 } |
| 4519 if (jobReference != null) { | 4741 if (jobReference != null) { |
| (...skipping 18 matching lines...) Expand all Loading... |
| 4538 _json["totalBytesProcessed"] = totalBytesProcessed; | 4760 _json["totalBytesProcessed"] = totalBytesProcessed; |
| 4539 } | 4761 } |
| 4540 if (totalRows != null) { | 4762 if (totalRows != null) { |
| 4541 _json["totalRows"] = totalRows; | 4763 _json["totalRows"] = totalRows; |
| 4542 } | 4764 } |
| 4543 return _json; | 4765 return _json; |
| 4544 } | 4766 } |
| 4545 } | 4767 } |
| 4546 | 4768 |
| 4547 class Streamingbuffer { | 4769 class Streamingbuffer { |
| 4548 /** | 4770 /// [Output-only] A lower-bound estimate of the number of bytes currently in |
| 4549 * [Output-only] A lower-bound estimate of the number of bytes currently in | 4771 /// the streaming buffer. |
| 4550 * the streaming buffer. | |
| 4551 */ | |
| 4552 core.String estimatedBytes; | 4772 core.String estimatedBytes; |
| 4553 /** | 4773 |
| 4554 * [Output-only] A lower-bound estimate of the number of rows currently in the | 4774 /// [Output-only] A lower-bound estimate of the number of rows currently in |
| 4555 * streaming buffer. | 4775 /// the streaming buffer. |
| 4556 */ | |
| 4557 core.String estimatedRows; | 4776 core.String estimatedRows; |
| 4558 /** | 4777 |
| 4559 * [Output-only] Contains the timestamp of the oldest entry in the streaming | 4778 /// [Output-only] Contains the timestamp of the oldest entry in the streaming |
| 4560 * buffer, in milliseconds since the epoch, if the streaming buffer is | 4779 /// buffer, in milliseconds since the epoch, if the streaming buffer is |
| 4561 * available. | 4780 /// available. |
| 4562 */ | |
| 4563 core.String oldestEntryTime; | 4781 core.String oldestEntryTime; |
| 4564 | 4782 |
| 4565 Streamingbuffer(); | 4783 Streamingbuffer(); |
| 4566 | 4784 |
| 4567 Streamingbuffer.fromJson(core.Map _json) { | 4785 Streamingbuffer.fromJson(core.Map _json) { |
| 4568 if (_json.containsKey("estimatedBytes")) { | 4786 if (_json.containsKey("estimatedBytes")) { |
| 4569 estimatedBytes = _json["estimatedBytes"]; | 4787 estimatedBytes = _json["estimatedBytes"]; |
| 4570 } | 4788 } |
| 4571 if (_json.containsKey("estimatedRows")) { | 4789 if (_json.containsKey("estimatedRows")) { |
| 4572 estimatedRows = _json["estimatedRows"]; | 4790 estimatedRows = _json["estimatedRows"]; |
| 4573 } | 4791 } |
| 4574 if (_json.containsKey("oldestEntryTime")) { | 4792 if (_json.containsKey("oldestEntryTime")) { |
| 4575 oldestEntryTime = _json["oldestEntryTime"]; | 4793 oldestEntryTime = _json["oldestEntryTime"]; |
| 4576 } | 4794 } |
| 4577 } | 4795 } |
| 4578 | 4796 |
| 4579 core.Map<core.String, core.Object> toJson() { | 4797 core.Map<core.String, core.Object> toJson() { |
| 4580 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 4798 final core.Map<core.String, core.Object> _json = |
| 4799 new core.Map<core.String, core.Object>(); |
| 4581 if (estimatedBytes != null) { | 4800 if (estimatedBytes != null) { |
| 4582 _json["estimatedBytes"] = estimatedBytes; | 4801 _json["estimatedBytes"] = estimatedBytes; |
| 4583 } | 4802 } |
| 4584 if (estimatedRows != null) { | 4803 if (estimatedRows != null) { |
| 4585 _json["estimatedRows"] = estimatedRows; | 4804 _json["estimatedRows"] = estimatedRows; |
| 4586 } | 4805 } |
| 4587 if (oldestEntryTime != null) { | 4806 if (oldestEntryTime != null) { |
| 4588 _json["oldestEntryTime"] = oldestEntryTime; | 4807 _json["oldestEntryTime"] = oldestEntryTime; |
| 4589 } | 4808 } |
| 4590 return _json; | 4809 return _json; |
| 4591 } | 4810 } |
| 4592 } | 4811 } |
| 4593 | 4812 |
| 4594 class Table { | 4813 class Table { |
| 4595 /** | 4814 /// [Output-only] The time when this table was created, in milliseconds since |
| 4596 * [Output-only] The time when this table was created, in milliseconds since | 4815 /// the epoch. |
| 4597 * the epoch. | |
| 4598 */ | |
| 4599 core.String creationTime; | 4816 core.String creationTime; |
| 4600 /** [Optional] A user-friendly description of this table. */ | 4817 |
| 4818 /// [Optional] A user-friendly description of this table. |
| 4601 core.String description; | 4819 core.String description; |
| 4602 /** [Output-only] A hash of this resource. */ | 4820 |
| 4821 /// [Experimental] Custom encryption configuration (e.g., Cloud KMS keys). |
| 4822 EncryptionConfiguration encryptionConfiguration; |
| 4823 |
| 4824 /// [Output-only] A hash of this resource. |
| 4603 core.String etag; | 4825 core.String etag; |
| 4604 /** | 4826 |
| 4605 * [Optional] The time when this table expires, in milliseconds since the | 4827 /// [Optional] The time when this table expires, in milliseconds since the |
| 4606 * epoch. If not present, the table will persist indefinitely. Expired tables | 4828 /// epoch. If not present, the table will persist indefinitely. Expired |
| 4607 * will be deleted and their storage reclaimed. | 4829 /// tables will be deleted and their storage reclaimed. |
| 4608 */ | |
| 4609 core.String expirationTime; | 4830 core.String expirationTime; |
| 4610 /** | 4831 |
| 4611 * [Optional] Describes the data format, location, and other properties of a | 4832 /// [Optional] Describes the data format, location, and other properties of a |
| 4612 * table stored outside of BigQuery. By defining these properties, the data | 4833 /// table stored outside of BigQuery. By defining these properties, the data |
| 4613 * source can then be queried as if it were a standard BigQuery table. | 4834 /// source can then be queried as if it were a standard BigQuery table. |
| 4614 */ | |
| 4615 ExternalDataConfiguration externalDataConfiguration; | 4835 ExternalDataConfiguration externalDataConfiguration; |
| 4616 /** [Optional] A descriptive name for this table. */ | 4836 |
| 4837 /// [Optional] A descriptive name for this table. |
| 4617 core.String friendlyName; | 4838 core.String friendlyName; |
| 4618 /** [Output-only] An opaque ID uniquely identifying the table. */ | 4839 |
| 4840 /// [Output-only] An opaque ID uniquely identifying the table. |
| 4619 core.String id; | 4841 core.String id; |
| 4620 /** [Output-only] The type of the resource. */ | 4842 |
| 4843 /// [Output-only] The type of the resource. |
| 4621 core.String kind; | 4844 core.String kind; |
| 4622 /** | 4845 |
| 4623 * [Experimental] The labels associated with this table. You can use these to | 4846 /// [Experimental] The labels associated with this table. You can use these |
| 4624 * organize and group your tables. Label keys and values can be no longer than | 4847 /// to organize and group your tables. Label keys and values can be no longer |
| 4625 * 63 characters, can only contain lowercase letters, numeric characters, | 4848 /// than 63 characters, can only contain lowercase letters, numeric |
| 4626 * underscores and dashes. International characters are allowed. Label values | 4849 /// characters, underscores and dashes. International characters are allowed. |
| 4627 * are optional. Label keys must start with a letter and each label in the | 4850 /// Label values are optional. Label keys must start with a letter and each |
| 4628 * list must have a different key. | 4851 /// label in the list must have a different key. |
| 4629 */ | |
| 4630 core.Map<core.String, core.String> labels; | 4852 core.Map<core.String, core.String> labels; |
| 4631 /** | 4853 |
| 4632 * [Output-only] The time when this table was last modified, in milliseconds | 4854 /// [Output-only] The time when this table was last modified, in milliseconds |
| 4633 * since the epoch. | 4855 /// since the epoch. |
| 4634 */ | |
| 4635 core.String lastModifiedTime; | 4856 core.String lastModifiedTime; |
| 4636 /** | 4857 |
| 4637 * [Output-only] The geographic location where the table resides. This value | 4858 /// [Output-only] The geographic location where the table resides. This value |
| 4638 * is inherited from the dataset. | 4859 /// is inherited from the dataset. |
| 4639 */ | |
| 4640 core.String location; | 4860 core.String location; |
| 4641 /** | 4861 |
| 4642 * [Output-only] The size of this table in bytes, excluding any data in the | 4862 /// [Output-only] The size of this table in bytes, excluding any data in the |
| 4643 * streaming buffer. | 4863 /// streaming buffer. |
| 4644 */ | |
| 4645 core.String numBytes; | 4864 core.String numBytes; |
| 4646 /** | 4865 |
| 4647 * [Output-only] The number of bytes in the table that are considered | 4866 /// [Output-only] The number of bytes in the table that are considered |
| 4648 * "long-term storage". | 4867 /// "long-term storage". |
| 4649 */ | |
| 4650 core.String numLongTermBytes; | 4868 core.String numLongTermBytes; |
| 4651 /** | 4869 |
| 4652 * [Output-only] The number of rows of data in this table, excluding any data | 4870 /// [Output-only] The number of rows of data in this table, excluding any |
| 4653 * in the streaming buffer. | 4871 /// data in the streaming buffer. |
| 4654 */ | |
| 4655 core.String numRows; | 4872 core.String numRows; |
| 4656 /** [Optional] Describes the schema of this table. */ | 4873 |
| 4874 /// [Optional] Describes the schema of this table. |
| 4657 TableSchema schema; | 4875 TableSchema schema; |
| 4658 /** [Output-only] A URL that can be used to access this resource again. */ | 4876 |
| 4877 /// [Output-only] A URL that can be used to access this resource again. |
| 4659 core.String selfLink; | 4878 core.String selfLink; |
| 4660 /** | 4879 |
| 4661 * [Output-only] Contains information regarding this table's streaming buffer, | 4880 /// [Output-only] Contains information regarding this table's streaming |
| 4662 * if one is present. This field will be absent if the table is not being | 4881 /// buffer, if one is present. This field will be absent if the table is not |
| 4663 * streamed to or if there is no data in the streaming buffer. | 4882 /// being streamed to or if there is no data in the streaming buffer. |
| 4664 */ | |
| 4665 Streamingbuffer streamingBuffer; | 4883 Streamingbuffer streamingBuffer; |
| 4666 /** [Required] Reference describing the ID of this table. */ | 4884 |
| 4885 /// [Required] Reference describing the ID of this table. |
| 4667 TableReference tableReference; | 4886 TableReference tableReference; |
| 4668 /** | 4887 |
| 4669 * [Experimental] If specified, configures time-based partitioning for this | 4888 /// [Experimental] If specified, configures time-based partitioning for this |
| 4670 * table. | 4889 /// table. |
| 4671 */ | |
| 4672 TimePartitioning timePartitioning; | 4890 TimePartitioning timePartitioning; |
| 4673 /** | 4891 |
| 4674 * [Output-only] Describes the table type. The following values are supported: | 4892 /// [Output-only] Describes the table type. The following values are |
| 4675 * TABLE: A normal BigQuery table. VIEW: A virtual table defined by a SQL | 4893 /// supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined |
| 4676 * query. EXTERNAL: A table that references data stored in an external storage | 4894 /// by a SQL query. EXTERNAL: A table that references data stored in an |
| 4677 * system, such as Google Cloud Storage. The default value is TABLE. | 4895 /// external storage system, such as Google Cloud Storage. The default value |
| 4678 */ | 4896 /// is TABLE. |
| 4679 core.String type; | 4897 core.String type; |
| 4680 /** [Optional] The view definition. */ | 4898 |
| 4899 /// [Optional] The view definition. |
| 4681 ViewDefinition view; | 4900 ViewDefinition view; |
| 4682 | 4901 |
| 4683 Table(); | 4902 Table(); |
| 4684 | 4903 |
| 4685 Table.fromJson(core.Map _json) { | 4904 Table.fromJson(core.Map _json) { |
| 4686 if (_json.containsKey("creationTime")) { | 4905 if (_json.containsKey("creationTime")) { |
| 4687 creationTime = _json["creationTime"]; | 4906 creationTime = _json["creationTime"]; |
| 4688 } | 4907 } |
| 4689 if (_json.containsKey("description")) { | 4908 if (_json.containsKey("description")) { |
| 4690 description = _json["description"]; | 4909 description = _json["description"]; |
| 4691 } | 4910 } |
| 4911 if (_json.containsKey("encryptionConfiguration")) { |
| 4912 encryptionConfiguration = new EncryptionConfiguration.fromJson( |
| 4913 _json["encryptionConfiguration"]); |
| 4914 } |
| 4692 if (_json.containsKey("etag")) { | 4915 if (_json.containsKey("etag")) { |
| 4693 etag = _json["etag"]; | 4916 etag = _json["etag"]; |
| 4694 } | 4917 } |
| 4695 if (_json.containsKey("expirationTime")) { | 4918 if (_json.containsKey("expirationTime")) { |
| 4696 expirationTime = _json["expirationTime"]; | 4919 expirationTime = _json["expirationTime"]; |
| 4697 } | 4920 } |
| 4698 if (_json.containsKey("externalDataConfiguration")) { | 4921 if (_json.containsKey("externalDataConfiguration")) { |
| 4699 externalDataConfiguration = new ExternalDataConfiguration.fromJson(_json["
externalDataConfiguration"]); | 4922 externalDataConfiguration = new ExternalDataConfiguration.fromJson( |
| 4923 _json["externalDataConfiguration"]); |
| 4700 } | 4924 } |
| 4701 if (_json.containsKey("friendlyName")) { | 4925 if (_json.containsKey("friendlyName")) { |
| 4702 friendlyName = _json["friendlyName"]; | 4926 friendlyName = _json["friendlyName"]; |
| 4703 } | 4927 } |
| 4704 if (_json.containsKey("id")) { | 4928 if (_json.containsKey("id")) { |
| 4705 id = _json["id"]; | 4929 id = _json["id"]; |
| 4706 } | 4930 } |
| 4707 if (_json.containsKey("kind")) { | 4931 if (_json.containsKey("kind")) { |
| 4708 kind = _json["kind"]; | 4932 kind = _json["kind"]; |
| 4709 } | 4933 } |
| (...skipping 21 matching lines...) Expand all Loading... |
| 4731 if (_json.containsKey("selfLink")) { | 4955 if (_json.containsKey("selfLink")) { |
| 4732 selfLink = _json["selfLink"]; | 4956 selfLink = _json["selfLink"]; |
| 4733 } | 4957 } |
| 4734 if (_json.containsKey("streamingBuffer")) { | 4958 if (_json.containsKey("streamingBuffer")) { |
| 4735 streamingBuffer = new Streamingbuffer.fromJson(_json["streamingBuffer"]); | 4959 streamingBuffer = new Streamingbuffer.fromJson(_json["streamingBuffer"]); |
| 4736 } | 4960 } |
| 4737 if (_json.containsKey("tableReference")) { | 4961 if (_json.containsKey("tableReference")) { |
| 4738 tableReference = new TableReference.fromJson(_json["tableReference"]); | 4962 tableReference = new TableReference.fromJson(_json["tableReference"]); |
| 4739 } | 4963 } |
| 4740 if (_json.containsKey("timePartitioning")) { | 4964 if (_json.containsKey("timePartitioning")) { |
| 4741 timePartitioning = new TimePartitioning.fromJson(_json["timePartitioning"]
); | 4965 timePartitioning = |
| 4966 new TimePartitioning.fromJson(_json["timePartitioning"]); |
| 4742 } | 4967 } |
| 4743 if (_json.containsKey("type")) { | 4968 if (_json.containsKey("type")) { |
| 4744 type = _json["type"]; | 4969 type = _json["type"]; |
| 4745 } | 4970 } |
| 4746 if (_json.containsKey("view")) { | 4971 if (_json.containsKey("view")) { |
| 4747 view = new ViewDefinition.fromJson(_json["view"]); | 4972 view = new ViewDefinition.fromJson(_json["view"]); |
| 4748 } | 4973 } |
| 4749 } | 4974 } |
| 4750 | 4975 |
| 4751 core.Map<core.String, core.Object> toJson() { | 4976 core.Map<core.String, core.Object> toJson() { |
| 4752 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 4977 final core.Map<core.String, core.Object> _json = |
| 4978 new core.Map<core.String, core.Object>(); |
| 4753 if (creationTime != null) { | 4979 if (creationTime != null) { |
| 4754 _json["creationTime"] = creationTime; | 4980 _json["creationTime"] = creationTime; |
| 4755 } | 4981 } |
| 4756 if (description != null) { | 4982 if (description != null) { |
| 4757 _json["description"] = description; | 4983 _json["description"] = description; |
| 4758 } | 4984 } |
| 4985 if (encryptionConfiguration != null) { |
| 4986 _json["encryptionConfiguration"] = (encryptionConfiguration).toJson(); |
| 4987 } |
| 4759 if (etag != null) { | 4988 if (etag != null) { |
| 4760 _json["etag"] = etag; | 4989 _json["etag"] = etag; |
| 4761 } | 4990 } |
| 4762 if (expirationTime != null) { | 4991 if (expirationTime != null) { |
| 4763 _json["expirationTime"] = expirationTime; | 4992 _json["expirationTime"] = expirationTime; |
| 4764 } | 4993 } |
| 4765 if (externalDataConfiguration != null) { | 4994 if (externalDataConfiguration != null) { |
| 4766 _json["externalDataConfiguration"] = (externalDataConfiguration).toJson(); | 4995 _json["externalDataConfiguration"] = (externalDataConfiguration).toJson(); |
| 4767 } | 4996 } |
| 4768 if (friendlyName != null) { | 4997 if (friendlyName != null) { |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4811 _json["type"] = type; | 5040 _json["type"] = type; |
| 4812 } | 5041 } |
| 4813 if (view != null) { | 5042 if (view != null) { |
| 4814 _json["view"] = (view).toJson(); | 5043 _json["view"] = (view).toJson(); |
| 4815 } | 5044 } |
| 4816 return _json; | 5045 return _json; |
| 4817 } | 5046 } |
| 4818 } | 5047 } |
| 4819 | 5048 |
| 4820 class TableCell { | 5049 class TableCell { |
| 4821 /** | 5050 /// |
| 4822 * | 5051 /// |
| 4823 * | 5052 /// The values for Object must be JSON objects. It can consist of `num`, |
| 4824 * The values for Object must be JSON objects. It can consist of `num`, | 5053 /// `String`, `bool` and `null` as well as `Map` and `List` values. |
| 4825 * `String`, `bool` and `null` as well as `Map` and `List` values. | |
| 4826 */ | |
| 4827 core.Object v; | 5054 core.Object v; |
| 4828 | 5055 |
| 4829 TableCell(); | 5056 TableCell(); |
| 4830 | 5057 |
| 4831 TableCell.fromJson(core.Map _json) { | 5058 TableCell.fromJson(core.Map _json) { |
| 4832 if (_json.containsKey("v")) { | 5059 if (_json.containsKey("v")) { |
| 4833 v = _json["v"]; | 5060 v = _json["v"]; |
| 4834 } | 5061 } |
| 4835 } | 5062 } |
| 4836 | 5063 |
| 4837 core.Map<core.String, core.Object> toJson() { | 5064 core.Map<core.String, core.Object> toJson() { |
| 4838 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5065 final core.Map<core.String, core.Object> _json = |
| 5066 new core.Map<core.String, core.Object>(); |
| 4839 if (v != null) { | 5067 if (v != null) { |
| 4840 _json["v"] = v; | 5068 _json["v"] = v; |
| 4841 } | 5069 } |
| 4842 return _json; | 5070 return _json; |
| 4843 } | 5071 } |
| 4844 } | 5072 } |
| 4845 | 5073 |
| 4846 class TableDataInsertAllRequestRows { | 5074 class TableDataInsertAllRequestRows { |
| 4847 /** | 5075 /// [Optional] A unique ID for each row. BigQuery uses this property to |
| 4848 * [Optional] A unique ID for each row. BigQuery uses this property to detect | 5076 /// detect duplicate insertion requests on a best-effort basis. |
| 4849 * duplicate insertion requests on a best-effort basis. | |
| 4850 */ | |
| 4851 core.String insertId; | 5077 core.String insertId; |
| 4852 /** | 5078 |
| 4853 * [Required] A JSON object that contains a row of data. The object's | 5079 /// [Required] A JSON object that contains a row of data. The object's |
| 4854 * properties and values must match the destination table's schema. | 5080 /// properties and values must match the destination table's schema. |
| 4855 */ | |
| 4856 JsonObject json; | 5081 JsonObject json; |
| 4857 | 5082 |
| 4858 TableDataInsertAllRequestRows(); | 5083 TableDataInsertAllRequestRows(); |
| 4859 | 5084 |
| 4860 TableDataInsertAllRequestRows.fromJson(core.Map _json) { | 5085 TableDataInsertAllRequestRows.fromJson(core.Map _json) { |
| 4861 if (_json.containsKey("insertId")) { | 5086 if (_json.containsKey("insertId")) { |
| 4862 insertId = _json["insertId"]; | 5087 insertId = _json["insertId"]; |
| 4863 } | 5088 } |
| 4864 if (_json.containsKey("json")) { | 5089 if (_json.containsKey("json")) { |
| 4865 json = new JsonObject.fromJson(_json["json"]); | 5090 json = new JsonObject.fromJson(_json["json"]); |
| 4866 } | 5091 } |
| 4867 } | 5092 } |
| 4868 | 5093 |
| 4869 core.Map<core.String, core.Object> toJson() { | 5094 core.Map<core.String, core.Object> toJson() { |
| 4870 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5095 final core.Map<core.String, core.Object> _json = |
| 5096 new core.Map<core.String, core.Object>(); |
| 4871 if (insertId != null) { | 5097 if (insertId != null) { |
| 4872 _json["insertId"] = insertId; | 5098 _json["insertId"] = insertId; |
| 4873 } | 5099 } |
| 4874 if (json != null) { | 5100 if (json != null) { |
| 4875 _json["json"] = json; | 5101 _json["json"] = json; |
| 4876 } | 5102 } |
| 4877 return _json; | 5103 return _json; |
| 4878 } | 5104 } |
| 4879 } | 5105 } |
| 4880 | 5106 |
| 4881 class TableDataInsertAllRequest { | 5107 class TableDataInsertAllRequest { |
| 4882 /** | 5108 /// [Optional] Accept rows that contain values that do not match the schema. |
| 4883 * [Optional] Accept rows that contain values that do not match the schema. | 5109 /// The unknown values are ignored. Default is false, which treats unknown |
| 4884 * The unknown values are ignored. Default is false, which treats unknown | 5110 /// values as errors. |
| 4885 * values as errors. | |
| 4886 */ | |
| 4887 core.bool ignoreUnknownValues; | 5111 core.bool ignoreUnknownValues; |
| 4888 /** The resource type of the response. */ | 5112 |
| 5113 /// The resource type of the response. |
| 4889 core.String kind; | 5114 core.String kind; |
| 4890 /** The rows to insert. */ | 5115 |
| 5116 /// The rows to insert. |
| 4891 core.List<TableDataInsertAllRequestRows> rows; | 5117 core.List<TableDataInsertAllRequestRows> rows; |
| 4892 /** | 5118 |
| 4893 * [Optional] Insert all valid rows of a request, even if invalid rows exist. | 5119 /// [Optional] Insert all valid rows of a request, even if invalid rows |
| 4894 * The default value is false, which causes the entire request to fail if any | 5120 /// exist. The default value is false, which causes the entire request to |
| 4895 * invalid rows exist. | 5121 /// fail if any invalid rows exist. |
| 4896 */ | |
| 4897 core.bool skipInvalidRows; | 5122 core.bool skipInvalidRows; |
| 4898 /** | 5123 |
| 4899 * [Experimental] If specified, treats the destination table as a base | 5124 /// [Experimental] If specified, treats the destination table as a base |
| 4900 * template, and inserts the rows into an instance table named | 5125 /// template, and inserts the rows into an instance table named |
| 4901 * "{destination}{templateSuffix}". BigQuery will manage creation of the | 5126 /// "{destination}{templateSuffix}". BigQuery will manage creation of the |
| 4902 * instance table, using the schema of the base template table. See | 5127 /// instance table, using the schema of the base template table. See |
| 4903 * https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tab
les | 5128 /// https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-ta
bles |
| 4904 * for considerations when working with templates tables. | 5129 /// for considerations when working with templates tables. |
| 4905 */ | |
| 4906 core.String templateSuffix; | 5130 core.String templateSuffix; |
| 4907 | 5131 |
| 4908 TableDataInsertAllRequest(); | 5132 TableDataInsertAllRequest(); |
| 4909 | 5133 |
| 4910 TableDataInsertAllRequest.fromJson(core.Map _json) { | 5134 TableDataInsertAllRequest.fromJson(core.Map _json) { |
| 4911 if (_json.containsKey("ignoreUnknownValues")) { | 5135 if (_json.containsKey("ignoreUnknownValues")) { |
| 4912 ignoreUnknownValues = _json["ignoreUnknownValues"]; | 5136 ignoreUnknownValues = _json["ignoreUnknownValues"]; |
| 4913 } | 5137 } |
| 4914 if (_json.containsKey("kind")) { | 5138 if (_json.containsKey("kind")) { |
| 4915 kind = _json["kind"]; | 5139 kind = _json["kind"]; |
| 4916 } | 5140 } |
| 4917 if (_json.containsKey("rows")) { | 5141 if (_json.containsKey("rows")) { |
| 4918 rows = _json["rows"].map((value) => new TableDataInsertAllRequestRows.from
Json(value)).toList(); | 5142 rows = _json["rows"] |
| 5143 .map((value) => new TableDataInsertAllRequestRows.fromJson(value)) |
| 5144 .toList(); |
| 4919 } | 5145 } |
| 4920 if (_json.containsKey("skipInvalidRows")) { | 5146 if (_json.containsKey("skipInvalidRows")) { |
| 4921 skipInvalidRows = _json["skipInvalidRows"]; | 5147 skipInvalidRows = _json["skipInvalidRows"]; |
| 4922 } | 5148 } |
| 4923 if (_json.containsKey("templateSuffix")) { | 5149 if (_json.containsKey("templateSuffix")) { |
| 4924 templateSuffix = _json["templateSuffix"]; | 5150 templateSuffix = _json["templateSuffix"]; |
| 4925 } | 5151 } |
| 4926 } | 5152 } |
| 4927 | 5153 |
| 4928 core.Map<core.String, core.Object> toJson() { | 5154 core.Map<core.String, core.Object> toJson() { |
| 4929 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5155 final core.Map<core.String, core.Object> _json = |
| 5156 new core.Map<core.String, core.Object>(); |
| 4930 if (ignoreUnknownValues != null) { | 5157 if (ignoreUnknownValues != null) { |
| 4931 _json["ignoreUnknownValues"] = ignoreUnknownValues; | 5158 _json["ignoreUnknownValues"] = ignoreUnknownValues; |
| 4932 } | 5159 } |
| 4933 if (kind != null) { | 5160 if (kind != null) { |
| 4934 _json["kind"] = kind; | 5161 _json["kind"] = kind; |
| 4935 } | 5162 } |
| 4936 if (rows != null) { | 5163 if (rows != null) { |
| 4937 _json["rows"] = rows.map((value) => (value).toJson()).toList(); | 5164 _json["rows"] = rows.map((value) => (value).toJson()).toList(); |
| 4938 } | 5165 } |
| 4939 if (skipInvalidRows != null) { | 5166 if (skipInvalidRows != null) { |
| 4940 _json["skipInvalidRows"] = skipInvalidRows; | 5167 _json["skipInvalidRows"] = skipInvalidRows; |
| 4941 } | 5168 } |
| 4942 if (templateSuffix != null) { | 5169 if (templateSuffix != null) { |
| 4943 _json["templateSuffix"] = templateSuffix; | 5170 _json["templateSuffix"] = templateSuffix; |
| 4944 } | 5171 } |
| 4945 return _json; | 5172 return _json; |
| 4946 } | 5173 } |
| 4947 } | 5174 } |
| 4948 | 5175 |
| 4949 class TableDataInsertAllResponseInsertErrors { | 5176 class TableDataInsertAllResponseInsertErrors { |
| 4950 /** Error information for the row indicated by the index property. */ | 5177 /// Error information for the row indicated by the index property. |
| 4951 core.List<ErrorProto> errors; | 5178 core.List<ErrorProto> errors; |
| 4952 /** The index of the row that error applies to. */ | 5179 |
| 5180 /// The index of the row that error applies to. |
| 4953 core.int index; | 5181 core.int index; |
| 4954 | 5182 |
| 4955 TableDataInsertAllResponseInsertErrors(); | 5183 TableDataInsertAllResponseInsertErrors(); |
| 4956 | 5184 |
| 4957 TableDataInsertAllResponseInsertErrors.fromJson(core.Map _json) { | 5185 TableDataInsertAllResponseInsertErrors.fromJson(core.Map _json) { |
| 4958 if (_json.containsKey("errors")) { | 5186 if (_json.containsKey("errors")) { |
| 4959 errors = _json["errors"].map((value) => new ErrorProto.fromJson(value)).to
List(); | 5187 errors = _json["errors"] |
| 5188 .map((value) => new ErrorProto.fromJson(value)) |
| 5189 .toList(); |
| 4960 } | 5190 } |
| 4961 if (_json.containsKey("index")) { | 5191 if (_json.containsKey("index")) { |
| 4962 index = _json["index"]; | 5192 index = _json["index"]; |
| 4963 } | 5193 } |
| 4964 } | 5194 } |
| 4965 | 5195 |
| 4966 core.Map<core.String, core.Object> toJson() { | 5196 core.Map<core.String, core.Object> toJson() { |
| 4967 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5197 final core.Map<core.String, core.Object> _json = |
| 5198 new core.Map<core.String, core.Object>(); |
| 4968 if (errors != null) { | 5199 if (errors != null) { |
| 4969 _json["errors"] = errors.map((value) => (value).toJson()).toList(); | 5200 _json["errors"] = errors.map((value) => (value).toJson()).toList(); |
| 4970 } | 5201 } |
| 4971 if (index != null) { | 5202 if (index != null) { |
| 4972 _json["index"] = index; | 5203 _json["index"] = index; |
| 4973 } | 5204 } |
| 4974 return _json; | 5205 return _json; |
| 4975 } | 5206 } |
| 4976 } | 5207 } |
| 4977 | 5208 |
| 4978 class TableDataInsertAllResponse { | 5209 class TableDataInsertAllResponse { |
| 4979 /** An array of errors for rows that were not inserted. */ | 5210 /// An array of errors for rows that were not inserted. |
| 4980 core.List<TableDataInsertAllResponseInsertErrors> insertErrors; | 5211 core.List<TableDataInsertAllResponseInsertErrors> insertErrors; |
| 4981 /** The resource type of the response. */ | 5212 |
| 5213 /// The resource type of the response. |
| 4982 core.String kind; | 5214 core.String kind; |
| 4983 | 5215 |
| 4984 TableDataInsertAllResponse(); | 5216 TableDataInsertAllResponse(); |
| 4985 | 5217 |
| 4986 TableDataInsertAllResponse.fromJson(core.Map _json) { | 5218 TableDataInsertAllResponse.fromJson(core.Map _json) { |
| 4987 if (_json.containsKey("insertErrors")) { | 5219 if (_json.containsKey("insertErrors")) { |
| 4988 insertErrors = _json["insertErrors"].map((value) => new TableDataInsertAll
ResponseInsertErrors.fromJson(value)).toList(); | 5220 insertErrors = _json["insertErrors"] |
| 5221 .map((value) => |
| 5222 new TableDataInsertAllResponseInsertErrors.fromJson(value)) |
| 5223 .toList(); |
| 4989 } | 5224 } |
| 4990 if (_json.containsKey("kind")) { | 5225 if (_json.containsKey("kind")) { |
| 4991 kind = _json["kind"]; | 5226 kind = _json["kind"]; |
| 4992 } | 5227 } |
| 4993 } | 5228 } |
| 4994 | 5229 |
| 4995 core.Map<core.String, core.Object> toJson() { | 5230 core.Map<core.String, core.Object> toJson() { |
| 4996 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5231 final core.Map<core.String, core.Object> _json = |
| 5232 new core.Map<core.String, core.Object>(); |
| 4997 if (insertErrors != null) { | 5233 if (insertErrors != null) { |
| 4998 _json["insertErrors"] = insertErrors.map((value) => (value).toJson()).toLi
st(); | 5234 _json["insertErrors"] = |
| 5235 insertErrors.map((value) => (value).toJson()).toList(); |
| 4999 } | 5236 } |
| 5000 if (kind != null) { | 5237 if (kind != null) { |
| 5001 _json["kind"] = kind; | 5238 _json["kind"] = kind; |
| 5002 } | 5239 } |
| 5003 return _json; | 5240 return _json; |
| 5004 } | 5241 } |
| 5005 } | 5242 } |
| 5006 | 5243 |
| 5007 class TableDataList { | 5244 class TableDataList { |
| 5008 /** A hash of this page of results. */ | 5245 /// A hash of this page of results. |
| 5009 core.String etag; | 5246 core.String etag; |
| 5010 /** The resource type of the response. */ | 5247 |
| 5248 /// The resource type of the response. |
| 5011 core.String kind; | 5249 core.String kind; |
| 5012 /** | 5250 |
| 5013 * A token used for paging results. Providing this token instead of the | 5251 /// A token used for paging results. Providing this token instead of the |
| 5014 * startIndex parameter can help you retrieve stable results when an | 5252 /// startIndex parameter can help you retrieve stable results when an |
| 5015 * underlying table is changing. | 5253 /// underlying table is changing. |
| 5016 */ | |
| 5017 core.String pageToken; | 5254 core.String pageToken; |
| 5018 /** Rows of results. */ | 5255 |
| 5256 /// Rows of results. |
| 5019 core.List<TableRow> rows; | 5257 core.List<TableRow> rows; |
| 5020 /** The total number of rows in the complete table. */ | 5258 |
| 5259 /// The total number of rows in the complete table. |
| 5021 core.String totalRows; | 5260 core.String totalRows; |
| 5022 | 5261 |
| 5023 TableDataList(); | 5262 TableDataList(); |
| 5024 | 5263 |
| 5025 TableDataList.fromJson(core.Map _json) { | 5264 TableDataList.fromJson(core.Map _json) { |
| 5026 if (_json.containsKey("etag")) { | 5265 if (_json.containsKey("etag")) { |
| 5027 etag = _json["etag"]; | 5266 etag = _json["etag"]; |
| 5028 } | 5267 } |
| 5029 if (_json.containsKey("kind")) { | 5268 if (_json.containsKey("kind")) { |
| 5030 kind = _json["kind"]; | 5269 kind = _json["kind"]; |
| 5031 } | 5270 } |
| 5032 if (_json.containsKey("pageToken")) { | 5271 if (_json.containsKey("pageToken")) { |
| 5033 pageToken = _json["pageToken"]; | 5272 pageToken = _json["pageToken"]; |
| 5034 } | 5273 } |
| 5035 if (_json.containsKey("rows")) { | 5274 if (_json.containsKey("rows")) { |
| 5036 rows = _json["rows"].map((value) => new TableRow.fromJson(value)).toList()
; | 5275 rows = |
| 5276 _json["rows"].map((value) => new TableRow.fromJson(value)).toList(); |
| 5037 } | 5277 } |
| 5038 if (_json.containsKey("totalRows")) { | 5278 if (_json.containsKey("totalRows")) { |
| 5039 totalRows = _json["totalRows"]; | 5279 totalRows = _json["totalRows"]; |
| 5040 } | 5280 } |
| 5041 } | 5281 } |
| 5042 | 5282 |
| 5043 core.Map<core.String, core.Object> toJson() { | 5283 core.Map<core.String, core.Object> toJson() { |
| 5044 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5284 final core.Map<core.String, core.Object> _json = |
| 5285 new core.Map<core.String, core.Object>(); |
| 5045 if (etag != null) { | 5286 if (etag != null) { |
| 5046 _json["etag"] = etag; | 5287 _json["etag"] = etag; |
| 5047 } | 5288 } |
| 5048 if (kind != null) { | 5289 if (kind != null) { |
| 5049 _json["kind"] = kind; | 5290 _json["kind"] = kind; |
| 5050 } | 5291 } |
| 5051 if (pageToken != null) { | 5292 if (pageToken != null) { |
| 5052 _json["pageToken"] = pageToken; | 5293 _json["pageToken"] = pageToken; |
| 5053 } | 5294 } |
| 5054 if (rows != null) { | 5295 if (rows != null) { |
| 5055 _json["rows"] = rows.map((value) => (value).toJson()).toList(); | 5296 _json["rows"] = rows.map((value) => (value).toJson()).toList(); |
| 5056 } | 5297 } |
| 5057 if (totalRows != null) { | 5298 if (totalRows != null) { |
| 5058 _json["totalRows"] = totalRows; | 5299 _json["totalRows"] = totalRows; |
| 5059 } | 5300 } |
| 5060 return _json; | 5301 return _json; |
| 5061 } | 5302 } |
| 5062 } | 5303 } |
| 5063 | 5304 |
| 5064 class TableFieldSchema { | 5305 class TableFieldSchema { |
| 5065 /** | 5306 /// [Optional] The field description. The maximum length is 1,024 characters. |
| 5066 * [Optional] The field description. The maximum length is 1,024 characters. | |
| 5067 */ | |
| 5068 core.String description; | 5307 core.String description; |
| 5069 /** | 5308 |
| 5070 * [Optional] Describes the nested schema fields if the type property is set | 5309 /// [Optional] Describes the nested schema fields if the type property is set |
| 5071 * to RECORD. | 5310 /// to RECORD. |
| 5072 */ | |
| 5073 core.List<TableFieldSchema> fields; | 5311 core.List<TableFieldSchema> fields; |
| 5074 /** | 5312 |
| 5075 * [Optional] The field mode. Possible values include NULLABLE, REQUIRED and | 5313 /// [Optional] The field mode. Possible values include NULLABLE, REQUIRED and |
| 5076 * REPEATED. The default value is NULLABLE. | 5314 /// REPEATED. The default value is NULLABLE. |
| 5077 */ | |
| 5078 core.String mode; | 5315 core.String mode; |
| 5079 /** | 5316 |
| 5080 * [Required] The field name. The name must contain only letters (a-z, A-Z), | 5317 /// [Required] The field name. The name must contain only letters (a-z, A-Z), |
| 5081 * numbers (0-9), or underscores (_), and must start with a letter or | 5318 /// numbers (0-9), or underscores (_), and must start with a letter or |
| 5082 * underscore. The maximum length is 128 characters. | 5319 /// underscore. The maximum length is 128 characters. |
| 5083 */ | |
| 5084 core.String name; | 5320 core.String name; |
| 5085 /** | 5321 |
| 5086 * [Required] The field data type. Possible values include STRING, BYTES, | 5322 /// [Required] The field data type. Possible values include STRING, BYTES, |
| 5087 * INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as FLOAT), BOOLEAN, | 5323 /// INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as FLOAT), |
| 5088 * BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME, DATETIME, RECORD (where | 5324 /// BOOLEAN, BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME, DATETIME, RECORD |
| 5089 * RECORD indicates that the field contains a nested schema) or STRUCT (same | 5325 /// (where RECORD indicates that the field contains a nested schema) or |
| 5090 * as RECORD). | 5326 /// STRUCT (same as RECORD). |
| 5091 */ | |
| 5092 core.String type; | 5327 core.String type; |
| 5093 | 5328 |
| 5094 TableFieldSchema(); | 5329 TableFieldSchema(); |
| 5095 | 5330 |
| 5096 TableFieldSchema.fromJson(core.Map _json) { | 5331 TableFieldSchema.fromJson(core.Map _json) { |
| 5097 if (_json.containsKey("description")) { | 5332 if (_json.containsKey("description")) { |
| 5098 description = _json["description"]; | 5333 description = _json["description"]; |
| 5099 } | 5334 } |
| 5100 if (_json.containsKey("fields")) { | 5335 if (_json.containsKey("fields")) { |
| 5101 fields = _json["fields"].map((value) => new TableFieldSchema.fromJson(valu
e)).toList(); | 5336 fields = _json["fields"] |
| 5337 .map((value) => new TableFieldSchema.fromJson(value)) |
| 5338 .toList(); |
| 5102 } | 5339 } |
| 5103 if (_json.containsKey("mode")) { | 5340 if (_json.containsKey("mode")) { |
| 5104 mode = _json["mode"]; | 5341 mode = _json["mode"]; |
| 5105 } | 5342 } |
| 5106 if (_json.containsKey("name")) { | 5343 if (_json.containsKey("name")) { |
| 5107 name = _json["name"]; | 5344 name = _json["name"]; |
| 5108 } | 5345 } |
| 5109 if (_json.containsKey("type")) { | 5346 if (_json.containsKey("type")) { |
| 5110 type = _json["type"]; | 5347 type = _json["type"]; |
| 5111 } | 5348 } |
| 5112 } | 5349 } |
| 5113 | 5350 |
| 5114 core.Map<core.String, core.Object> toJson() { | 5351 core.Map<core.String, core.Object> toJson() { |
| 5115 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5352 final core.Map<core.String, core.Object> _json = |
| 5353 new core.Map<core.String, core.Object>(); |
| 5116 if (description != null) { | 5354 if (description != null) { |
| 5117 _json["description"] = description; | 5355 _json["description"] = description; |
| 5118 } | 5356 } |
| 5119 if (fields != null) { | 5357 if (fields != null) { |
| 5120 _json["fields"] = fields.map((value) => (value).toJson()).toList(); | 5358 _json["fields"] = fields.map((value) => (value).toJson()).toList(); |
| 5121 } | 5359 } |
| 5122 if (mode != null) { | 5360 if (mode != null) { |
| 5123 _json["mode"] = mode; | 5361 _json["mode"] = mode; |
| 5124 } | 5362 } |
| 5125 if (name != null) { | 5363 if (name != null) { |
| 5126 _json["name"] = name; | 5364 _json["name"] = name; |
| 5127 } | 5365 } |
| 5128 if (type != null) { | 5366 if (type != null) { |
| 5129 _json["type"] = type; | 5367 _json["type"] = type; |
| 5130 } | 5368 } |
| 5131 return _json; | 5369 return _json; |
| 5132 } | 5370 } |
| 5133 } | 5371 } |
| 5134 | 5372 |
| 5135 /** Additional details for a view. */ | 5373 /// Additional details for a view. |
| 5136 class TableListTablesView { | 5374 class TableListTablesView { |
| 5137 /** | 5375 /// True if view is defined in legacy SQL dialect, false if in standard SQL. |
| 5138 * True if view is defined in legacy SQL dialect, false if in standard SQL. | |
| 5139 */ | |
| 5140 core.bool useLegacySql; | 5376 core.bool useLegacySql; |
| 5141 | 5377 |
| 5142 TableListTablesView(); | 5378 TableListTablesView(); |
| 5143 | 5379 |
| 5144 TableListTablesView.fromJson(core.Map _json) { | 5380 TableListTablesView.fromJson(core.Map _json) { |
| 5145 if (_json.containsKey("useLegacySql")) { | 5381 if (_json.containsKey("useLegacySql")) { |
| 5146 useLegacySql = _json["useLegacySql"]; | 5382 useLegacySql = _json["useLegacySql"]; |
| 5147 } | 5383 } |
| 5148 } | 5384 } |
| 5149 | 5385 |
| 5150 core.Map<core.String, core.Object> toJson() { | 5386 core.Map<core.String, core.Object> toJson() { |
| 5151 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5387 final core.Map<core.String, core.Object> _json = |
| 5388 new core.Map<core.String, core.Object>(); |
| 5152 if (useLegacySql != null) { | 5389 if (useLegacySql != null) { |
| 5153 _json["useLegacySql"] = useLegacySql; | 5390 _json["useLegacySql"] = useLegacySql; |
| 5154 } | 5391 } |
| 5155 return _json; | 5392 return _json; |
| 5156 } | 5393 } |
| 5157 } | 5394 } |
| 5158 | 5395 |
| 5159 class TableListTables { | 5396 class TableListTables { |
| 5160 /** The user-friendly name for this table. */ | 5397 /// The user-friendly name for this table. |
| 5161 core.String friendlyName; | 5398 core.String friendlyName; |
| 5162 /** An opaque ID of the table */ | 5399 |
| 5400 /// An opaque ID of the table |
| 5163 core.String id; | 5401 core.String id; |
| 5164 /** The resource type. */ | 5402 |
| 5403 /// The resource type. |
| 5165 core.String kind; | 5404 core.String kind; |
| 5166 /** | 5405 |
| 5167 * [Experimental] The labels associated with this table. You can use these to | 5406 /// [Experimental] The labels associated with this table. You can use these |
| 5168 * organize and group your tables. | 5407 /// to organize and group your tables. |
| 5169 */ | |
| 5170 core.Map<core.String, core.String> labels; | 5408 core.Map<core.String, core.String> labels; |
| 5171 /** A reference uniquely identifying the table. */ | 5409 |
| 5410 /// A reference uniquely identifying the table. |
| 5172 TableReference tableReference; | 5411 TableReference tableReference; |
| 5173 /** [Experimental] The time-based partitioning for this table. */ | 5412 |
| 5413 /// [Experimental] The time-based partitioning for this table. |
| 5174 TimePartitioning timePartitioning; | 5414 TimePartitioning timePartitioning; |
| 5175 /** The type of table. Possible values are: TABLE, VIEW. */ | 5415 |
| 5416 /// The type of table. Possible values are: TABLE, VIEW. |
| 5176 core.String type; | 5417 core.String type; |
| 5177 /** Additional details for a view. */ | 5418 |
| 5419 /// Additional details for a view. |
| 5178 TableListTablesView view; | 5420 TableListTablesView view; |
| 5179 | 5421 |
| 5180 TableListTables(); | 5422 TableListTables(); |
| 5181 | 5423 |
| 5182 TableListTables.fromJson(core.Map _json) { | 5424 TableListTables.fromJson(core.Map _json) { |
| 5183 if (_json.containsKey("friendlyName")) { | 5425 if (_json.containsKey("friendlyName")) { |
| 5184 friendlyName = _json["friendlyName"]; | 5426 friendlyName = _json["friendlyName"]; |
| 5185 } | 5427 } |
| 5186 if (_json.containsKey("id")) { | 5428 if (_json.containsKey("id")) { |
| 5187 id = _json["id"]; | 5429 id = _json["id"]; |
| 5188 } | 5430 } |
| 5189 if (_json.containsKey("kind")) { | 5431 if (_json.containsKey("kind")) { |
| 5190 kind = _json["kind"]; | 5432 kind = _json["kind"]; |
| 5191 } | 5433 } |
| 5192 if (_json.containsKey("labels")) { | 5434 if (_json.containsKey("labels")) { |
| 5193 labels = _json["labels"]; | 5435 labels = _json["labels"]; |
| 5194 } | 5436 } |
| 5195 if (_json.containsKey("tableReference")) { | 5437 if (_json.containsKey("tableReference")) { |
| 5196 tableReference = new TableReference.fromJson(_json["tableReference"]); | 5438 tableReference = new TableReference.fromJson(_json["tableReference"]); |
| 5197 } | 5439 } |
| 5198 if (_json.containsKey("timePartitioning")) { | 5440 if (_json.containsKey("timePartitioning")) { |
| 5199 timePartitioning = new TimePartitioning.fromJson(_json["timePartitioning"]
); | 5441 timePartitioning = |
| 5442 new TimePartitioning.fromJson(_json["timePartitioning"]); |
| 5200 } | 5443 } |
| 5201 if (_json.containsKey("type")) { | 5444 if (_json.containsKey("type")) { |
| 5202 type = _json["type"]; | 5445 type = _json["type"]; |
| 5203 } | 5446 } |
| 5204 if (_json.containsKey("view")) { | 5447 if (_json.containsKey("view")) { |
| 5205 view = new TableListTablesView.fromJson(_json["view"]); | 5448 view = new TableListTablesView.fromJson(_json["view"]); |
| 5206 } | 5449 } |
| 5207 } | 5450 } |
| 5208 | 5451 |
| 5209 core.Map<core.String, core.Object> toJson() { | 5452 core.Map<core.String, core.Object> toJson() { |
| 5210 final core.Map<core.String, core.Object> _json = new core.Map<core.String, c
ore.Object>(); | 5453 final core.Map<core.String, core.Object> _json = |
| 5454 new core.Map<core.String, core.Object>(); |
| 5211 if (friendlyName != null) { | 5455 if (friendlyName != null) { |
| 5212 _json["friendlyName"] = friendlyName; | 5456 _json["friendlyName"] = friendlyName; |
| 5213 } | 5457 } |
| 5214 if (id != null) { | 5458 if (id != null) { |
| 5215 _json["id"] = id; | 5459 _json["id"] = id; |
| 5216 } | 5460 } |
| 5217 if (kind != null) { | 5461 if (kind != null) { |
| 5218 _json["kind"] = kind; | 5462 _json["kind"] = kind; |
| 5219 } | 5463 } |
| 5220 if (labels != null) { | 5464 if (labels != null) { |
| 5221 _json["labels"] = labels; | 5465 _json["labels"] = labels; |
| 5222 } | 5466 } |
| 5223 if (tableReference != null) { | 5467 if (tableReference != null) { |
| 5224 _json["tableReference"] = (tableReference).toJson(); | 5468 _json["tableReference"] = (tableReference).toJson(); |
| 5225 } | 5469 } |
| 5226 if (timePartitioning != null) { | 5470 if (timePartitioning != null) { |
| 5227 _json["timePartitioning"] = (timePartitioning).toJson(); | 5471 _json["timePartitioning"] = (timePartitioning).toJson(); |
| 5228 } | 5472 } |
| 5229 if (type != null) { | 5473 if (type != null) { |
| 5230 _json["type"] = type; | 5474 _json["type"] = type; |
| 5231 } | 5475 } |
| 5232 if (view != null) { | 5476 if (view != null) { |
| 5233 _json["view"] = (view).toJson(); | 5477 _json["view"] = (view).toJson(); |
| 5234 } | 5478 } |
| 5235 return _json; | 5479 return _json; |
| 5236 } | 5480 } |
| 5237 } | 5481 } |
| 5238 | 5482 |
class TableList {
  /// A hash of this page of results.
  core.String etag;

  /// The type of list.
  core.String kind;

  /// A token to request the next page of results.
  core.String nextPageToken;

  /// Tables in the requested dataset.
  core.List<TableListTables> tables;

  /// The total number of tables in the dataset.
  core.int totalItems;

  TableList();

  /// Builds a [TableList] from [json], copying only the keys present.
  TableList.fromJson(core.Map json) {
    if (json.containsKey("etag")) {
      etag = json["etag"];
    }
    if (json.containsKey("kind")) {
      kind = json["kind"];
    }
    if (json.containsKey("nextPageToken")) {
      nextPageToken = json["nextPageToken"];
    }
    if (json.containsKey("tables")) {
      tables = json["tables"]
          .map((value) => new TableListTables.fromJson(value))
          .toList();
    }
    if (json.containsKey("totalItems")) {
      totalItems = json["totalItems"];
    }
  }

  /// Serializes this object to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (etag != null) {
      result["etag"] = etag;
    }
    if (kind != null) {
      result["kind"] = kind;
    }
    if (nextPageToken != null) {
      result["nextPageToken"] = nextPageToken;
    }
    if (tables != null) {
      result["tables"] = tables.map((value) => value.toJson()).toList();
    }
    if (totalItems != null) {
      result["totalItems"] = totalItems;
    }
    return result;
  }
}
| 5291 | 5542 |
class TableReference {
  /// [Required] The ID of the dataset containing this table.
  core.String datasetId;

  /// [Required] The ID of the project containing this table.
  core.String projectId;

  /// [Required] The ID of the table. The ID must contain only letters (a-z,
  /// A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024
  /// characters.
  core.String tableId;

  TableReference();

  /// Builds a [TableReference] from [json], copying only the keys present.
  TableReference.fromJson(core.Map json) {
    if (json.containsKey("datasetId")) {
      datasetId = json["datasetId"];
    }
    if (json.containsKey("projectId")) {
      projectId = json["projectId"];
    }
    if (json.containsKey("tableId")) {
      tableId = json["tableId"];
    }
  }

  /// Serializes this reference to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (datasetId != null) {
      result["datasetId"] = datasetId;
    }
    if (projectId != null) {
      result["projectId"] = projectId;
    }
    if (tableId != null) {
      result["tableId"] = tableId;
    }
    return result;
  }
}
| 5332 | 5584 |
class TableRow {
  /// Represents a single row in the result set, consisting of one or more
  /// fields.
  core.List<TableCell> f;

  TableRow();

  /// Builds a [TableRow] from [json], copying only the keys present.
  TableRow.fromJson(core.Map json) {
    if (json.containsKey("f")) {
      f = json["f"].map((value) => new TableCell.fromJson(value)).toList();
    }
  }

  /// Serializes this row to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (f != null) {
      result["f"] = f.map((value) => value.toJson()).toList();
    }
    return result;
  }
}
| 5356 | 5607 |
class TableSchema {
  /// Describes the fields in a table.
  core.List<TableFieldSchema> fields;

  TableSchema();

  /// Builds a [TableSchema] from [json], copying only the keys present.
  TableSchema.fromJson(core.Map json) {
    if (json.containsKey("fields")) {
      fields = json["fields"]
          .map((value) => new TableFieldSchema.fromJson(value))
          .toList();
    }
  }

  /// Serializes this schema to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (fields != null) {
      result["fields"] = fields.map((value) => value.toJson()).toList();
    }
    return result;
  }
}
| 5377 | 5631 |
class TimePartitioning {
  /// [Optional] Number of milliseconds for which to keep the storage for a
  /// partition.
  core.String expirationMs;

  /// [Experimental] [Optional] If not set, the table is partitioned by pseudo
  /// column '_PARTITIONTIME'; if set, the table is partitioned by this field.
  /// The field must be a top-level TIMESTAMP or DATE field. Its mode must be
  /// NULLABLE or REQUIRED.
  core.String field;

  /// [Required] The only type supported is DAY, which will generate one
  /// partition per day.
  core.String type;

  TimePartitioning();

  /// Builds a [TimePartitioning] from [json], copying only the keys present.
  TimePartitioning.fromJson(core.Map json) {
    if (json.containsKey("expirationMs")) {
      expirationMs = json["expirationMs"];
    }
    if (json.containsKey("field")) {
      field = json["field"];
    }
    if (json.containsKey("type")) {
      type = json["type"];
    }
  }

  /// Serializes this object to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (expirationMs != null) {
      result["expirationMs"] = expirationMs;
    }
    if (field != null) {
      result["field"] = field;
    }
    if (type != null) {
      result["type"] = type;
    }
    return result;
  }
}
| 5412 | 5676 |
class UserDefinedFunctionResource {
  /// [Pick one] An inline resource that contains code for a user-defined
  /// function (UDF). Providing a inline code resource is equivalent to
  /// providing a URI for a file containing the same code.
  core.String inlineCode;

  /// [Pick one] A code resource to load from a Google Cloud Storage URI
  /// (gs://bucket/path).
  core.String resourceUri;

  UserDefinedFunctionResource();

  /// Builds a [UserDefinedFunctionResource] from [json], copying only the
  /// keys present.
  UserDefinedFunctionResource.fromJson(core.Map json) {
    if (json.containsKey("inlineCode")) {
      inlineCode = json["inlineCode"];
    }
    if (json.containsKey("resourceUri")) {
      resourceUri = json["resourceUri"];
    }
  }

  /// Serializes this resource to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (inlineCode != null) {
      result["inlineCode"] = inlineCode;
    }
    if (resourceUri != null) {
      result["resourceUri"] = resourceUri;
    }
    return result;
  }
}
| 5448 | 5710 |
class ViewDefinition {
  /// [Required] A query that BigQuery executes when the view is referenced.
  core.String query;

  /// Specifies whether to use BigQuery's legacy SQL for this view. The default
  /// value is true. If set to false, the view will use BigQuery's standard
  /// SQL: https://cloud.google.com/bigquery/sql-reference/ Queries and views
  /// that reference this view must use the same flag value.
  core.bool useLegacySql;

  /// Describes user-defined function resources used in the query.
  core.List<UserDefinedFunctionResource> userDefinedFunctionResources;

  ViewDefinition();

  /// Builds a [ViewDefinition] from [json], copying only the keys present.
  ViewDefinition.fromJson(core.Map json) {
    if (json.containsKey("query")) {
      query = json["query"];
    }
    if (json.containsKey("useLegacySql")) {
      useLegacySql = json["useLegacySql"];
    }
    if (json.containsKey("userDefinedFunctionResources")) {
      userDefinedFunctionResources = json["userDefinedFunctionResources"]
          .map((value) => new UserDefinedFunctionResource.fromJson(value))
          .toList();
    }
  }

  /// Serializes this definition to a JSON map, omitting null fields.
  core.Map<core.String, core.Object> toJson() {
    final result = <core.String, core.Object>{};
    if (query != null) {
      result["query"] = query;
    }
    if (useLegacySql != null) {
      result["useLegacySql"] = useLegacySql;
    }
    if (userDefinedFunctionResources != null) {
      result["userDefinedFunctionResources"] = userDefinedFunctionResources
          .map((value) => value.toJson())
          .toList();
    }
    return result;
  }
}
| OLD | NEW |