OLD | NEW |
(Empty) | |
| 1 #!/usr/bin/env python2.7 |
| 2 # Copyright 2015-2016, Google Inc. |
| 3 # All rights reserved. |
| 4 # |
| 5 # Redistribution and use in source and binary forms, with or without |
| 6 # modification, are permitted provided that the following conditions are |
| 7 # met: |
| 8 # |
| 9 # * Redistributions of source code must retain the above copyright |
| 10 # notice, this list of conditions and the following disclaimer. |
| 11 # * Redistributions in binary form must reproduce the above |
| 12 # copyright notice, this list of conditions and the following disclaimer |
| 13 # in the documentation and/or other materials provided with the |
| 14 # distribution. |
| 15 # * Neither the name of Google Inc. nor the names of its |
| 16 # contributors may be used to endorse or promote products derived from |
| 17 # this software without specific prior written permission. |
| 18 # |
| 19 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 20 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 21 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 22 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 23 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 24 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 25 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 26 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 27 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 28 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 29 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 30 |
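| """Helpers for creating BigQuery datasets and tables and inserting rows of data.""" |
| |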
| 31 import argparse |
| 32 import json |
| 33 import uuid |
| 34 import httplib2 |
| 35 |
| 36 from apiclient import discovery |
| 37 from apiclient.errors import HttpError |
| 38 from oauth2client.client import GoogleCredentials |
| 39 |
| 40 NUM_RETRIES = 3 |
| 41 |
| 42 |
| 43 def create_big_query(): |
| 44 """Authenticates with cloud platform and gets a BiqQuery service object |
| 45 """ |
| 46 creds = GoogleCredentials.get_application_default() |
| 47 return discovery.build('bigquery', 'v2', credentials=creds) |
| 48 |
| 49 |
| 50 def create_dataset(big_query, project_id, dataset_id): |
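| """Creates dataset_id under project_id; returns True on success or if the dataset already exists.""" |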
| 51 is_success = True |
| 52 body = { |
| 53 'datasetReference': { |
| 54 'projectId': project_id, |
| 55 'datasetId': dataset_id |
| 56 } |
| 57 } |
| 58 |
| 59 try: |
| 60 dataset_req = big_query.datasets().insert(projectId=project_id, body=body) |
| 61 dataset_req.execute(num_retries=NUM_RETRIES) |
| 62 except HttpError as http_error: |
| 63 if http_error.resp.status == 409: |
| 64 print 'Warning: The dataset %s already exists' % dataset_id |
| 65 else: |
| 66 # Note: For more debugging info, print "http_error.content" |
| 67 print 'Error in creating dataset: %s. Err: %s' % (dataset_id, http_error) |
| 68 is_success = False |
| 69 return is_success |
| 70 |
| 71 |
| 72 def create_table(big_query, project_id, dataset_id, table_id, table_schema, |
| 73 description): |
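| """Creates table_id in dataset_id; table_schema is a list of (name, type, description) tuples.""" |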
| 74 is_success = True |
| 75 |
| 76 body = { |
| 77 'description': description, |
| 78 'schema': { |
| 79 'fields': [{ |
| 80 'name': field_name, |
| 81 'type': field_type, |
| 82 'description': field_description |
| 83 } for (field_name, field_type, field_description) in table_schema] |
| 84 }, |
| 85 'tableReference': { |
| 86 'datasetId': dataset_id, |
| 87 'projectId': project_id, |
| 88 'tableId': table_id |
| 89 } |
| 90 } |
| 91 |
| 92 try: |
| 93 table_req = big_query.tables().insert(projectId=project_id, |
| 94 datasetId=dataset_id, |
| 95 body=body) |
| 96 res = table_req.execute(num_retries=NUM_RETRIES) |
| 97 print 'Successfully created %s "%s"' % (res['kind'], res['id']) |
| 98 except HttpError as http_error: |
| 99 if http_error.resp.status == 409: |
| 100 print 'Warning: Table %s already exists' % table_id |
| 101 else: |
| 102 print 'Error in creating table: %s. Err: %s' % (table_id, http_error) |
| 103 is_success = False |
| 104 return is_success |
| 105 |
| 106 |
| 107 def insert_rows(big_query, project_id, dataset_id, table_id, rows_list): |
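| """Streams rows_list (each entry built with make_row) into the table via tabledata().insertAll.""" |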
| 108 is_success = True |
| 109 body = {'rows': rows_list} |
| 110 try: |
| 111 insert_req = big_query.tabledata().insertAll(projectId=project_id, |
| 112 datasetId=dataset_id, |
| 113 tableId=table_id, |
| 114 body=body) |
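| # Print the request body and the insertAll response for debugging |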
| 115 print body |
| 116 res = insert_req.execute(num_retries=NUM_RETRIES) |
| 117 print res |
| 118 except HttpError as http_error: |
| 119 print 'Error in inserting rows in the table %s. Err: %s' % (table_id, http_error) |
| 120 is_success = False |
| 121 return is_success |
| 122 |
| 123 |
| 124 def sync_query_job(big_query, project_id, query, timeout=5000): |
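| """Runs a synchronous query job and returns the response, or None if the request fails.""" |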
| 125 query_data = {'query': query, 'timeoutMs': timeout} |
| 126 query_job = None |
| 127 try: |
| 128 query_job = big_query.jobs().query( |
| 129 projectId=project_id, |
| 130 body=query_data).execute(num_retries=NUM_RETRIES) |
| 131 except HttpError as http_error: |
| 132 print 'Query execute job failed with error: %s' % http_error |
| 133 print http_error.content |
| 134 return query_job |
| 135 |
| 136 |
| 137 def make_row(unique_row_id, row_values_dict): |
| 138 """row_values_dict is a dictionary of column name and column value. |
| 139 """ |
| 140 return {'insertId': unique_row_id, 'json': row_values_dict} |