Skip to content

Commit cb2dadc

Browse files
committed
Merge pull request #61 from blarghmatey/py3k_fixes
Updated library and unit tests to run on Python 2 and Python 3
2 parents 751eb12 + 00cafe9 commit cb2dadc

File tree

11 files changed

+288
-115
lines changed

11 files changed

+288
-115
lines changed

.travis.yml

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
language: python
2-
python:
3-
- "2.7"
4-
52
install:
6-
- pip install -r requirements.txt
7-
- pip install -r requirements_dev.txt
8-
9-
script: make test
10-
3+
- pip install tox
4+
script: tox -e $TOXENV
115
notifications:
126
email: false
7+
env:
8+
- TOXENV=py27
9+
- TOXENV=py33
10+
- TOXENV=py34
11+
- TOXENV=nightly
12+
- TOXENV=pypy

bigquery/__init__.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
from client import get_client
2-
from client import (
1+
from __future__ import absolute_import
2+
from .client import get_client
3+
from .client import (
34
BIGQUERY_SCOPE,
45
BIGQUERY_SCOPE_READ_ONLY,
56
JOB_CREATE_IF_NEEDED,
@@ -14,4 +15,4 @@
1415
JOB_ENCODING_ISO_8859_1
1516
)
1617

17-
from schema_builder import schema_from_record
18+
from .schema_builder import schema_from_record

bigquery/client.py

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,19 @@
11
import calendar
2+
import json
3+
import logging
24
from collections import defaultdict
35
from datetime import datetime, timedelta
4-
from time import sleep
5-
from time import time
66
from hashlib import sha256
7-
import json
8-
import logging
7+
from time import sleep, time
98

9+
import httplib2
10+
import six
1011
from apiclient.discovery import build
1112
from apiclient.errors import HttpError
12-
import httplib2
1313

14+
from bigquery.errors import (BigQueryTimeoutException, JobExecutingException,
15+
JobInsertException, UnfinishedQueryException)
1416
from bigquery.schema_builder import schema_from_record
15-
from bigquery.errors import (
16-
JobExecutingException, JobInsertException,
17-
UnfinishedQueryException, BigQueryTimeoutException
18-
)
1917

2018
BIGQUERY_SCOPE = 'https://www.googleapis.com/auth/bigquery'
2119
BIGQUERY_SCOPE_READ_ONLY = 'https://www.googleapis.com/auth/bigquery.readonly'
@@ -154,7 +152,7 @@ def _submit_query_job(self, query_data):
154152
projectId=self.project_id, body=query_data).execute()
155153
except HttpError as e:
156154
if query_data.get("dryRun", False):
157-
return None, json.loads(e.content)
155+
return None, json.loads(e.content.decode('utf8'))
158156
raise
159157

160158
job_id = query_reply['jobReference'].get('jobId')
@@ -266,7 +264,7 @@ def get_table_schema(self, dataset, table):
266264
projectId=self.project_id,
267265
tableId=table,
268266
datasetId=dataset).execute()
269-
except HttpError, e:
267+
except HttpError as e:
270268
if int(e.resp['status']) == 404:
271269
logging.warn('Table %s.%s does not exist', dataset, table)
272270
return None
@@ -651,7 +649,7 @@ def import_data_from_uris(
651649
skip_leading_rows=skip_leading_rows,
652650
quote=quote)
653651
non_null_values = dict((k, v) for k, v
654-
in all_values.items()
652+
in list(all_values.items())
655653
if v)
656654
raise Exception("Parameters field_delimiter, allow_jagged_rows, "
657655
"allow_quoted_newlines, quote and "
@@ -837,6 +835,7 @@ def wait_for_job(self, job, interval=5, timeout=60):
837835
Waits until the job indicated by job_resource is done or has failed
838836
Args:
839837
job: dict, representing a BigQuery job resource
838+
or str, representing a BigQuery job id
840839
interval: optional float polling interval in seconds, default = 5
841840
timeout: optional float timeout in seconds, default = 60
842841
Returns:
@@ -848,7 +847,9 @@ def wait_for_job(self, job, interval=5, timeout=60):
848847
BigQueryTimeoutException on timeout
849848
"""
850849
complete = False
851-
job_id = job['jobReference']['jobId']
850+
job_id = str(job if isinstance(job,
851+
(six.binary_type, six.text_type, int))
852+
else job['jobReference']['jobId'])
852853
job_resource = None
853854

854855
start_time = time()
@@ -1048,7 +1049,7 @@ def _filter_tables_by_time(self, tables, start_time, end_time):
10481049
A list of table names that are inside the time range.
10491050
"""
10501051

1051-
return [table_name for (table_name, unix_seconds) in tables.iteritems()
1052+
return [table_name for (table_name, unix_seconds) in tables.items()
10521053
if self._in_range(start_time, end_time, unix_seconds)]
10531054

10541055
def _in_range(self, start_time, end_time, time):
@@ -1167,7 +1168,7 @@ def _generate_hex_for_uris(self, uris):
11671168
Returns:
11681169
string of hexed uris
11691170
"""
1170-
return sha256(":".join(uris) + str(time())).hexdigest()
1171+
return sha256((":".join(uris) + str(time())).encode()).hexdigest()
11711172

11721173
def _raise_insert_exception_if_error(self, job):
11731174
error_http = job.get('error')

bigquery/query_builder.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def _render_select(selections):
7777
return 'SELECT *'
7878

7979
rendered_selections = []
80-
for name, options in selections.iteritems():
80+
for name, options in selections.items():
8181
if not isinstance(options, list):
8282
options = [options]
8383

@@ -200,7 +200,8 @@ def _render_condition(field, field_type, comparators):
200200
if condition == "IN":
201201
if isinstance(value, (list, tuple, set)):
202202
value = ', '.join(
203-
[_render_condition_value(v, field_type) for v in value]
203+
sorted([_render_condition_value(v, field_type)
204+
for v in value])
204205
)
205206
else:
206207
value = _render_condition_value(value, field_type)

bigquery/schema_builder.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
1+
from __future__ import absolute_import
12
__author__ = 'Aneil Mallavarapu (http://github.com/aneilbaboo)'
23

34
from datetime import datetime
45

6+
import six
57
import dateutil.parser
68

7-
from errors import InvalidTypeException
9+
from .errors import InvalidTypeException
810

911

1012
def default_timestamp_parser(s):
@@ -30,7 +32,7 @@ def schema_from_record(record, timestamp_parser=default_timestamp_parser):
3032
schema: list
3133
"""
3234
return [describe_field(k, v, timestamp_parser=timestamp_parser)
33-
for k, v in record.items()]
35+
for k, v in list(record.items())]
3436

3537

3638
def describe_field(k, v, timestamp_parser=default_timestamp_parser):
@@ -76,7 +78,7 @@ def bq_schema_field(name, bq_type, mode):
7678
if bq_type == "record":
7779
try:
7880
field['fields'] = schema_from_record(v, timestamp_parser)
79-
except InvalidTypeException, e:
81+
except InvalidTypeException as e:
8082
# recursively construct the key causing the error
8183
raise InvalidTypeException("%s.%s" % (k, e.key), e.value)
8284

@@ -100,7 +102,7 @@ def bigquery_type(o, timestamp_parser=default_timestamp_parser):
100102
t = type(o)
101103
if t == int:
102104
return "integer"
103-
elif t == str or t == unicode:
105+
elif (t == six.binary_type and six.PY2) or t == six.text_type:
104106
if timestamp_parser and timestamp_parser(o):
105107
return "timestamp"
106108
else:

0 commit comments

Comments (0)