Skip to content

Commit 58bc8bc

Browse files
committed
Python 2 -> Python 3
1 parent aa53386 commit 58bc8bc

File tree

6 files changed

+447
-18
lines changed

6 files changed

+447
-18
lines changed

Dockerfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
FROM python:2
1+
FROM python:3
22

33
WORKDIR /usr/src/app
44

lambda_hook/aws_lambda.py

+7-7
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import stat
44
import logging
55
import hashlib
6-
from StringIO import StringIO
6+
from io import StringIO
77
from zipfile import ZipFile, ZIP_DEFLATED
88
import botocore
99

@@ -44,9 +44,9 @@ def _zip_files(files, root):
4444
for zip_entry in zip_file.filelist:
4545
perms = (zip_entry.external_attr & ZIP_PERMS_MASK) >> 16
4646
if perms & stat.S_IXUSR != 0:
47-
new_perms = 0755
47+
new_perms = 0o755
4848
else:
49-
new_perms = 0644
49+
new_perms = 0o644
5050

5151
if new_perms != perms:
5252
logger.debug("lambda: fixing perms: %s: %o => %o",
@@ -168,10 +168,10 @@ def _ensure_bucket(s3_conn, bucket):
168168
try:
169169
s3_conn.head_bucket(Bucket=bucket)
170170
except botocore.exceptions.ClientError as e:
171-
if e.response['Error']['Code'] == '404':
171+
if e.response['Error']['Code'] == '404':
172172
logger.info('Creating bucket %s.', bucket)
173173
s3_conn.create_bucket(Bucket=bucket)
174-
elif e.response['Error']['Code'] in ('401', '403'):
174+
elif e.response['Error']['Code'] in ('401', '403'):
175175
logger.exception('Access denied for bucket %s.', bucket)
176176
raise
177177
else:
@@ -249,11 +249,11 @@ def _check_pattern_list(patterns, key, default=None):
249249
if not patterns:
250250
return default
251251

252-
if isinstance(patterns, basestring):
252+
if isinstance(patterns, str):
253253
return [patterns]
254254

255255
if isinstance(patterns, list):
256-
if all(isinstance(p, basestring) for p in patterns):
256+
if all(isinstance(p, str) for p in patterns):
257257
return patterns
258258

259259
raise ValueError("Invalid file patterns in key '{}': must be a string or "

lambda_hook/aws_lambda.py.bak

+321
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,321 @@
1+
import os
2+
import os.path
3+
import stat
4+
import logging
5+
import hashlib
6+
from StringIO import StringIO
7+
from zipfile import ZipFile, ZIP_DEFLATED
8+
import botocore
9+
10+
import formic
11+
12+
13+
# Mask selecting only the UNIX permission bits from the ``external_attr``
# field of a ZIP entry. ZIP stores the POSIX mode in the high 16 bits of
# that field, hence the left shift.
ZIP_PERMS_MASK = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) << 16

# Module-level logger named after this module, per logging convention.
logger = logging.getLogger(__name__)
19+
20+
21+
def _zip_files(files, root):
    """Generates a ZIP file in-memory from a list of files.

    Files will be stored in the archive with relative names, and have their
    UNIX permissions forced to 755 or 644 (depending on whether they are
    user-executable in the source filesystem).

    Args:
        files (list[str]): file names to add to the archive, relative to
            ``root``.
        root (str): base directory to retrieve files from.

    Returns:
        bytes: content of the ZIP file as a byte string.

    """
    # A ZIP archive is binary data, so the in-memory buffer must be a
    # *bytes* buffer; a text StringIO would fail when ZipFile writes to it.
    from io import BytesIO

    zip_data = BytesIO()
    with ZipFile(zip_data, 'w', ZIP_DEFLATED) as zip_file:
        for fname in files:
            zip_file.write(os.path.join(root, fname), fname)

        # Fix file permissions to avoid any issues - only care whether a file
        # is executable or not, choosing between modes 755 and 644 accordingly.
        for zip_entry in zip_file.filelist:
            perms = (zip_entry.external_attr & ZIP_PERMS_MASK) >> 16
            if perms & stat.S_IXUSR != 0:
                new_perms = 0o755
            else:
                new_perms = 0o644

            if new_perms != perms:
                logger.debug("lambda: fixing perms: %s: %o => %o",
                             zip_entry.filename, perms, new_perms)
                new_attr = ((zip_entry.external_attr & ~ZIP_PERMS_MASK) |
                            (new_perms << 16))
                zip_entry.external_attr = new_attr

    contents = zip_data.getvalue()
    zip_data.close()

    return contents
62+
63+
64+
def _find_files(root, includes, excludes):
    """List files inside a directory based on include and exclude rules.

    This is a more advanced version of `glob.glob`, that accepts multiple
    complex patterns.

    Args:
        root (str): base directory to list files from.
        includes (list[str]): inclusion patterns. Only files matching those
            patterns will be included in the result.
        excludes (list[str]): exclusion patterns. Files matching those
            patterns will be excluded from the result. Exclusions take
            precedence over inclusions.

    Yields:
        str: a file name relative to the root.

    Note:
        Documentation for the patterns can be found at
        http://www.aviser.asia/formic/doc/index.html
    """
    base = os.path.abspath(root)
    file_set = formic.FileSet(directory=base, include=includes,
                              exclude=excludes)
    # qualified_files(absolute=False) yields paths relative to the base.
    yield from file_set.qualified_files(absolute=False)
91+
92+
93+
def _zip_from_file_patterns(root, includes, excludes):
    """Generates a ZIP file in-memory from file search patterns.

    Args:
        root (str): base directory to list files from.
        includes (list[str]): inclusion patterns. Only files matching those
            patterns will be included in the result.
        excludes (list[str]): exclusion patterns. Files matching those
            patterns will be excluded from the result. Exclusions take
            precedence over inclusions.

    See Also:
        :func:`_zip_files`, :func:`_find_files`.

    Raises:
        RuntimeError: when the generated archive would be empty.

    """
    logger.info('lambda: base directory: %s', root)

    matched = list(_find_files(root, includes, excludes))
    # An empty payload is almost certainly a configuration mistake, so fail
    # loudly instead of uploading a useless archive.
    if not matched:
        raise RuntimeError('Empty list of files for Lambda payload. Check '
                           'your include/exclude options for errors.')

    logger.info('lambda: adding %d files:', len(matched))
    for entry in matched:
        logger.debug('lambda: + %s', entry)

    return _zip_files(matched, root)
124+
125+
126+
def _head_object(s3_conn, bucket, key):
127+
"""Retrieve information about an object in S3 if it exists.
128+
129+
Args:
130+
s3_conn (:class:`botocore.client.S3`): S3 connection to use for
131+
operations.
132+
bucket (str): name of the bucket containing the key.
133+
key (str): name of the key to lookup.
134+
135+
Returns:
136+
dict: S3 object information, or None if the object does not exist.
137+
See the AWS documentation for explanation of the contents.
138+
139+
Raises:
140+
botocore.exceptions.ClientError: any error from boto3 other than key
141+
not found is passed through.
142+
"""
143+
try:
144+
return s3_conn.head_object(Bucket=bucket, Key=key)
145+
except botocore.exceptions.ClientError as e:
146+
if e.response['Error']['Code'] == '404':
147+
return None
148+
else:
149+
raise
150+
151+
152+
def _ensure_bucket(s3_conn, bucket):
153+
"""Create an S3 bucket if it does not already exist.
154+
155+
Args:
156+
s3_conn (:class:`botocore.client.S3`): S3 connection to use for
157+
operations.
158+
bucket (str): name of the bucket to create.
159+
160+
Returns:
161+
dict: S3 object information. See the AWS documentation for explanation
162+
of the contents.
163+
164+
Raises:
165+
botocore.exceptions.ClientError: any error from boto3 is passed
166+
through.
167+
"""
168+
try:
169+
s3_conn.head_bucket(Bucket=bucket)
170+
except botocore.exceptions.ClientError as e:
171+
if e.response['Error']['Code'] == '404':
172+
logger.info('Creating bucket %s.', bucket)
173+
s3_conn.create_bucket(Bucket=bucket)
174+
elif e.response['Error']['Code'] in ('401', '403'):
175+
logger.exception('Access denied for bucket %s.', bucket)
176+
raise
177+
else:
178+
logger.exception('Error creating bucket %s. Error %s', bucket,
179+
e.response)
180+
raise
181+
182+
183+
def _upload_code(s3_conn, bucket_name, name, contents):
    """Upload a ZIP file to S3 for use by Lambda.

    The key used for the upload will be unique based on the checksum of the
    contents. No changes will be made if the contents in S3 already match the
    expected contents.

    Args:
        s3_conn (:class:`botocore.client.S3`): S3 connection to use for
            operations.
        bucket_name (str): name of the bucket to upload to.
        name (str): desired name of the Lambda function. Will be used to
            construct a key name for the uploaded file.
        contents (bytes): byte string with the content of the file upload.

    Returns:
        dict: a dictionary with the bucket and key where the code is located.

    Raises:
        botocore.exceptions.ClientError: any error from boto3 is passed
            through.
    """
    hsh = hashlib.md5(contents)
    logger.debug('lambda: ZIP hash: %s', hsh.hexdigest())

    # Embed the checksum in the key so identical payloads map to the same
    # object and re-uploads can be skipped.
    key = 'lambda-{}-{}.zip'.format(name, hsh.hexdigest())

    info = _head_object(s3_conn, bucket_name, key)
    # For single-part uploads, the S3 ETag is the MD5 digest wrapped in
    # double quotes.
    expected_etag = '"{}"'.format(hsh.hexdigest())

    if info and info['ETag'] == expected_etag:
        logger.info('lambda: object %s already exists, not uploading', key)
    else:
        logger.info('lambda: uploading object %s', key)
        s3_conn.put_object(Bucket=bucket_name, Key=key, Body=contents,
                           ContentType='application/zip',
                           ACL='authenticated-read')

    return {"bucket": bucket_name, "key": key}
226+
227+
228+
def _check_pattern_list(patterns, key, default=None):
229+
"""Validates file search patterns from user configuration.
230+
231+
Acceptable input is a string (which will be converted to a singleton list),
232+
a list of strings, or anything falsy (such as None or an empty dictionary).
233+
Empty or unset input will be converted to a default.
234+
235+
Args:
236+
patterns: input from user configuration (YAML).
237+
key (str): name of the configuration key the input came from,
238+
used for error display purposes.
239+
240+
Keyword Args:
241+
default: value to return in case the input is empty or unset.
242+
243+
Returns:
244+
list[str]: validated list of patterns
245+
246+
Raises:
247+
ValueError: if the input is unacceptable.
248+
"""
249+
if not patterns:
250+
return default
251+
252+
if isinstance(patterns, basestring):
253+
return [patterns]
254+
255+
if isinstance(patterns, list):
256+
if all(isinstance(p, basestring) for p in patterns):
257+
return patterns
258+
259+
raise ValueError("Invalid file patterns in key '{}': must be a string or "
260+
'list of strings'.format(key))
261+
262+
263+
def _upload_function(s3_conn, bucket_name, function_name, path,
                     include=None, exclude=None):
    """Builds a Lambda payload from user configuration and uploads it to S3.

    Args:
        s3_conn (:class:`botocore.client.S3`): S3 connection to use for
            operations.
        bucket_name (str): name of the bucket to upload to.
        function_name (str): desired name of the Lambda function. Will be used
            to construct a key name for the uploaded file.
        path (str): base path to retrieve files from (mandatory).
        include (list): file patterns to include in the payload (optional).
        exclude (list): file patterns to exclude from the payload (optional).

    Returns:
        dict: A dictionary with the bucket & key where the code is located.

    """
    base_dir = os.path.expanduser(path)

    # Normalize user-supplied patterns; include everything by default.
    include_patterns = _check_pattern_list(include, 'include', default=['**'])
    exclude_patterns = _check_pattern_list(exclude, 'exclude', default=[])

    logger.debug('lambda: processing function %s', function_name)

    payload = _zip_from_file_patterns(base_dir, include_patterns,
                                      exclude_patterns)

    return _upload_code(s3_conn, bucket_name, function_name, payload)
291+
292+
293+
def upload_lambda_functions(s3_conn, bucket_name, function_name, path,
                            include=None, exclude=None):
    """Builds Lambda payloads from user configuration and uploads them to S3.

    Constructs ZIP archives containing files matching specified patterns for
    each function, uploads the result to Amazon S3, then returns the bucket
    and key name where the function is stored.

    Args:
        s3_conn (:class:`botocore.client.S3`): S3 connection to use for
            operations.
        bucket_name (str): name of the bucket to upload to.
        function_name (str): desired name of the Lambda function. Will be used
            to construct a key name for the uploaded file.
        path (str): base path to retrieve files from (mandatory).
        include (list): file patterns to include in the payload (optional).
        exclude (list): file patterns to exclude from the payload (optional).

    Returns:
        dict: A dictionary with the bucket & key where the code is located.

    """
    # Make sure the destination bucket exists before attempting the upload.
    _ensure_bucket(s3_conn, bucket_name)

    return _upload_function(s3_conn, bucket_name, function_name, path,
                            include, exclude)

0 commit comments

Comments
 (0)