author    Rafael G. Martins <rafael@rafaelmartins.eng.br>  2016-12-09 20:28:46 +0100
committer Rafael G. Martins <rafael@rafaelmartins.eng.br>  2016-12-09 20:28:46 +0100
commit    95b0aa48c4dc9a267634fad5e8b4b10f959ca61c (patch)
tree      5a4fef809bb002e2d211d1af5d8485dd3fbe7c28
parent    71dce4a18bdfc46104b362c779abc8f6a5777827 (diff)
github-lambda: fix directory index and allow overriding content-type
This patch allows users to use files like `index.xml` as a directory index by renaming them to `index.html`, because Amazon S3 only supports a single file name as the directory index. It also allows users to override the automatically guessed content-type, e.g. to force a charset.
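For illustration, the overrides live in a settings file named `s3.json` at the repository root, under a `content-type` object keyed by the local (pre-rename) file path; the file name and key come from the diff below, while the mapping itself is a made-up example:

{
    "content-type": {
        "atom.xml": "application/atom+xml; charset=utf-8"
    }
}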
-rw-r--r--  src/blogc-github-lambda/lambda_function.py  |  43
1 file changed, 34 insertions(+), 9 deletions(-)
diff --git a/src/blogc-github-lambda/lambda_function.py b/src/blogc-github-lambda/lambda_function.py
index e456397..5a3bf5e 100644
--- a/src/blogc-github-lambda/lambda_function.py
+++ b/src/blogc-github-lambda/lambda_function.py
@@ -60,7 +60,23 @@ def get_tarball(repo_name):
     return rootdir
 
 
-def sync_s3(src, dest):
+def translate_filename(filename):
+    f = filename.split('/')
+    if len(f) == 0:
+        return filename
+    basename = f[-1]
+
+    # replace any index.$EXT file with index.html, because s3 only allows
+    # users to declare one directory index file name.
+    p = basename.split('.')
+    if len(p) == 2 and p[0] == 'index':
+        f[-1] = 'index.html'
+        return '/'.join(f)
+
+    return filename
+
+
+def sync_s3(src, dest, settings_file):
     s3 = boto3.resource('s3')
     bucket = s3.Bucket(dest)
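To illustrate the new helper: only a basename with exactly one dot and an `index` stem is rewritten, so nested paths work and multi-dot names pass through untouched. A hypothetical interpreter session against the function above:

>>> translate_filename('post/index.xml')
'post/index.html'
>>> translate_filename('index.atom')
'index.html'
>>> translate_filename('feeds/index.min.xml')   # two dots: not a directory index
'feeds/index.min.xml'
>>> translate_filename('style.css')
'style.css'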
@@ -69,35 +85,43 @@ def sync_s3(src, dest):
         if not obj.key.endswith('/'):
             remote_files[obj.key] = obj
 
-    local_files = []
+    local_files = {}
     for root, dirs, files in os.walk(src):
         real_root = root[len(src):].lstrip('/')
         for file in files:
-            local_files.append(os.path.join(real_root, file))
+            f = os.path.join(real_root, file)
+            local_files[translate_filename(f)] = f
 
     to_upload = []
     for file in local_files:
         if file not in remote_files:
-            to_upload.append(file)
+            to_upload.append(local_files[file])
 
     to_delete = []
     for file in remote_files:
         if file in local_files:
-            with open(os.path.join(src, file)) as fp:
+            with open(os.path.join(src, local_files[file])) as fp:
                 l = hashlib.sha1(fp.read())
             with closing(remote_files[file].get()['Body']) as fp:
                 r = hashlib.sha1(fp.read())
             if l.hexdigest() != r.hexdigest():
-                to_upload.append(file)
+                to_upload.append(local_files[file])
         else:
             to_delete.append(file)
 
+    content_types = {}
+    if os.path.exists(settings_file):
+        with open(settings_file, 'r') as fp:
+            settings = json.load(fp)
+            content_types = settings.get('content-type', {})
+
     for file in to_upload:
         with open(os.path.join(src, file), 'rb') as fp:
-            print 'Uploading file:', file
-            mime, _ = mimetypes.guess_type(file)
+            mime = content_types.get(file, mimetypes.guess_type(file)[0])
+            file = translate_filename(file)
+            print 'Uploading file: %s; content-type: "%s"' % (file, mime)
             if mime is not None:
                 bucket.put_object(Key=file, Body=fp, ContentType=mime)
             else:
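Note the upload loop resolves the content type from the original local name, before the rename, and an explicit `content_types` entry always wins over the stdlib guess. A standalone sketch of that lookup (`resolve_content_type` is a hypothetical helper, not part of the patch):

import mimetypes

def resolve_content_type(filename, content_types):
    # explicit override from s3.json wins; otherwise guess from the
    # extension. The result may still be None for unknown extensions,
    # in which case the object is uploaded without an explicit ContentType.
    return content_types.get(filename, mimetypes.guess_type(filename)[0])

>>> resolve_content_type('atom.xml', {'atom.xml': 'application/atom+xml'})
'application/atom+xml'
>>> resolve_content_type('post/index.html', {})
'text/html'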
@@ -124,4 +148,5 @@ def lambda_handler(event, context):
                           stderr=None if debug else subprocess.PIPE)
     if rv != 0:
         raise RuntimeError('Failed to run make')
-    sync_s3(os.path.join(rootdir, '_build'), repo_name)
+    sync_s3(os.path.join(rootdir, '_build'), repo_name,
+            os.path.join(rootdir, 's3.json'))
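Putting the hunks together: sync_s3 now keys comparisons on the translated (remote) name while reading content from the original local path. A simplified, self-contained sketch of the decision logic; the names here are illustrative, not the patch's code:

import hashlib

def plan_sync(local_files, remote_hashes, read_local):
    # local_files:   remote key -> local path, e.g. {'post/index.html': 'post/index.xml'}
    # remote_hashes: remote key -> sha1 hexdigest of the S3 object body
    # read_local:    callable returning the bytes of a local path
    to_upload, to_delete = [], []
    for key, path in local_files.items():
        if key not in remote_hashes:
            to_upload.append(path)  # new file
        elif hashlib.sha1(read_local(path)).hexdigest() != remote_hashes[key]:
            to_upload.append(path)  # content changed
    for key in remote_hashes:
        if key not in local_files:
            to_delete.append(key)   # gone locally, delete remotely
    return to_upload, to_delete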