Diffstat (limited to 'src/blogc-github-lambda')
-rw-r--r--  src/blogc-github-lambda/lambda_function.py  43
1 file changed, 34 insertions(+), 9 deletions(-)
diff --git a/src/blogc-github-lambda/lambda_function.py b/src/blogc-github-lambda/lambda_function.py
index e456397..5a3bf5e 100644
--- a/src/blogc-github-lambda/lambda_function.py
+++ b/src/blogc-github-lambda/lambda_function.py
@@ -60,7 +60,23 @@ def get_tarball(repo_name):
     return rootdir
 
 
-def sync_s3(src, dest):
+def translate_filename(filename):
+    f = filename.split('/')
+    if len(f) == 0:
+        return filename
+    basename = f[-1]
+
+    # replace any index.$EXT file with index.html, because s3 only allows
+    # users to declare one directory index file name.
+    p = basename.split('.')
+    if len(p) == 2 and p[0] == 'index':
+        f[-1] = 'index.html'
+        return '/'.join(f)
+
+    return filename
+
+
+def sync_s3(src, dest, settings_file):
     s3 = boto3.resource('s3')
     bucket = s3.Bucket(dest)
 
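The new translate_filename() only rewrites the basename, and only when it is exactly index.<ext> with a single dot, so names like index.min.js pass through untouched; note that the len(f) == 0 guard can never fire, since str.split() always returns at least one element. A minimal standalone sketch of the mapping, where the asserted example paths are assumptions, not part of the commit:

    # Standalone copy of the helper from the hunk above; the asserts
    # below illustrate the intended mapping on made-up paths.
    def translate_filename(filename):
        f = filename.split('/')
        basename = f[-1]
        # s3 only allows one directory index file name, so any
        # index.$EXT becomes index.html.
        p = basename.split('.')
        if len(p) == 2 and p[0] == 'index':
            f[-1] = 'index.html'
            return '/'.join(f)
        return filename

    assert translate_filename('index.txt') == 'index.html'
    assert translate_filename('post/index.rst') == 'post/index.html'
    assert translate_filename('post/entry.txt') == 'post/entry.txt'
    assert translate_filename('index.min.js') == 'index.min.js'  # two dots: untouched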
@@ -69,35 +85,43 @@ def sync_s3(src, dest):
         if not obj.key.endswith('/'):
             remote_files[obj.key] = obj
 
-    local_files = []
+    local_files = {}
     for root, dirs, files in os.walk(src):
         real_root = root[len(src):].lstrip('/')
         for file in files:
-            local_files.append(os.path.join(real_root, file))
+            f = os.path.join(real_root, file)
+            local_files[translate_filename(f)] = f
 
     to_upload = []
     for file in local_files:
         if file not in remote_files:
-            to_upload.append(file)
+            to_upload.append(local_files[file])
 
     to_delete = []
     for file in remote_files:
         if file in local_files:
-            with open(os.path.join(src, file)) as fp:
+            with open(os.path.join(src, local_files[file])) as fp:
                 l = hashlib.sha1(fp.read())
             with closing(remote_files[file].get()['Body']) as fp:
                 r = hashlib.sha1(fp.read())
             if l.hexdigest() != r.hexdigest():
-                to_upload.append(file)
+                to_upload.append(local_files[file])
         else:
             to_delete.append(file)
 
+    content_types = {}
+    if os.path.exists(settings_file):
+        with open(settings_file, 'r') as fp:
+            settings = json.load(fp)
+            content_types = settings.get('content-type', {})
+
     for file in to_upload:
         with open(os.path.join(src, file), 'rb') as fp:
-            print 'Uploading file:', file
-            mime, _ = mimetypes.guess_type(file)
+            mime = content_types.get(file, mimetypes.guess_type(file)[0])
+            file = translate_filename(file)
+            print 'Uploading file: %s; content-type: "%s"' % (file, mime)
             if mime is not None:
                 bucket.put_object(Key=file, Body=fp, ContentType=mime)
             else:
                 bucket.put_object(Key=file, Body=fp)
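Two details of the rewritten sync are worth noting: local_files maps each translated S3 key back to its original source path, so the hash comparison and the upload open the original file while comparing against the translated remote key; and the content-type override is looked up under the original (untranslated) name, before file is rebound by translate_filename(). A sketch of the resulting lookup order, using a hypothetical pick_content_type() helper that is not in the commit:

    import json
    import mimetypes
    import os

    # Hypothetical helper mirroring the resolution order above: an explicit
    # per-file override from the settings file wins, otherwise fall back to
    # a guess from the extension; None means "upload without ContentType",
    # as in the else branch of the hunk.
    def pick_content_type(local_name, settings_file):
        content_types = {}
        if os.path.exists(settings_file):
            with open(settings_file, 'r') as fp:
                content_types = json.load(fp).get('content-type', {})
        return content_types.get(local_name, mimetypes.guess_type(local_name)[0])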
@@ -124,4 +148,5 @@ def lambda_handler(event, context):
                          stderr=None if debug else subprocess.PIPE)
     if rv != 0:
         raise RuntimeError('Failed to run make')
-    sync_s3(os.path.join(rootdir, '_build'), repo_name)
+    sync_s3(os.path.join(rootdir, '_build'), repo_name,
+            os.path.join(rootdir, 's3.json'))
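lambda_handler() now passes a third argument: an s3.json at the root of the extracted tarball (next to _build, not inside it). The file is optional, since sync_s3() checks os.path.exists() before reading it, so repositories without one keep working. A hypothetical example with the structure the code reads, a top-level "content-type" object keyed by untranslated file paths (the names and types here are made up):

    {
        "content-type": {
            "index.txt": "text/html",
            "atom.xml": "application/atom+xml"
        }
    }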