author     Rafael G. Martins <rafael@rafaelmartins.eng.br>  2017-01-03 02:20:52 +0100
committer  Rafael G. Martins <rafael@rafaelmartins.eng.br>  2017-01-03 02:20:52 +0100
commit     aa8871ffe2e335b6f4a5107c15e978ba99a9fae5 (patch)
tree       f94e576da8249fe70f200aefef5c362cd64f5a75
parent     45eb3261c0096ab5cacccefd2103aa16b887e605 (diff)
download   blogc-aa8871ffe2e335b6f4a5107c15e978ba99a9fae5.tar.gz
           blogc-aa8871ffe2e335b6f4a5107c15e978ba99a9fae5.tar.bz2
           blogc-aa8871ffe2e335b6f4a5107c15e978ba99a9fae5.zip
github-lambda: always upload files with real file names
Even if we are translating some file names, for example to use a .xml file as a directory index, we still need to upload the file under its real file name, so that the automatically generated Atom feeds produced by blogc-make do not point to wrong URLs.
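For reference, translate_filename is the helper in lambda_function.py that maps a source file name to the name it should be served under. Its real rules are not shown in this diff; a minimal sketch of the ".xml as directory index" case described above could look like this (the mapping rule is an assumption, purely for illustration):

import posixpath


def translate_filename(filename):
    # ASSUMPTION: this is NOT the real helper from lambda_function.py,
    # just an illustrative sketch of the kind of mapping it performs.
    dirname, basename = posixpath.split(filename)
    if basename == 'index.xml':
        # hypothetical rule: serve 'posts/index.xml' also as
        # 'posts/index.html' (the S3 website index document), while the
        # feed itself stays reachable at its real .xml URL
        return posixpath.join(dirname, 'index.html')
    return filename

With this change, both the real name and the translated name end up in local_files, which is exactly why uploading only the translated copy would have broken the feed URLs.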
-rw-r--r--  src/blogc-github-lambda/lambda_function.py  41
1 file changed, 27 insertions(+), 14 deletions(-)
diff --git a/src/blogc-github-lambda/lambda_function.py b/src/blogc-github-lambda/lambda_function.py
index 9798055..8f2d2b3 100644
--- a/src/blogc-github-lambda/lambda_function.py
+++ b/src/blogc-github-lambda/lambda_function.py
@@ -98,8 +98,26 @@ def sync_s3(src, dest, settings_file):
     for root, dirs, files in os.walk(src):
         real_root = root[len(src):].lstrip('/')
         for filename in files:
-            f = os.path.join(real_root, filename)
-            local_files[translate_filename(f)] = f
+            real_filename = os.path.join(real_root, filename)
+            data = {'Key': real_filename}
+
+            mime = content_types.get(real_filename,
+                                     mimetypes.guess_type(real_filename)[0])
+            if mime is not None:
+                data['ContentType'] = mime
+
+            with open(os.path.join(src, real_filename), 'rb') as fp:
+                data['Body'] = fp.read()
+
+            # always push the original file to its place
+            local_files[real_filename] = data
+
+            # if we need a copy on s3 for index or something, push it too
+            translated_filename = translate_filename(real_filename)
+            if translated_filename != real_filename:
+                translated_data = data.copy()
+                translated_data['Key'] = translated_filename
+                local_files[translated_filename] = translated_data
 
     to_upload = []
     for filename in local_files:
@@ -109,8 +127,7 @@ def sync_s3(src, dest, settings_file):
     to_delete = []
     for filename in remote_files:
         if filename in local_files:
-            with open(os.path.join(src, local_files[filename])) as fp:
-                l = hashlib.sha1(fp.read())
+            l = hashlib.sha1(local_files[filename]['Body'])
 
             with closing(remote_files[filename].get()['Body']) as fp:
                 r = hashlib.sha1(fp.read())
@@ -120,16 +137,12 @@ def sync_s3(src, dest, settings_file):
         else:
             to_delete.append(filename)
 
-    for filename in to_upload:
-        with open(os.path.join(src, filename), 'rb') as fp:
-            mime = content_types.get(filename,
-                                     mimetypes.guess_type(filename)[0])
-            filename = translate_filename(filename)
-            print 'Uploading file: %s; content-type: "%s"' % (filename, mime)
-            if mime is not None:
-                bucket.put_object(Key=filename, Body=fp, ContentType=mime)
-            else:
-                bucket.put_object(Key=filename, Body=fp)
+    for data in to_upload:
+        print 'Uploading file: %s; content-type: "%s"' % (
+            data['Key'],
+            data.get('ContentType'),
+        )
+        bucket.put_object(**data)
 
     for filename in to_delete:
         print 'Deleting file:', filename
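After this change, each entry in to_upload is a dict of keyword arguments for boto3's Bucket.put_object, so the upload loop no longer needs to branch on whether a content type was guessed. A self-contained sketch of the same pattern (the bucket name and payload are made up for illustration):

import boto3

s3 = boto3.resource('s3')
bucket = s3.Bucket('example-bucket')  # hypothetical bucket name

# one dict per object, mirroring what sync_s3 builds: 'ContentType' is
# only present when a MIME type could be determined
data = {'Key': 'posts/index.xml', 'Body': b'<feed/>',
        'ContentType': 'application/atom+xml'}

# expanding the dict passes Key, Body and the optional ContentType to a
# single put_object() call
bucket.put_object(**data)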