| author | Rafael G. Martins <rafael@rafaelmartins.eng.br> | 2018-12-20 01:08:54 +0100 | 
|---|---|---|
| committer | Rafael G. Martins <rafael@rafaelmartins.eng.br> | 2019-01-15 18:59:50 +0100 | 
| commit | a6edfdee8beb53aef708c7d5c90c9aeada23d6bc (patch) | |
| tree | 78fb7df592072407ce444ffc6516afd83ba9df68 | |
| parent | 982e8caf70821d883cb80d3cc0c5700ed952345b (diff) | |
| download | blogc-a6edfdee8beb53aef708c7d5c90c9aeada23d6bc.tar.gz blogc-a6edfdee8beb53aef708c7d5c90c9aeada23d6bc.tar.bz2 blogc-a6edfdee8beb53aef708c7d5c90c9aeada23d6bc.zip | |
removed blogc-github-lambda
it will be provided in a separate repository soon
| -rw-r--r-- | .gitignore | 7 |
| -rw-r--r-- | Makefile.am | 2 |
| -rwxr-xr-x | build-aux/build-static.sh | 15 |
| -rw-r--r-- | configure.ac | 1 |
| -rw-r--r-- | src/blogc-github-lambda/lambda_function.py.in | 257 |

5 files changed, 1 insertion, 281 deletions
diff --git a/.gitignore b/.gitignore
--- a/.gitignore
+++ b/.gitignore
@@ -95,13 +95,6 @@ blogc-*.rpm
 
 # scan-build reports
 *.plist
 
-# blogc-github-lambda
-/src/blogc-github-lambda/blogc
-/src/blogc-github-lambda/LICENSE
-/src/blogc-github-lambda/*.pyc
-/src/blogc-github-lambda/*.pyo
-/src/blogc-github-lambda/*.py
-
 /build/
 /root/
diff --git a/Makefile.am b/Makefile.am
index f33798d..b383833 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -27,8 +27,6 @@ EXTRA_DIST = \
 	$(NULL)
 
 CLEANFILES = \
-	src/blogc-github-lambda/LICENSE \
-	src/blogc-github-lambda/blogc \
 	$(NULL)
 
 MAINTAINERCLEANFILES = \
diff --git a/build-aux/build-static.sh b/build-aux/build-static.sh
index bf9cbf4..cacca1b 100755
--- a/build-aux/build-static.sh
+++ b/build-aux/build-static.sh
@@ -4,17 +4,4 @@ set -ex
 
 ${MAKE_CMD:-make} LDFLAGS="-all-static" blogc
 
-rm -rf root
-mkdir -p root
-
-install -m 755 blogc root/blogc
-install -m 644 src/blogc-github-lambda/lambda_function.py root/lambda_function.py
-install -m 644 ../LICENSE root/LICENSE
-strip root/blogc
-
-pushd root > /dev/null
-zip "../blogc-github-lambda-${PV}.zip" *
-popd > /dev/null
-
-install -m 755 root/blogc "blogc-static-amd64-${PV}"
-xz -z "blogc-static-amd64-${PV}"
+xz -zc blogc > "blogc-static-amd64-${PV}.xz"
diff --git a/configure.ac b/configure.ac
index 6ed9e3d..62244e1 100644
--- a/configure.ac
+++ b/configure.ac
@@ -227,7 +227,6 @@ LT_LIB_M
 AC_CONFIG_FILES([
   Makefile
   blogc.spec
-  src/blogc-github-lambda/lambda_function.py
 ])
 AC_CONFIG_FILES([tests/blogc/check_blogc.sh],
                 [chmod +x tests/blogc/check_blogc.sh])
diff --git a/src/blogc-github-lambda/lambda_function.py.in b/src/blogc-github-lambda/lambda_function.py.in
deleted file mode 100644
index 08b2e2b..0000000
--- a/src/blogc-github-lambda/lambda_function.py.in
+++ /dev/null
@@ -1,257 +0,0 @@
-# coding: utf-8
-#
-# blogc: A blog compiler.
-# Copyright (C) 2014-2018 Rafael G. Martins <rafael@rafaelmartins.eng.br>
-#
-# This program can be distributed under the terms of the BSD License.
-# See the license for details.
-#
-
-from __future__ import print_function
-
-from contextlib import closing
-from io import BytesIO
-
-import base64
-import boto3
-import hmac
-import hashlib
-import json
-import mimetypes
-import os
-import shutil
-import subprocess
-import tarfile
-import traceback
-import urllib.request
-
-BLOGC_VERSION = '@PACKAGE_VERSION@'
-
-cwd = os.path.dirname(os.path.abspath(__file__))
-os.environ['PATH'] = '%s:%s' % (cwd, os.environ.get('PATH', ''))
-
-s3 = boto3.resource('s3')
-
-GITHUB_AUTH = os.environ.get('GITHUB_AUTH')
-GITHUB_SECRET = os.environ.get('GITHUB_SECRET')
-
-if os.environ.get("ENABLE_KMS"):
-    kms = boto3.client('kms')
-
-    if GITHUB_AUTH is not None:
-        GITHUB_AUTH = kms.decrypt(
-            CiphertextBlob=base64.b64decode(GITHUB_AUTH))['Plaintext']
-
-    if GITHUB_SECRET is not None:
-        GITHUB_SECRET = kms.decrypt(
-            CiphertextBlob=base64.b64decode(GITHUB_SECRET))['Plaintext']
-else:
-    GITHUB_AUTH = GITHUB_AUTH.encode('utf-8')
-    GITHUB_SECRET = GITHUB_SECRET.encode('utf-8')
-
-
-def get_tarball(repo_name):
-    tarball_url = 'https://api.github.com/repos/%s/tarball/master' % repo_name
-    request = urllib.request.Request(tarball_url)
-
-    if GITHUB_AUTH is not None:
-        auth = base64.b64encode(GITHUB_AUTH)
-        request.add_header("Authorization", "Basic %s" % auth.decode('utf-8'))
-
-    with closing(urllib.request.urlopen(request)) as fp:
-        tarball = fp.read()
-
-    rootdir = None
-    with closing(BytesIO(tarball)) as fp:
-        with tarfile.open(fileobj=fp, mode='r:gz') as tar:
-            for f in tar.getnames():
-                if '/' not in f:
-                    rootdir = f
-                    break
-            if rootdir is None:
-                raise RuntimeError('Failed to find a directory in tarball')
-            rootdir = '/tmp/%s' % rootdir
-
-            if os.path.isdir(rootdir):
-                shutil.rmtree(rootdir)
-
-            tar.extractall('/tmp/')
-
-    return rootdir
-
-
-def translate_filename(filename):
-    f = filename.split('/')
-    if len(f) == 0:
-        return filename
-    basename = f[-1]
-
-    # replace any index.$EXT file with index.html, because s3 only allows
-    # users to declare one directory index file name.
-    p = basename.split('.')
-    if len(p) == 2 and p[0] == 'index':
-        f[-1] = 'index.html'
-        f = '/'.join(f)
-        if not os.path.exists(f):
-            return f
-
-    return filename
-
-
-def sync_s3(src, dest, settings_file):
-    settings = {}
-    if os.path.exists(settings_file):
-        with open(settings_file, 'r') as fp:
-            settings = json.load(fp)
-
-    content_types = settings.get('content-type', {})
-    dest = settings.get('bucket', dest)
-
-    bucket = s3.Bucket(dest)
-
-    remote_files = {}
-    for obj in bucket.objects.all():
-        if not obj.key.endswith('/'):
-            remote_files[obj.key] = obj
-
-    local_files = {}
-    for root, dirs, files in os.walk(src):
-        real_root = root[len(src):].lstrip('/')
-        for filename in files:
-            real_filename = os.path.join(real_root, filename)
-            data = {'Key': real_filename}
-
-            mime = content_types.get(real_filename,
-                                     mimetypes.guess_type(real_filename)[0])
-            if mime is not None:
-                data['ContentType'] = mime
-
-            with open(os.path.join(src, real_filename), 'rb') as fp:
-                data['Body'] = fp.read()
-
-            # always push the original file to its place
-            local_files[real_filename] = data
-
-            # if we need a copy on s3 for index or something, push it too
-            translated_filename = translate_filename(real_filename)
-            if translated_filename != real_filename:
-                translated_data = data.copy()
-                translated_data['Key'] = translated_filename
-                local_files[translated_filename] = translated_data
-
-    to_upload = []
-    for filename in local_files:
-        if filename not in remote_files:
-            to_upload.append(local_files[filename])
-
-    to_delete = []
-    for filename in remote_files:
-        if filename in local_files:
-            l = hashlib.sha1(local_files[filename]['Body'])
-
-            with closing(remote_files[filename].get()['Body']) as fp:
-                r = hashlib.sha1(fp.read())
-
-            if l.hexdigest() != r.hexdigest():
-                to_upload.append(local_files[filename])
-        else:
-            to_delete.append(filename)
-
-    for data in to_upload:
-        print('Uploading file: %s; content-type: "%s"' % (
-            data['Key'],
-            data.get('ContentType'),
-        ))
-        bucket.put_object(**data)
-
-    for filename in to_delete:
-        print('Deleting file:', filename)
-        remote_files[filename].delete()
-
-
-def blogc_handler(message):
-    print('blogc-github-lambda %s' % BLOGC_VERSION)
-    payload = json.loads(message)
-
-    if payload['ref'] == 'refs/heads/master':
-        print('Building: %s' % payload['repository']['full_name'])
-        debug = 'DEBUG' in os.environ
-
-        env = os.environ.copy()
-        env['BLOGC'] = os.path.join(cwd, 'blogc')
-        env['OUTPUT_DIR'] = '_build_lambda'
-
-        rootdir = get_tarball(payload['repository']['full_name'])
-        blogcfile = os.path.join(rootdir, 'blogcfile')
-
-        if os.path.isfile(blogcfile):
-            # deploy using blogc-make
-            args = [os.path.join(cwd, 'blogc'), '-m', '-f', blogcfile,
-                    'all']
-            if debug:
-                args.append('-V')
-            rv = subprocess.call(args, env=env)
-        else:
-            # fallback to using make. please note that this will break if
-            # amazon removes gnu make from lambda images
-            stream = None if debug else subprocess.PIPE
-            rv = subprocess.call(['make', '-C', rootdir], env=env,
-                                 stdout=stream, stderr=stream)
-        if rv != 0:
-            raise RuntimeError('Failed to run the build tool.')
-
-        sync_s3(os.path.join(rootdir, env['OUTPUT_DIR']),
-                payload['repository']['name'],
-                os.path.join(rootdir, 's3.json'))
-
-    else:
-        print("Commit not for master branch, skipping: %s" % payload['ref'])
-
-
-def api_gateway_response(code, message):
-    return {
-        'statusCode': code,
-        'body': json.dumps({'message': message}),
-    }
-
-
-def api_gateway_handler(event):
-    headers = event.get('headers')
-    if headers is None:
-        return api_gateway_response(400, 'NO_HEADERS')
-
-    if headers.get('X-GitHub-Event') != 'push':
-        return api_gateway_response(400, 'UNSUPPORTED_EVENT')
-
-    body = event.get('body', '')
-
-    if GITHUB_SECRET is not None:
-        sig = headers.get('X-Hub-Signature')
-        if sig is None:
-            return api_gateway_response(400, 'NO_SIGNATURE')
-
-        pieces = sig.split('=')
-        if len(pieces) != 2 or pieces[0] != 'sha1':
-            return api_gateway_response(400, 'INVALID_SIGNATURE')
-
-        digest = hmac.new(GITHUB_SECRET, body.encode('utf-8'), hashlib.sha1)
-
-        if not hmac.compare_digest(digest.hexdigest(), pieces[1]):
-            return api_gateway_response(400, 'BAD_SIGNATURE')
-
-    try:
-        blogc_handler(body)
-    except Exception as err:
-        traceback.print_exc()
-        return api_gateway_response(500, 'ERROR: %s' % err)
-
-    return api_gateway_response(202, 'ACCEPTED')
-
-
-def lambda_handler(event, context):
-    if 'Records' in event:  # sns
-        for record in event['Records']:
-            if 'Sns' in record:
-                blogc_handler(record['Sns']['Message'])
-    elif 'body' in event:  # api-gateway
-        return api_gateway_handler(event)
