1
0
mirror of https://github.com/bigchaindb/site.git synced 2024-11-22 01:36:55 +01:00

call aws cli from within gulp task

This commit is contained in:
Matthias Kretschmann 2018-02-02 16:42:56 +01:00
parent 59c5e7b7be
commit bc851f4a6e
Signed by: m
GPG Key ID: 606EEEF3C479A91F
2 changed files with 17 additions and 111 deletions

View File

@@ -9,42 +9,19 @@
set -e;
SOURCE="./_dist"
BUCKET_LIVE="www.bigchaindb.com"
BUCKET_BETA="beta.bigchaindb.com"
OPTIONS_DEFAULT="--delete --acl public-read"
OPTIONS_CACHING="--cache-control max-age=2592000,public"
##
## check for pull request against master
##
if [ "$TRAVIS_PULL_REQUEST" != "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then
#gulp deploy --beta;
echo "$(tput setaf 64)---------------------------------------------"
echo " Deploying to Beta "
echo "---------------------------------------------$(tput sgr0)"
aws s3 sync $SOURCE s3://$BUCKET_BETA --exclude "assets/*" $OPTIONS_DEFAULT
aws s3 sync $SOURCE s3://$BUCKET_BETA --exclude "*" --include "assets/*" $OPTIONS_DEFAULT $OPTIONS_CACHING
gulp deploy --beta;
##
## check for master push which is no pull request
##
elif [ "$TRAVIS_BRANCH" == "master" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
#gulp deploy --live;
echo "$(tput setaf 64)---------------------------------------------"
echo " Deploying to Live "
echo "---------------------------------------------$(tput sgr0)"
aws s3 sync $SOURCE s3://$BUCKET_LIVE --exclude "assets/*" $OPTIONS_DEFAULT
aws s3 sync $SOURCE s3://$BUCKET_LIVE --exclude "*" --include "assets/*" $OPTIONS_DEFAULT $OPTIONS_CACHING
gulp seo --live
gulp deploy --live;
else

View File

@@ -17,6 +17,7 @@ import request from 'request'
import uglifyjs from 'uglify-es'
import composer from 'gulp-uglify/composer'
const minify = composer(uglifyjs, console)
const cp = require('child_process')
// get all the configs: `pkg` and `site`
import pkg from './package.json'
@@ -60,9 +61,11 @@ const SRC = site.source,
DIST = site.destination
// deployment
const S3BUCKET = 'www.bigchaindb.com',
S3BUCKET_BETA = 'beta.bigchaindb.com',
S3BUCKET_GAMMA = 'gamma.bigchaindb.com'
const S3_BUCKET_LIVE = 'www.bigchaindb.com',
S3_BUCKET_BETA = 'beta.bigchaindb.com',
S3_BUCKET_GAMMA = 'gamma.bigchaindb.com',
S3_OPTIONS_DEFAULT = '--delete --acl public-read',
S3_OPTIONS_CACHING = '--cache-control max-age=2592000,public'
// SVG sprite
const SPRITECONFIG = {
@@ -120,8 +123,7 @@ export const jekyll = (done) => {
var jekyll_options = 'jekyll build --incremental --drafts --future'
}
let spawn = require('child_process').spawn,
jekyll = spawn('bundle', ['exec', jekyll_options], { stdio: 'inherit' })
const jekyll = cp.execFile('bundle', ['exec', jekyll_options], { stdio: 'inherit' })
jekyll.on('error', (error) => onError() ).on('close', done)
}
@@ -362,92 +364,19 @@ export default dev
// gulp deploy --beta
// gulp deploy --gamma
//
export const s3 = () => {
// create publisher, define config
export const s3 = (cb) => {
let S3_BUCKET_TARGET
if ($.util.env.live === true) {
var publisher = $.awspublish.create({
params: { 'Bucket': S3BUCKET },
'accessKeyId': process.env.AWS_ACCESS_KEY_ID,
'secretAccessKey': process.env.AWS_SECRET_ACCESS_KEY,
'region': process.env.AWS_DEFAULT_REGION
})
S3_BUCKET_TARGET = S3_BUCKET_LIVE
} else if ($.util.env.beta === true) {
var publisher = $.awspublish.create({
params: { 'Bucket': S3BUCKET_BETA },
'accessKeyId': process.env.AWS_ACCESS_KEY_ID,
'secretAccessKey': process.env.AWS_SECRET_ACCESS_KEY,
'region': process.env.AWS_DEFAULT_REGION
})
S3_BUCKET_TARGET = S3_BUCKET_BETA
} else if ($.util.env.gamma === true) {
var publisher = $.awspublish.create({
params: { 'Bucket': S3BUCKET_GAMMA },
'accessKeyId': process.env.AWS_ACCESS_KEY_ID,
'secretAccessKey': process.env.AWS_SECRET_ACCESS_KEY,
'region': process.env.AWS_DEFAULT_REGION
})
S3_BUCKET_TARGET = S3_BUCKET_GAMMA
}
return src(DIST + '/**/*')
.pipe($.awspublishRouter({
cache: {
// cache for 5 minutes by default
cacheTime: 300
},
routes: {
// all static assets, cached & gzipped
'^assets/(?:.+)\\.(?:js|css|png|jpg|jpeg|gif|ico|svg|ttf|eot|woff|woff2)$': {
cacheTime: 2592000, // cache for 1 month
gzip: true
},
// every other asset, cached
'^assets/.+$': {
cacheTime: 2592000 // cache for 1 month
},
// all html files, not cached & gzipped
'^.+\\.html': {
cacheTime: 0,
gzip: true
},
// all pdf files, not cached
'^.+\\.pdf': {
cacheTime: 0
},
// all zip files, not cached
'^.+\\.zip': {
cacheTime: 0
},
// font mime types
'.ttf$': {
key: '$&',
headers: { 'Content-Type': 'application/x-font-ttf' }
},
'.woff$': {
key: '$&',
headers: { 'Content-Type': 'application/x-font-woff' }
},
'.woff2$': {
key: '$&',
headers: { 'Content-Type': 'application/x-font-woff2' }
},
// pass-through for anything that wasn't matched by routes above, to be uploaded with default options
'^.+$': '$&'
}
}))
// Make sure everything goes to the root '/'
.pipe($.rename(path => {
path.dirname = '/' + path.dirname
}))
.pipe(parallelize(publisher.publish(), 100))
.pipe(publisher.sync()) // delete files in bucket that are not in local folder
.pipe($.awspublish.reporter({
states: ['create', 'update', 'delete']
}))
cp.exec(`aws s3 sync ${DIST} s3://${S3_BUCKET_TARGET} --exclude "assets/*" ${S3_OPTIONS_DEFAULT}`, (err) => cb(err))
cp.exec(`aws s3 sync ${DIST} s3://${S3_BUCKET_TARGET} --exclude "*" --include "assets/*" ${S3_OPTIONS_DEFAULT} ${S3_OPTIONS_CACHING}`, (err) => cb(err))
}