chore(ci): sync parallel demo builds via s3

perry
2017-05-10 14:28:46 -05:00
parent 3139c97571
commit 700e4c2479
4 changed files with 92 additions and 2 deletions
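
For context, the flow this commit wires up works roughly as follows: each parallel demo build uploads its compiled output to the ionic-demos bucket, and the docs deploy script pulls everything back down (gulp demos.download) before running docs.demos. Below is a minimal sketch of the shared client logic, assuming the s3 package's createClient/uploadDir/downloadDir API exactly as it appears in the diff; the syncDir helper and its signature are hypothetical and not part of the commit.

    import * as s3 from 's3';
    import * as dotenv from 'dotenv';

    dotenv.config(); // pulls AWS_KEY / AWS_SECRET from the git-ignored .env file

    // hypothetical helper: run one s3 sync (upload or download) and promisify its events
    function syncDir(direction: 'uploadDir' | 'downloadDir', localDir: string, prefix?: string): Promise<void> {
      // same silent skip as the tasks in the diff: builds without credentials still pass
      if (!process.env.AWS_KEY || !process.env.AWS_SECRET) {
        return Promise.resolve();
      }
      const client = s3.createClient({
        s3Options: {
          accessKeyId: process.env.AWS_KEY,
          secretAccessKey: process.env.AWS_SECRET
        }
      });
      const syncer = client[direction]({
        localDir,
        s3Params: { Bucket: 'ionic-demos', Prefix: prefix }
      });
      return new Promise<void>((resolve, reject) => {
        syncer.on('error', reject);        // emitted by the s3 client on failure
        syncer.on('end', () => resolve()); // emitted once the sync completes
      });
    }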

.gitignore vendored

@@ -22,6 +22,7 @@ $RECYCLE.BIN/
 .DS_Store
 Thumbs.db
 UserInterfaceState.xcuserstate
+.env
 .package.tmp.json
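
The new .env entry keeps local AWS credentials out of the repo; dotenv (added below) loads them at build time. A hypothetical example of the file's shape, using the variable names the gulp tasks read (values are placeholders):

    AWS_KEY=AKIAXXXXXXXXXXXXXXXX
    AWS_SECRET=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx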

package.json

@@ -68,6 +68,7 @@
     "del": "2.2.2",
     "dgeni": "^0.4.7",
     "dgeni-packages": "^0.16.10",
+    "dotenv": "4.0.0",
     "event-stream": "3.3.4",
     "file-loader": "0.9.0",
     "fs-extra": "^2.0.0",
@@ -120,6 +121,7 @@
     "rollup-plugin-node-resolve": "3.0.0",
     "rollup-plugin-uglify": "1.0.1",
     "run-sequence": "1.2.2",
+    "s3": "4.4.0",
     "sassdoc": "2.2.1",
     "semver": "5.3.0",
     "serve-static": "1.11.1",


@@ -19,7 +19,8 @@ function run {
   cd ..
   VERSION=$(readJsonProp "package.json" "version")
-  #compile API Demos
+  # download and copy over API Demos
+  ./node_modules/.bin/gulp demos.download
   ./node_modules/.bin/gulp docs.demos --production=true
   # if release, copy old version to separate folder and blow out docs root api


@@ -4,6 +4,7 @@ import * as glob from 'glob';
 import { task } from 'gulp';
 import * as del from 'del';
 import * as runSequence from 'run-sequence';
+import * as s3 from 's3';
 import { argv } from 'yargs';
@@ -12,6 +13,9 @@ import { createTempTsConfig, getFolderInfo, runAppScriptsBuild, writePolyfills }
 import * as pAll from 'p-all';
+import * as dotenv from 'dotenv';
+dotenv.config();
+
 task('demos.prepare', (done: Function) => {
   runSequence('demos.clean', 'demos.polyfill', 'demos.sass', (err: any) => done(err));
 });
@@ -95,9 +99,19 @@ function buildDemo(filePath: string) {
   const appNgModulePath = join(dirname(filePath), 'app.module.ts');
   const distDir = join(distTestRoot, 'www');
-  return runAppScriptsBuild(appEntryPoint, appNgModulePath, ionicAngularDir, distDir, pathToWriteFile, ionicAngularDir, sassConfigPath, copyConfigPath).then(() => {
+  return runAppScriptsBuild(
+    appEntryPoint,
+    appNgModulePath,
+    ionicAngularDir,
+    distDir,
+    pathToWriteFile,
+    ionicAngularDir,
+    sassConfigPath,
+    copyConfigPath
+  ).then(() => {
     const end = Date.now();
     console.log(`${filePath} took a total of ${(end - start) / 1000} seconds to build`);
+    uploadToS3(pathToWriteFile);
   });
 }
@@ -112,6 +126,78 @@ function chunkArrayInGroups(arr, size) {
   return result;
 }
+
+function uploadToS3(path) {
+  // fail silently if the env vars are not present
+  if (!process.env.AWS_KEY || !process.env.AWS_SECRET) {
+    return Promise.resolve();
+  }
+  const client = s3.createClient({
+    s3Options: {
+      accessKeyId: process.env.AWS_KEY,
+      secretAccessKey: process.env.AWS_SECRET
+    },
+  });
+  // get the demo name from the path
+  const demo = path.split('/')[path.split('/').length - 2];
+  const params = {
+    localDir: path.replace('tsconfig.json', ''),
+    deleteRemoved: true,
+    s3Params: {
+      Bucket: 'ionic-demos',
+      Prefix: demo,
+    },
+  };
+  const uploader = client.uploadDir(params);
+  return new Promise((resolve, reject) => {
+    uploader.on('error', function(err) {
+      console.error('s3 Upload Error:', err.stack);
+      reject(err);
+    });
+    uploader.on('end', function() {
+      console.log(`${demo} demo uploaded to s3`);
+      resolve();
+    });
+  });
+}
+
+task('demos.download', (done: Function) => {
+  if (!process.env.AWS_KEY || !process.env.AWS_SECRET) {
+    return Promise.resolve();
+  }
+  const client = s3.createClient({
+    s3Options: {
+      accessKeyId: process.env.AWS_KEY,
+      secretAccessKey: process.env.AWS_SECRET
+    },
+  });
+  const params = {
+    localDir: join(process.cwd(), 'dist', 'demos', 'src'),
+    s3Params: {
+      Bucket: 'ionic-demos',
+    },
+  };
+  const downloader = client.downloadDir(params);
+  return new Promise((resolve, reject) => {
+    downloader.on('error', function(err) {
+      console.error('s3 Download Error:', err.stack);
+      reject(err);
+    });
+    downloader.on('end', function() {
+      console.log('Demos downloaded from s3');
+      resolve();
+    });
+  });
+});
+
 task('demos.clean', (done: Function) => {
   // this is a super hack, but it works for now
   if (argv.skipClean) {