Merge branch 'master' of github.com:FirebasePrivate/firebase-tools into firestore

This commit is contained in:
Michael Bleigh
2016-12-09 17:19:50 -08:00
26 changed files with 827 additions and 46 deletions

2
.gitignore vendored
View File

@@ -1,5 +1,5 @@
/.vscode
/node_modules
node_modules
/coverage
firebase-debug.log
npm-debug.log

View File

@@ -72,6 +72,81 @@ Command | Description
------- | -----------
**hosting:disable** | Stop serving Firebase Hosting traffic for the active project. A "Site Not Found" message will be displayed at your project's Hosting URL after running this command.
### Auth Commands
Command | Description
------- | -----------
**auth:import** | Batch import accounts into Firebase from a data file.
The supported file formats are [csv](#auth_csv_format) and [json](#auth_json_format). Supported hash algorithms are `HMAC_SHA512`, `HMAC_SHA256`, `HMAC_SHA1`, `HMAC_MD5`, `MD5`, `PBKDF_SHA1`, `SCRYPT`, `BCRYPT`. The hash algorithm and its related parameters must be specified as command options.
#### <a name="auth_csv_format"></a>CSV format
Every line represents a user account. There must be at least 23 columns in each line. The definition of each column is as follows. `UID` is required. If there is no value for another field, just leave that position empty. Quotation marks can also be added around any string field.
Pos. | Name | Type
-------- | ---- | ----
1 | UID | String
2 | Email | String
3 | Email Verified | Boolean
4 | Password Hash | String(Base64 encoded)
5 | Password Salt | String(Base64 encoded)
6 | Display Name | String
7 | Photo URL | String
8 | Google:ID | String
9 | Google:Email | String
10 | Google:Display Name | String
11 | Google:Photo URL | String
12 | Facebook:ID | String
13 | Facebook:Email | String
14 | Facebook:Display Name | String
15 | Facebook:Photo URL | String
16 | Twitter:ID | String
17 | Twitter:Email | String
18 | Twitter:Display Name | String
19 | Twitter:Photo URL | String
20 | Github:ID | String
21 | Github:Email | String
22 | Github:Display Name | String
23 | Github:Photo URL | String
**<a name="example_account"></a>Example:**
```
111, test@test.org, false, Jlf7onfLbzqPNFP/1pqhx6fQF/w=, c2FsdC0x, Test User, http://photo.com/123, , , , , 123, test@test.org, Test FB User, http://photo.com/456, , , , , , , , ,
```
Note: spaces around commas are trimmed automatically. `Jlf7onfLbzqPNFP/1pqhx6fQF/w=` is the base64-encoded string of the HMAC_SHA1-hashed password and salt ('salt-1' in this example; 'c2FsdC0x' is the base64 encoding of 'salt-1').
#### <a name="auth_json_format"></a>JSON format
The JSON file should look like this:
```js
{
"users": [
{
"localId": "111",
"email": "test@test.org",
"emailVerified": false,
"passwordHash": "Jlf7onfLbzqPNFP/1pqhx6fQF/w=",
"salt": "c2FsdC0x",
"displayName": "Test User",
"photoUrl": "http://photo.com/123",
"providerUserInfo": [ {
"providerId": "facebook.com",
"rawId": "123",
"email": "test@test.org",
"displayName": "Test FB User",
"photoUrl": "http://photo.com/456"
} ]
}, {
...
},
...
]
}
```
The first element of `users` in the above JSON object represents the [example user account](#example_account) from the previous section. All user accounts should be put in `users`. `localId` corresponds to `UID` and is required. `providerId` must be one of "google.com", "facebook.com", "github.com", and "twitter.com".
## Using with CI Systems
The Firebase CLI requires a browser to complete authentication, but is fully

100
commands/auth-import.js Normal file
View File

@@ -0,0 +1,100 @@
'use strict';
var RSVP = require('rsvp');
var csv = require('csv-streamify');
var chalk = require('chalk');
var fs = require('fs');
var jsonStream = require('JSONStream');
var _ = require('lodash');
var Command = require('../lib/command');
var accountImporter = require('../lib/accountImporter');
var getProjectId = require('../lib/getProjectId');
var logger = require('../lib/logger');
var requireAuth = require('../lib/requireAuth');
var utils = require('../lib/utils');
var MAX_BATCH_SIZE = 1000;
var validateOptions = accountImporter.validateOptions;
var validateUserJson = accountImporter.validateUserJson;
var transArrayToUser = accountImporter.transArrayToUser;
var serialImportUsers = accountImporter.serialImportUsers;
// Registers `firebase auth:import [dataFile]`: streams a .csv or .json file
// of user accounts, groups them into batches of MAX_BATCH_SIZE, and uploads
// the batches serially via accountImporter.serialImportUsers.
module.exports = new Command('auth:import [dataFile]')
  .description('import users into your Firebase project from a data file(.csv or .json)')
  .option('--hash-algo <hashAlgo>', 'specify the hash algorithm used in password for these accounts')
  .option('--hash-key <hashKey>', 'specify the key used in hash algorithm')
  .option('--rounds <rounds>', 'specify how many rounds for hash calculation.')
  .option('--mem-cost <memCost>', 'specify the memory cost for hash calculation.')
  .before(requireAuth)
  .action(function(dataFile, options) {
    var projectId = getProjectId(options);
    // validateOptions returns either a rejected promise (invalid flags) or a
    // plain options object; a promise has no `hashAlgo` property, so this
    // forwards the rejection to the caller.
    var checkRes = validateOptions(options);
    if (!checkRes.hashAlgo) {
      return checkRes;
    }
    var hashOptions = checkRes;
    if (!_.endsWith(dataFile, '.csv') && !_.endsWith(dataFile, '.json')) {
      return utils.reject('Data file must end with .csv or .json', {exit: 1});
    }
    var stats = fs.statSync(dataFile);
    var fileSizeInBytes = stats.size;
    logger.info('Processing ' + chalk.bold(dataFile) + ' (' + fileSizeInBytes + ' bytes)');
    var inStream = fs.createReadStream(dataFile);
    var batches = [];       // completed batches of MAX_BATCH_SIZE users
    var currentBatch = [];  // batch currently being filled
    return new RSVP.Promise(function(resolve, reject) {
      var parser;
      var counter = 0; // 1-based line / element counter for error messages
      if (dataFile.endsWith('.csv')) {
        parser = csv({objectMode: true});
        parser.on('data', function(line) {
          counter++;
          if (line.length < 23) {
            return reject('Line ' + counter + ' must have 23 columns.');
          }
          // Trim each cell and strip one pair of surrounding quote
          // characters. NOTE(review): the character class ["|'] also matches
          // a literal '|', so |-wrapped values would be unquoted too.
          var user = transArrayToUser(line.map(
            function(str) {
              return str.trim().replace(/^["|'](.*)["|']$/, '$1');
            }));
          currentBatch.push(user);
          if (currentBatch.length === MAX_BATCH_SIZE) {
            batches.push(currentBatch);
            currentBatch = [];
          }
        }).on('end', function() {
          if (currentBatch.length) {
            batches.push(currentBatch);
          }
          return resolve(batches);
        });
        inStream.pipe(parser);
      } else {
        // JSON path: stream each element of the top-level `users` array.
        parser = jsonStream.parse(['users', {emitKey: true}]);
        parser.on('data', function(pair) {
          counter++;
          var res = validateUserJson(pair.value);
          if (res.error) {
            return reject(res.error);
          }
          currentBatch.push(pair.value);
          if (currentBatch.length === MAX_BATCH_SIZE) {
            batches.push(currentBatch);
            currentBatch = [];
          }
        }).on('end', function() {
          if (currentBatch.length) {
            batches.push(currentBatch);
          }
          return resolve(batches);
        });
        inStream.pipe(parser);
      }
    }).then(function(userListArr) {
      if (userListArr.length) {
        return serialImportUsers(projectId, hashOptions, userListArr, 0);
      }
    });
  });

View File

@@ -9,6 +9,10 @@ module.exports = function(client) {
return cmd.runner();
};
client.auth = {
upload: loadCommand('auth-import')
};
client.database = {
get: loadCommand('database-get'),
push: loadCommand('database-push'),

View File

@@ -25,6 +25,10 @@ var paths = {
tests: [
'test/**/*.spec.js'
],
scripts: [
'scripts/*.js'
]
};
@@ -34,7 +38,7 @@ var paths = {
/***********/
// Lints the JavaScript files
gulp.task('lint', function() {
var filesToLint = _.union(paths.js, paths.tests);
var filesToLint = _.union(paths.js, paths.tests, paths.scripts);
return gulp.src(filesToLint)
.pipe(eslint())
.pipe(eslint.format())

182
lib/accountImporter.js Normal file
View File

@@ -0,0 +1,182 @@
'use strict';
var chalk = require('chalk');
var _ = require('lodash');
var api = require('../lib/api');
var logger = require('../lib/logger');
var utils = require('../lib/utils');
var ALLOWED_JSON_KEYS = ['localId', 'email', 'emailVerified', 'passwordHash', 'salt', 'displayName', 'photoUrl', 'providerUserInfo'];
var ALLOWED_PROVIDER_USER_INFO_KEYS = ['providerId', 'rawId', 'email', 'displayName', 'photoUrl'];
var ALLOWED_PROVIDER_IDS = ['google.com', 'facebook.com', 'twitter.com', 'github.com'];
// Encode `data` as base64 and swap the two URL-unsafe characters
// ('/' -> '_', '+' -> '-') to produce a web-safe base64 string.
var _toWebSafeBase64 = function(data) {
  var encoded = data.toString('base64');
  return encoded.replace(/[/+]/g, function(ch) {
    return ch === '/' ? '_' : '-';
  });
};
// Append one federated-provider record to user.providerUserInfo, but only
// when the provider's raw id (arr[0]) is non-empty.
var _addProviderUserInfo = function(user, providerId, arr) {
  var rawId = arr[0];
  if (!rawId) {
    return;
  }
  user.providerUserInfo.push({
    providerId: providerId,
    rawId: rawId,
    email: arr[1],
    displayName: arr[2],
    photoUrl: arr[3]
  });
};
// Build the JSON request body for the identitytoolkit uploadAccount call:
// web-safe-base64 the password hash/salt of each account, then attach the
// hash options and target project id.
var _genUploadAccountPostBody = function(projectId, accounts, hashOptions) {
  var postBody = {
    users: accounts.map(
      function(account) {
        // Copy before re-encoding so the caller's account objects are not
        // mutated — the originals are reused for error reporting in
        // _sendRequest, and the previous in-place assignment clobbered them.
        var encoded = _.assign({}, account);
        if (encoded.passwordHash) {
          encoded.passwordHash = _toWebSafeBase64(encoded.passwordHash);
        }
        if (encoded.salt) {
          encoded.salt = _toWebSafeBase64(encoded.salt);
        }
        return encoded;
      })
  };
  if (hashOptions.hashAlgo) {
    postBody.hashAlgorithm = hashOptions.hashAlgo;
  }
  if (hashOptions.hashKey) {
    postBody.signerKey = _toWebSafeBase64(hashOptions.hashKey);
  }
  if (hashOptions.rounds) {
    postBody.rounds = hashOptions.rounds;
  }
  if (hashOptions.memCost) {
    postBody.memoryCost = hashOptions.memCost;
  }
  postBody.targetProjectId = projectId;
  return postBody;
};
// Convert one 23-column CSV row (see README auth_csv_format) into an
// uploadAccount user record. Columns 8-23 hold four provider blocks of
// four columns each (id, email, display name, photo URL).
var transArrayToUser = function(arr) {
  var user = {
    localId: arr[0],
    email: arr[1],
    emailVerified: arr[2] === 'true',
    passwordHash: arr[3],
    salt: arr[4],
    displayName: arr[5],
    photoUrl: arr[6],
    providerUserInfo: []
  };
  // [providerId, starting column] for each four-column provider block.
  var providerColumns = [
    ['google.com', 7],
    ['facebook.com', 11],
    ['twitter.com', 15],
    ['github.com', 19]
  ];
  providerColumns.forEach(function(entry) {
    _addProviderUserInfo(user, entry[0], arr.slice(entry[1], entry[1] + 4));
  });
  return user;
};
// Validate the hash-related CLI flags and normalize them into the options
// object consumed by _genUploadAccountPostBody. Returns the normalized
// object on success, or a rejection (via utils.reject) describing the
// missing/out-of-range flag.
var validateOptions = function(options) {
  if (!options.hashAlgo) {
    return utils.reject('Must provide hash algorithm');
  }
  var hashAlgo = options.hashAlgo.toUpperCase();
  var hmacAlgos = ['HMAC_SHA512', 'HMAC_SHA256', 'HMAC_SHA1', 'HMAC_MD5'];
  if (hmacAlgos.indexOf(hashAlgo) !== -1) {
    // All HMAC variants require the signer key.
    if (!options.hashKey || options.hashKey === '') {
      return utils.reject('Must provide hash key(base64 encoded) for hash algorithm ' + options.hashAlgo, {exit: 1});
    }
    return {hashAlgo: hashAlgo, hashKey: options.hashKey};
  }
  if (hashAlgo === 'MD5' || hashAlgo === 'PBKDF_SHA1') {
    var mdRounds = parseInt(options.rounds, 10);
    if (isNaN(mdRounds) || mdRounds < 0 || mdRounds > 8192) {
      return utils.reject('Must provide valid rounds(0..8192) for hash algorithm ' + options.hashAlgo, {exit: 1});
    }
    return {hashAlgo: hashAlgo, rounds: options.rounds};
  }
  if (hashAlgo === 'SCRYPT') {
    var scryptRounds = parseInt(options.rounds, 10);
    if (isNaN(scryptRounds) || scryptRounds <= 0 || scryptRounds > 8) {
      return utils.reject('Must provide valid rounds(1..8) for hash algorithm ' + options.hashAlgo, {exit: 1});
    }
    var memCost = parseInt(options.memCost, 10);
    if (isNaN(memCost) || memCost <= 0 || memCost > 14) {
      return utils.reject('Must provide valid memory cost(1..14) for hash algorithm ' + options.hashAlgo, {exit: 1});
    }
    return {hashAlgo: hashAlgo, rounds: options.rounds, memCost: options.memCost};
  }
  if (hashAlgo === 'BCRYPT') {
    return {hashAlgo: hashAlgo};
  }
  return utils.reject('Unsupported hash algorithm ' + chalk.bold(options.hashAlgo));
};
// Check a single providerUserInfo entry: providerId must be one of the four
// supported IdPs and only whitelisted keys may be present. Returns {} on
// success or {error: <message>} for the first problem found.
var _validateProviderUserInfo = function(providerUserInfo) {
  var pretty = JSON.stringify(providerUserInfo, null, 2);
  if (ALLOWED_PROVIDER_IDS.indexOf(providerUserInfo.providerId) === -1) {
    return {error: pretty + ' has unsupported providerId'};
  }
  var unexpected = _.difference(_.keys(providerUserInfo), ALLOWED_PROVIDER_USER_INFO_KEYS);
  if (unexpected.length) {
    return {error: pretty + ' has unsupported keys: ' + unexpected.join(',')};
  }
  return {};
};
// Validate one user record from a JSON data file: only whitelisted top-level
// keys, and every providerUserInfo entry must pass _validateProviderUserInfo.
// Returns {} on success or {error: <message>} for the first problem found.
var validateUserJson = function(userJson) {
  var unexpected = _.difference(_.keys(userJson), ALLOWED_JSON_KEYS);
  if (unexpected.length) {
    return {error: JSON.stringify(userJson, null, 2) + ' has unsupported keys: ' + unexpected.join(',')};
  }
  var infos = userJson.providerUserInfo || [];
  for (var i = 0; i < infos.length; i++) {
    var res = _validateProviderUserInfo(infos[i]);
    if (res.error) {
      return res;
    }
  }
  return {};
};
// POST one batch of accounts to the identitytoolkit uploadAccount endpoint.
// A 200 response may still carry per-account failures in body.error; those
// are logged together with the offending account but do not fail the upload.
var _sendRequest = function(projectId, userList, hashOptions) {
  logger.info('Starting importing ' + userList.length + ' account(s).');
  return api.request('POST', '/identitytoolkit/v3/relyingparty/uploadAccount', {
    auth: true,
    json: true,
    data: _genUploadAccountPostBody(projectId, userList, hashOptions),
    origin: api.googleOrigin
  }).then(function(ret) {
    if (ret.body.error) {
      logger.info('Encountered problems while importing accounts. Details:');
      logger.info(ret.body.error.map(
        function(rawInfo) {
          // rawInfo.index points back into this batch's userList.
          return {
            account: JSON.stringify(userList[parseInt(rawInfo.index, 10)], null, 2),
            reason: rawInfo.message
          };
        }));
    } else {
      utils.logSuccess('Imported successfully.');
    }
    logger.info(); // blank line between batches
  });
};
// Upload the batches in userListArr one after another, starting at `index`,
// chaining each request on completion of the previous one.
var serialImportUsers = function(projectId, hashOptions, userListArr, index) {
  var uploadNext = function() {
    var following = index + 1;
    if (following < userListArr.length) {
      return serialImportUsers(projectId, hashOptions, userListArr, following);
    }
  };
  return _sendRequest(projectId, userListArr[index], hashOptions).then(uploadNext);
};
// Public surface: option validation, JSON-record validation, CSV-row
// conversion, and the serial batch uploader used by commands/auth-import.js.
var accountImporter = {
  validateOptions: validateOptions,
  validateUserJson: validateUserJson,
  transArrayToUser: transArrayToUser,
  serialImportUsers: serialImportUsers
};
module.exports = accountImporter;

View File

@@ -100,6 +100,7 @@ Config.prototype._materialize = function(target) {
} else if (_.isObject(val)) {
if (target === 'database.rules') {
this.notes.databaseRules = 'inline';
return JSON.stringify({ 'rules': val }, null, 2);
}
return val;
}

View File

@@ -4,7 +4,7 @@ var api = require('../../api');
var utils = require('../../utils');
module.exports = function(projectId) {
return api.request('GET', '/v1/projects/' + encodeURIComponent(projectId) + '/billingInfo', {
return api.request('GET', '/v1/projects/' + projectId + '/billingInfo', {
auth: true,
origin: api.billingOrigin
}).then(function(response) {

View File

@@ -58,6 +58,8 @@ module.exports = function(context, options, payload) {
return RSVP.all([
ensureApiEnabled(options.project, 'cloudfunctions', 'functions'),
env.ensureSetup(options)
]);
]).then(function(results) {
_.set(context, 'firebaseConfig', results[1].reserved.firebase);
});
});
};

View File

@@ -8,7 +8,6 @@ var logger = require('../../logger');
var FirebaseError = require('../../error');
var chalk = require('chalk');
var api = require('../../api');
var Spinner = require('cli-spinner').Spinner;
module.exports = function(context, options, payload) {
if (!options.config.has('functions')) {
@@ -19,7 +18,8 @@ module.exports = function(context, options, payload) {
var PROVIDER_SERVICE_ACCOUNT = utils.envOverride('FIREBASE_PROVIDER_SERVICE_ACCOUNT', '176829341474-s3rdr7brhks3ihq8735pcih7sbpdkdvv@developer.gserviceaccount.com');
var GCP_REGION = 'us-central1';
var projectId = context.projectId;
var sourceUrl = 'gs://' + projectId + '-gcf/' + projectId;
var bucketName = gcp.storage.buckets.name(projectId);
var sourceUrl = 'gs://' + bucketName + '/' + projectId;
var functionsInfo = payload.functions.triggers;
@@ -175,6 +175,9 @@ module.exports = function(context, options, payload) {
});
case 'cloud.storage':
var bucket = functionInfo.bucket;
if (!bucket) {
bucket = _.get(context, 'firebaseConfig.storageBucket');
}
functionTrigger = {gcsTrigger: 'gs://' + bucket + '/'};
return RSVP.resolve(functionTrigger);
case 'cloud.http':
@@ -224,17 +227,12 @@ module.exports = function(context, options, payload) {
return RSVP.allSettled([].concat(addOps, updateOps, deleteOps));
}).then(function(allOps) {
var spinner = new Spinner(chalk.bold.cyan('%s functions: ') + 'deploy in progress...');
spinner.setSpinnerString(19);
spinner.setSpinnerDelay(100);
spinner.start();
var failedCalls = _.chain(allOps).filter({'state': 'rejected'}).map('reason').value();
var successfulCalls = _.chain(allOps).filter({'state': 'fulfilled'}).map('value').value();
var fetch = _fetchTriggerUrls(successfulCalls);
var poll = _pollOperations(successfulCalls);
return RSVP.allSettled([fetch, poll]).then(function() {
spinner.stop(true);
return _reportResults(successfulCalls, failedCalls);
});
}).catch(function() {

View File

@@ -122,22 +122,25 @@ function ensureMeta(projectId, instance) {
var fetchKeyPromise = gcp.apikeys.getServerKey(projectId);
return RSVP.all([fetchVariablePromise, fetchBucketPromise, fetchKeyPromise]).then(function(results) {
var meta = results[0] || {};
var firebaseConfig = {
databaseURL: utils.addSubdomain(api.realtimeOrigin, instance),
storageBucket: results[1],
apiKey: results[2],
authDomain: instance + '.firebaseapp.com'
};
var value = _.assign({}, meta, {
reserved: {
firebase: {
databaseURL: utils.addSubdomain(api.realtimeOrigin, instance),
storageBucket: results[1],
apiKey: results[2],
authDomain: instance + '.firebaseapp.com'
}
firebase: firebaseConfig
}
});
if (_.isEqual(meta, value)) {
return RSVP.resolve();
return RSVP.resolve(value);
}
return setVariable(projectId, 'meta', value);
return setVariable(projectId, 'meta', value).then(function() {
return RSVP.resolve(value);
});
});
}

View File

@@ -7,7 +7,11 @@ var logger = require('../logger');
var version = 'v1';
function _bucketName(projectId) {
return projectId + '-gcf';
// Follow naming rules in
// https://cloud.google.com/storage/docs/naming#requirements
// (Replace colon and dot in G Suite accounts with dashes. Replace
// disallowed "google" substring.)
return projectId.replace(/[\.:]/g, '-').replace(/google/g, 'g00g1e') + '-gcf';
}
function _getBucket(projectId) {

View File

@@ -20,7 +20,7 @@ module.exports = function(setup, config) {
logger.info();
logger.info('Your ' + chalk.bold('public') + ' directory is the folder (relative to your project directory) that');
logger.info('will contain Hosting assets to uploaded with ' + chalk.bold('firebase deploy') + '. If you');
logger.info('will contain Hosting assets to be uploaded with ' + chalk.bold('firebase deploy') + '. If you');
logger.info('have a build process for your assets, use your build\'s output directory.');
logger.info();

View File

@@ -6,7 +6,7 @@ var RSVP = require('rsvp');
var _ = require('lodash');
var configstore = require('./configstore');
var pkg = require('../package.json');
var uuid = require('node-uuid');
var uuid = require('uuid');
var logger = require('./logger');
var anonId = configstore.get('analytics-uuid');

View File

@@ -43,42 +43,38 @@
],
"homepage": "https://github.com/firebase/firebase-tools",
"dependencies": {
"JSONStream": "^1.2.1",
"archiver": "^0.16.0",
"chalk": "^1.1.0",
"cjson": "^0.3.1",
"cli-spinner": "^0.2.5",
"cli-table": "^0.3.1",
"commander": "^2.8.1",
"concat-stream": "^1.5.0",
"configstore": "^1.2.0",
"cross-spawn": "^4.0.0",
"csv-streamify": "^3.0.4",
"didyoumean": "^1.2.1",
"exit-code": "^1.0.2",
"filesize": "^3.1.3",
"firebase": "2.x.x",
"fs-extra": "^0.23.1",
"fstream-ignore": "^1.0.2",
"googleapis": "^2.1.7",
"inquirer": "^0.12.0",
"jsonschema": "^1.0.2",
"jsonwebtoken": "^5.4.0",
"lodash": "^4.6.1",
"node-uuid": "^1.4.3",
"open": "^0.0.5",
"portfinder": "^0.4.0",
"progress": "^1.1.8",
"request": "^2.58.0",
"rsvp": "^3.0.18",
"semver": "^5.0.3",
"shelljs": "^0.5.3",
"superstatic": "^4.0",
"tar": "^2.2.0",
"through": "^2.3.8",
"tmp": "0.0.27",
"universal-analytics": "^0.3.9",
"update-notifier": "^0.5.0",
"user-home": "^2.0.0",
"uuid": "^2.0.1",
"uuid": "^3.0.0",
"winston": "^1.0.1"
},
"devDependencies": {

View File

@@ -0,0 +1,28 @@
// Deployable fixture functions for the integration test, one per trigger
// type checked by test-functions-deploy.js: database, nested database,
// https, pubsub, and cloud storage.
var functions = require('firebase-functions');
// Echo any write under /input/{uuid} to /output/{uuid}.
exports.dbAction = functions.database().path('/input/{uuid}').onWrite(function(event) {
  return event.data.ref.root.child('output/' + event.params.uuid).set(event.data.val());
});
// Same as dbAction but exported under a nested key, to verify that nested
// exports are discovered during deploy.
exports.nested = {
  dbAction: functions.database().path('/inputNested/{uuid}').onWrite(function(event) {
    return event.data.ref.root.child('output/' + event.params.uuid).set(event.data.val());
  })
};
// Echo the request body back to the caller.
exports.httpsAction = functions.cloud.https().onRequest(function(req, res) {
  res.send(req.body);
});
// Ack a pubsub message by writing its payload under /output.
exports.pubsubAction = functions.cloud.pubsub('topic1').onPublish(function(event) {
  var uuid = event.data.json;
  var app = functions.app;
  return app.database().ref('output/' + uuid).set(uuid);
});
// Ack a storage object change by writing the object name under /output.
exports.gcsAction = functions.cloud.storage('functions-integration-test.appspot.com')
  .onChange(function(event) {
    var uuid = event.data.data.name;
    var app = functions.app;
    return app.database().ref('output/' + uuid).set(uuid);
  });

14
scripts/package.json Normal file
View File

@@ -0,0 +1,14 @@
{
"name": "firebase-functions-integration-test",
"version": "1.0.0",
"description": "Integration test for deploying Firebase functions",
"main": "test-functions-deploy.js",
"scripts": {
"test": "node test-functions-deploy.js"
},
"author": "Firebase",
"license": "MIT",
"dependencies": {
"firebase": "^3.5.0"
}
}

View File

@@ -0,0 +1,203 @@
#!/usr/bin/env node
'use strict';
var expect = require('chai').expect;
var execSync = require('child_process').execSync;
var exec = require('child_process').exec;
var tmp = require('tmp');
var _ = require('lodash');
var fs = require('fs-extra');
var cloudfunctions = require('../lib/gcp/cloudfunctions');
var api = require('../lib/api');
var scopes = require('../lib/scopes');
var configstore = require('../lib/configstore');
var extractTriggers = require('../lib/extractTriggers');
var RSVP = require('rsvp');
var chalk = require('chalk');
var firebase = require('firebase');
var functionsSource = __dirname + '/assets/functions_to_test.js';
var projectDir = __dirname + '/test-project';
var projectId = 'functions-integration-test';
var httpsTrigger = 'https://us-central1-functions-integration-test.cloudfunctions.net/httpsAction';
var region = 'us-central1';
var localFirebase = __dirname + '/../bin/firebase';
var TIMEOUT = 40000;
var tmpDir;
var app;
// Load the functions entrypoint from the temp project and return the names
// of the triggers it exports (via extractTriggers).
var parseFunctionsList = function() {
  var triggers = [];
  extractTriggers(require(tmpDir + '/functions'), triggers);
  return _.map(triggers, 'name');
};
// Generate a pseudo-random numeric id (as a string, < 1e11) for tagging
// test writes. Not cryptographically random; collisions are unlikely
// enough for a test run.
var getUuid = function() {
  var n = Math.random() * 100000000000;
  return String(Math.floor(n));
};
// One-time setup: copy the fixture project into a temp dir, install its
// functions' dependencies, authenticate the api module with the locally
// stored refresh token, and initialize the firebase client app.
var preTest = function() {
  var dir = tmp.dirSync({prefix: 'fntest_'});
  tmpDir = dir.name;
  fs.copySync(projectDir, tmpDir);
  execSync('npm install', {'cwd': tmpDir + '/functions'});
  // Reuse the CLI's stored credentials with cloud-platform scope.
  api.setToken(configstore.get('tokens').refresh_token);
  api.setScopes(scopes.CLOUD_PLATFORM);
  var config = {
    apiKey: 'AIzaSyCLgng7Qgzf-2UKRPLz--LtLLxUsMK8oco',
    authDomain: 'functions-integration-test.firebaseapp.com',
    databaseURL: 'https://functions-integration-test.firebaseio.com',
    storageBucket: 'functions-integration-test.appspot.com'
  };
  app = firebase.initializeApp(config);
  console.log('Done pretest prep.');
};
// Clean up after the test run: clear the test database, delete the temp
// project directory, then exit.
var postTest = function() {
  // Run the database cleanup BEFORE deleting tmpDir. The original order
  // started an asynchronous fs.remove(tmpDir) and then ran execSync with
  // tmpDir as its cwd, racing against the directory's own deletion.
  execSync(localFirebase + ' database:remove / -y', {'cwd': tmpDir});
  fs.removeSync(tmpDir);
  console.log('Done post-test cleanup.');
  process.exit();
};
// Assert that the set of functions currently deployed to the project
// exactly matches expectedFunctions (order-insensitive, via _.xor).
var checkFunctionsListMatch = function(expectedFunctions) {
  return cloudfunctions.list(projectId, region).then(function(result) {
    var deployedFunctions = _.map(result, 'functionName');
    expect(_.isEmpty(_.xor(expectedFunctions, deployedFunctions))).to.be.true;
    return true;
  }).catch(function(err) {
    // NOTE(review): this converts a list failure into a (throwing) chai
    // assertion inside the catch handler rather than rethrowing `err`.
    expect(err).to.be.null;
  });
};
// Copy the fixture functions into the temp project, deploy, and verify the
// deployed function list matches what the fixture exports. Used for both
// the "create" and "update" phases (second call redeploys the same source).
var testCreateUpdate = function() {
  fs.copySync(functionsSource, tmpDir + '/functions/index.js');
  return new RSVP.Promise(function(resolve) {
    exec(localFirebase + ' deploy', {'cwd': tmpDir}, function(err, stdout) {
      console.log(stdout);
      expect(err).to.be.null;
      resolve(checkFunctionsListMatch(parseFunctionsList()));
    });
  });
};
// Truncate the functions entrypoint (shell `>` redirect) and redeploy,
// which should delete every deployed function; verify the list is empty.
var testDelete = function() {
  return new RSVP.Promise(function(resolve) {
    exec('> functions/index.js &&' + localFirebase + ' deploy', {'cwd': tmpDir}, function(err, stdout) {
      console.log(stdout);
      expect(err).to.be.null;
      resolve(checkFunctionsListMatch([]));
    });
  });
};
// Wait until a triggered function writes an ack under /output/<uuid>, or
// reject after TIMEOUT ms with a message naming the test that timed out.
var waitForAck = function(uuid, testDescription) {
  return Promise.race([
    new Promise(function(resolve) {
      var ref = firebase.database().ref('output').child(uuid);
      var listener = ref.on('value', function(snap) {
        if (snap.exists()) {
          ref.off('value', listener);
          resolve();
        }
      });
    }),
    new Promise(function(resolve, reject) {
      // NOTE(review): on timeout the value listener above stays attached
      // and the timer is never cleared on success; harmless here because
      // the process exits via postTest.
      setTimeout(function() {
        reject('Timed out while waiting for output from ' + testDescription);
      }, TIMEOUT);
    })
  ]);
};
// Write {'foo': 'bar'} under a fresh uuid at `path` and resolve with that
// uuid so the caller can wait for the triggered function's ack.
var writeToDB = function(path) {
  var uuid = getUuid();
  var target = app.database().ref(path).child(uuid);
  return target.set({'foo': 'bar'}).then(function() {
    return RSVP.resolve(uuid);
  });
};
// POST `message` to the deployed https function and assert it is echoed
// back unchanged with status 200.
var sendHttpRequest = function(message) {
  return api.request('POST', httpsTrigger, {
    data: message,
    origin: ''
  }).then(function(resp) {
    expect(resp.status).to.equal(200);
    expect(resp.body).to.deep.equal(message);
  });
};
// Publish a fresh uuid (base64-encoded, as the pubsub REST API requires)
// to `topic` and resolve with the uuid so the caller can wait for the
// triggered function's ack.
var publishPubsub = function(topic) {
  var uuid = getUuid();
  // Buffer.from replaces the deprecated, unsafe `new Buffer(string)`
  // constructor; output is identical for string input.
  var message = Buffer.from(uuid).toString('base64');
  return api.request('POST', '/v1/projects/functions-integration-test/topics/' + topic + ':publish', {
    auth: true,
    data: {'messages': [
      {'data': message}
    ]},
    origin: 'https://pubsub.googleapis.com'
  }).then(function(resp) {
    expect(resp.status).to.equal(200);
    return RSVP.resolve(uuid);
  });
};
// Upload a small text object named after a fresh uuid to the project's
// default storage bucket (media upload) and resolve with the uuid so the
// caller can wait for the storage-triggered function's ack.
var saveToStorage = function() {
  var uuid = getUuid();
  var contentLength = Buffer.byteLength(uuid, 'utf8');
  var resource = ['b', projectId + '.appspot.com', 'o'].join('/');
  var endpoint = '/upload/storage/v1/' + resource + '?uploadType=media&name=' + uuid;
  return api.request('POST', endpoint, {
    auth: true,
    headers: {
      'Content-Type': 'text/plain',
      'Content-Length': contentLength
    },
    data: uuid,
    json: false,
    origin: api.googleOrigin
  }).then(function(resp) {
    expect(resp.status).to.equal(200);
    return RSVP.resolve(uuid);
  });
};
// Fire all five trigger types in parallel (database, nested database,
// https, pubsub, storage) and wait for each function's ack under /output.
var testFunctionsTrigger = function() {
  var checkDbAction = writeToDB('input').then(function(uuid) {
    return waitForAck(uuid, 'database triggered function');
  });
  var checkNestedDbAction = writeToDB('inputNested').then(function(uuid) {
    return waitForAck(uuid, 'nested database triggered function');
  });
  var checkHttpsAction = sendHttpRequest({'message': 'hello'});
  var checkPubsubAction = publishPubsub('topic1').then(function(uuid) {
    return waitForAck(uuid, 'pubsub triggered function');
  });
  var checkGcsAction = saveToStorage().then(function(uuid) {
    return waitForAck(uuid, 'storage triggered function');
  });
  return RSVP.all([checkDbAction, checkNestedDbAction, checkHttpsAction, checkPubsubAction, checkGcsAction]);
};
// Run the integration suite end-to-end: create, update, trigger, delete,
// then clean up. A failure in any phase is logged and reflected in the
// process exit code.
var main = function() {
  preTest();
  testCreateUpdate().then(function() {
    console.log(chalk.green('\u2713 Test passed: creating functions'));
    return testCreateUpdate();
  }).then(function() {
    console.log(chalk.green('\u2713 Test passed: updating functions'));
    return testFunctionsTrigger();
  }).then(function() {
    console.log(chalk.green('\u2713 Test passed: triggering functions'));
    return testDelete();
  }).then(function() {
    console.log(chalk.green('\u2713 Test passed: deleting functions'));
  }).catch(function(err) {
    console.log(chalk.red('Error while running tests: '), err);
    // Originally the error was swallowed and the script exited 0 even when
    // a test failed; surface the failure via the exit code instead.
    // process.exit() in postTest honors process.exitCode.
    process.exitCode = 1;
    return RSVP.resolve();
  }).then(postTest);
};
main();

View File

@@ -0,0 +1,5 @@
{
"projects": {
"default": "functions-integration-test"
}
}

View File

@@ -0,0 +1,6 @@
{
"rules": {
".read": true,
".write": true
}
}

View File

@@ -0,0 +1,8 @@
{
"database": {
"rules": "database.rules.json"
},
"hosting": {
"public": "public"
}
}

View File

@@ -0,0 +1,8 @@
{
"name": "functions",
"description": "Firebase Functions",
"dependencies": {
"firebase": "^3.1",
"firebase-functions": "https://storage.googleapis.com/firebase-preview-drop/node/firebase-functions/firebase-functions-preview.latest.tar.gz"
}
}

View File

@@ -0,0 +1,4 @@
<!doctype html>
<html>
</html>

View File

@@ -0,0 +1,7 @@
<!DOCTYPE html>
<html>
<head>
</head>
<body>
</body>
</html>

View File

@@ -1,19 +1,8 @@
var functions = require('firebase-functions');
// // Uppercases the value of the data when a write event occurs for
// // child nodes of '/uppercase' in the Firebase Realtime Database.
// //
// // Documentation: https://firebase.google.com/preview/functions
//
// exports.makeUpperCase = functions.database().path('/uppercase/{childId}')
// .onWrite(event => {
// // For an explanation of this code, see "Handle Database Events"
// var written = event.data.val();
// console.log("Uppercasing", event.params.childId, written);
// var uppercase = written.toUpperCase()
// // Don't do anything if val() was already upper cased.
// if (written == uppercase) {
// return null;
// }
// return event.data.ref.set(uppercase);
// });
// // Start writing Firebase Functions
// // https://firebase.google.com/preview/functions/write-firebase-functions
//
// exports.helloWorld = functions.https().onRequest((request, response) => {
// response.send("Hello from Firebase!");
// })

View File

@@ -0,0 +1,140 @@
'use strict';
var chai = require('chai');
var sinon = require('sinon');
var api = require('../../lib/api');
var accountImporter = require('../../lib/accountImporter');
var helpers = require('../helpers');
var expect = chai.expect;
// Unit tests for lib/accountImporter: hash-option validation, user-JSON
// validation, and the serial batch uploader (api.request is mocked).
describe('accountImporter', function() {
  var validateOptions = accountImporter.validateOptions;
  var validateUserJson = accountImporter.validateUserJson;
  var serialImportUsers = accountImporter.serialImportUsers;
  describe('validateOptions', function() {
    it('should reject when no hash algorithm provided', function() {
      return expect(validateOptions({})).to.be.rejected;
    });
    it('should reject when unsupported hash algorithm provided', function() {
      return expect(validateOptions({hashAlgo: 'MD2'})).to.be.rejected;
    });
    it('should reject when missing parameters', function() {
      // HMAC algorithms additionally require --hash-key.
      return expect(validateOptions({hashAlgo: 'HMAC_SHA1'})).to.be.rejected;
    });
  });
  describe('validateUserJson', function() {
    it('should reject when unknown fields in user json', function() {
      // 'uid' is not a valid key; the importer expects 'localId'.
      return expect(validateUserJson({
        uid: '123',
        email: 'test@test.org'
      })).to.have.property('error');
    });
    it('should reject when unknown fields in providerUserInfo of user json', function() {
      return expect(validateUserJson({
        localId: '123',
        email: 'test@test.org',
        providerUserInfo: [{
          providerId: 'google.com',
          googleId: 'abc',
          email: 'test@test.org'
        }]
      })).to.have.property('error');
    });
    it('should reject when unknown providerUserInfo of user json', function() {
      return expect(validateUserJson({
        localId: '123',
        email: 'test@test.org',
        providerUserInfo: [{
          providerId: 'otheridp.com',
          rawId: 'abc',
          email: 'test@test.org'
        }]
      })).to.have.property('error');
    });
  });
  describe('serialImportUsers', function() {
    var sandbox;
    var mockApi;
    // Ten single-user batches; each should produce exactly one api.request.
    var batches = [];
    var hashOptions = {
      hashAlgo: 'HMAC_SHA1',
      hashKey: 'a2V5MTIz'
    };
    var expectedResponse = [];
    beforeEach(function() {
      sandbox = sinon.sandbox.create();
      helpers.mockAuth(sandbox);
      mockApi = sandbox.mock(api);
      for (var i = 0; i < 10; i++) {
        batches.push([{
          localId: i.toString(),
          email: 'test' + i + '@test.org'
        }]);
        expectedResponse.push({
          status: 200,
          response: '',
          body: ''
        });
      }
    });
    afterEach(function() {
      mockApi.verify();
      sandbox.restore();
      batches = [];
      expectedResponse = [];
    });
    it('should call api.request multiple times', function(done) {
      for (var i = 0; i < batches.length; i++) {
        mockApi.expects('request').withArgs('POST', '/identitytoolkit/v3/relyingparty/uploadAccount', {
          auth: true,
          data: {
            hashAlgorithm: 'HMAC_SHA1',
            signerKey: 'a2V5MTIz',
            targetProjectId: 'test-project-id',
            users: [{ email: 'test' + i + '@test.org', localId: i.toString() }]
          },
          json: true,
          origin: 'https://www.googleapis.com'
        }).once().resolves(expectedResponse[i]);
      }
      return expect(serialImportUsers('test-project-id', hashOptions, batches, 0)).to.eventually.notify(done);
    });
    it('should continue when some request\'s response is 200 but has `error` in response', function(done) {
      // A 200 with body.error must be logged but must not stop the chain.
      expectedResponse[5] = {
        status: 200,
        response: '',
        body: {
          error: [{
            index: 0,
            message: 'some error message'
          }]
        }
      };
      for (var i = 0; i < batches.length; i++) {
        mockApi.expects('request').withArgs('POST', '/identitytoolkit/v3/relyingparty/uploadAccount', {
          auth: true,
          data: {
            hashAlgorithm: 'HMAC_SHA1',
            signerKey: 'a2V5MTIz',
            targetProjectId: 'test-project-id',
            users: [{ email: 'test' + i + '@test.org', localId: i.toString() }]
          },
          json: true,
          origin: 'https://www.googleapis.com'
        }).once().resolves(expectedResponse[i]);
      }
      return expect(serialImportUsers('test-project-id', hashOptions, batches, 0)).to.eventually.notify(done);
    });
  });
});