1
0
mirror of https://github.com/Unleash/unleash.git synced 2025-01-25 00:07:47 +01:00

feat: move secrets to settings (#577)

* feat: move secrets to settings

* feat: Add better support for detailed db options.

Added db field in options to allow better control of
db-options. Especially important to allow special chars
in database password which might lead to an invalid url
when defined as a database-url.

* fix: integrate logger with knex logger

* fix: remove secret option from all examples

* fix: more options.js unit tests

* fix: added settings-store e2e tests
This commit is contained in:
Ivar Conradi Østhus 2020-04-13 22:38:46 +02:00 committed by GitHub
parent 0f73f09675
commit b912768923
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 295 additions and 60 deletions

View File

@ -52,7 +52,6 @@ Available unleash options include:
- **serverMetrics** (boolean) - use this option to turn on/off prometheus metrics.
- **preHook** (function) - this is a hook if you need to provide any middlewares to express before `unleash` adds any. Express app instance is injected as first argument.
- **preRouterHook** (function) - use this to register custom express middlewares before the `unleash` specific routers are added. This is typically how you would register custom middlewares to handle authentication.
- **secret** (string) - set this when you want to secure unleash. Used to encrypt the user session.
- **adminAuthentication** (string) - use this when implementing custom admin authentication [securing-unleash](./securing-unleash.md). Possible values are:
- `none` - will disable authentication altogether
- `unsecure` - (default) will use simple cookie based authentication. UI will require the user to specify an email in order to use unleash.

View File

@ -7,11 +7,11 @@ The Unleash API is split into two different paths: `/api/client` and `/api/admin
## General settings
Unleash uses an encrypted cookie to maintain a user session. This allows users to be logged in across multiple instances of Unleash. To protect this cookie, you should specify the `secret` option when starting Unleash.
Unleash uses an encrypted cookie to maintain a user session. This allows users to be logged in across multiple instances of Unleash. To protect this cookie, Unleash will automatically generate a secure token the first time you start Unleash.
## Securing the Admin API
To secure the Admin API, you have to tell Unleash that you are using a custom admin authentication and implement your authentication logic as a preHook. You should also set the secret option to a protected secret in your system.
To secure the Admin API, you have to tell Unleash that you are using a custom admin authentication and implement your authentication logic as a preHook.
```javascript
const unleash = require('unleash-server');
@ -20,7 +20,6 @@ const myCustomAdminAuth = require('./auth-hook');
unleash
.start({
databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
secret: 'super-duper-secret',
adminAuthentication: 'custom',
preRouterHook: myCustomAdminAuth,
})

View File

@ -8,7 +8,6 @@ const basicAuth = require('./basic-auth-hook');
unleash
.start({
databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
secret: 'super-duper-secret',
adminAuthentication: 'custom',
preRouterHook: basicAuth,
})

View File

@ -8,7 +8,6 @@ const enableGoogleOauth = require('./google-auth-hook');
unleash
.start({
databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
secret: 'super-duper-secret',
adminAuthentication: 'custom',
preRouterHook: enableGoogleOauth,
})

View File

@ -8,7 +8,6 @@ const enableKeycloak = require('./keycloak-auth-hook');
unleash
.start({
databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
secret: 'super-duper-secret',
adminAuthentication: 'custom',
preRouterHook: enableKeycloak,
})

View File

@ -3,17 +3,23 @@
const knex = require('knex');
module.exports.createDb = function({
databaseUrl,
db,
poolMin = 2,
poolMax = 20,
databaseSchema = 'public',
databaseSchema,
getLogger,
}) {
const db = knex({
const logger = getLogger('db-pool.js');
return knex({
client: 'pg',
connection: databaseUrl,
connection: db,
pool: { min: poolMin, max: poolMax },
searchPath: databaseSchema,
log: {
debug: msg => logger.debug(msg),
info: msg => logger.info(msg),
warn: msg => logger.warn(msg),
error: msg => logger.error(msg),
},
});
return db;
};

View File

@ -9,6 +9,7 @@ const ClientMetricsDb = require('./client-metrics-db');
const ClientMetricsStore = require('./client-metrics-store');
const ClientApplicationsStore = require('./client-applications-store');
const ContextFieldStore = require('./context-field-store');
const SettingStore = require('./setting-store');
module.exports.createStores = (config, eventBus) => {
const getLogger = config.getLogger;
@ -38,5 +39,6 @@ module.exports.createStores = (config, eventBus) => {
eventStore,
getLogger
),
settingStore: new SettingStore(db, getLogger),
};
};

52
lib/db/setting-store.js Normal file
View File

@ -0,0 +1,52 @@
/* eslint camelcase: "off" */

'use strict';

const TABLE = 'settings';

/**
 * Data-access layer for the `settings` table: a simple name -> JSON
 * content key/value store (used e.g. for the generated `unleash.secret`).
 *
 * @param {Object} db - a knex instance bound to the unleash schema.
 * @param {Function} getLogger - logger factory; called with this file's name.
 */
class SettingStore {
    constructor(db, getLogger) {
        this.db = db;
        // Fixed: logger tag previously said 'settings-store.js',
        // which does not match this file (setting-store.js).
        this.logger = getLogger('setting-store.js');
    }

    // Update the JSON content of an existing row identified by `name`.
    updateRow(name, content) {
        return this.db(TABLE)
            .where('name', name)
            .update({
                content: JSON.stringify(content),
            });
    }

    // Insert a brand-new row. Content is passed through as-is; the pg
    // driver serializes objects for the json column.
    insertNewRow(name, content) {
        return this.db(TABLE).insert({ name, content });
    }

    /**
     * Upsert: update the row if `name` already exists, otherwise insert it.
     *
     * NOTE(review): check-then-write is not atomic, so concurrent callers
     * may race; acceptable here because settings are written rarely.
     */
    async insert(name, content) {
        const rows = await this.db(TABLE)
            .count('*')
            .where('name', name);

        // pg returns count as a string; coerce explicitly before comparing.
        if (Number(rows[0].count) > 0) {
            return this.updateRow(name, content);
        }
        return this.insertNewRow(name, content);
    }

    /**
     * Resolve the stored JSON content for `name`, or undefined when the
     * setting does not exist.
     */
    async get(name) {
        const result = await this.db
            .select()
            .from(TABLE)
            .where('name', name);

        if (result.length > 0) {
            return result[0].content;
        }
        return undefined;
    }
}
module.exports = SettingStore;

View File

@ -1,16 +1,26 @@
'use strict';
const { readFileSync } = require('fs');
const parseDbUrl = require('parse-database-url');
const merge = require('deepmerge');
const { publicFolder } = require('unleash-frontend');
const { defaultLogProvider, validateLogProvider } = require('./logger');
const fs = require('fs');
const isDev = () => process.env.NODE_ENV === 'development';
const THIRTY_DAYS = 30 * 24 * 60 * 60 * 1000;
function defaultOptions() {
return {
databaseUrl: defaultDatabaseUrl(),
databaseSchema: 'public',
db: {
user: process.env.DATABASE_USERNAME,
password: process.env.DATABASE_PASSWORD,
host: process.env.DATABASE_HOST,
port: process.env.DATABASE_PORT || 5432,
database: process.env.DATABASE_NAME || 'unleash',
ssl: process.env.DATABASE_SSL,
driver: 'postgres',
},
port: process.env.HTTP_PORT || process.env.PORT || 4242,
host: process.env.HTTP_HOST,
pipe: undefined,
@ -19,8 +29,7 @@ function defaultOptions() {
enableLegacyRoutes: true,
extendedPermissions: false,
publicFolder,
enableRequestLogger: isDev(),
secret: 'UNLEASH-SECRET',
enableRequestLogger: false,
sessionAge: THIRTY_DAYS,
adminAuthentication: 'unsecure',
ui: {},
@ -34,29 +43,26 @@ function defaultOptions() {
function defaultDatabaseUrl() {
if (process.env.DATABASE_URL_FILE) {
return fs.readFileSync(process.env.DATABASE_URL_FILE, 'utf8');
return readFileSync(process.env.DATABASE_URL_FILE, 'utf8');
} else if (process.env.DATABASE_URL) {
return process.env.DATABASE_URL;
} else if (isDev() || process.env.DATABASE_HOST) {
const dbUsername = process.env.DATABASE_USERNAME || 'unleash_user';
const dbPassword = process.env.DATABASE_PASSWORD || 'passord';
const dbHost = process.env.DATABASE_HOST || 'localhost';
const dbPort = process.env.DATABASE_PORT || 5432;
const dbName = process.env.DATABASE_NAME || 'unleash';
const sslSupport = process.env.DATABASE_SSL || 'true';
return `postgres://${dbUsername}:${dbPassword}@${dbHost}:${dbPort}/${dbName}?ssl=${sslSupport}`;
} else {
return undefined;
}
}
module.exports = {
createOptions: opts => {
const options = Object.assign({}, defaultOptions(), opts);
createOptions: (opts = {}) => {
const options = merge(defaultOptions(), opts);
if (!options.databaseUrl) {
// Use DATABASE_URL when 'db' not defined.
if (!opts.db && options.databaseUrl) {
options.db = parseDbUrl(options.databaseUrl);
}
if (!options.db.host) {
throw new Error(
'You must either pass databaseUrl option or set environment variable DATABASE_URL || (DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME, DATABASE_PASSWORD, DATABASE_NAME)'
`Unleash requires database details to start. See https://unleash.github.io/docs/getting_started`
);
}

View File

@ -13,19 +13,6 @@ test('should require DATABASE_URI', t => {
});
});
test('should set default databaseUrl for development', t => {
delete process.env.NODE_ENV;
process.env.NODE_ENV = 'development';
const { createOptions } = require('./options');
const options = createOptions({});
t.true(
options.databaseUrl ===
'postgres://unleash_user:passord@localhost:5432/unleash?ssl=true'
);
});
test('should use DATABASE_URL from env', t => {
const databaseUrl = 'postgres://u:p@localhost:5432/name';
delete process.env.NODE_ENV;
@ -60,12 +47,115 @@ test('should use databaseUrl from options', t => {
});
test('should not override provided options', t => {
process.env.DATABASE_URL = 'test';
process.env.DATABASE_URL = 'postgres://test:5432/name';
process.env.NODE_ENV = 'production';
const { createOptions } = require('./options');
const options = createOptions({ databaseUrl: 'test', port: 1111 });
const options = createOptions({
databaseUrl: 'postgres://test:5432/name',
port: 1111,
});
t.true(options.databaseUrl === 'test');
t.true(options.databaseUrl === 'postgres://test:5432/name');
t.true(options.port === 1111);
});
test('should add listen options from host and port', t => {
const { createOptions } = require('./options');
const options = createOptions({
databaseUrl: 'postgres://test:5432/name',
port: 1111,
host: 'localhost',
});
t.deepEqual(options.listen, { port: 1111, host: 'localhost' });
});
test('should use pipe to path', t => {
const { createOptions } = require('./options');
const options = createOptions({
databaseUrl: 'postgres://test:5432/name',
port: 1111,
host: 'localhost',
pipe: '\\\\?\\pipe',
});
t.deepEqual(options.listen, { path: options.pipe });
});
test('should prefer databaseUrl from options', t => {
process.env.DATABASE_URL = 'postgres://test:5432/name';
const databaseUrl = 'postgres://u:p@localhost:5432/options';
const { createOptions } = require('./options');
const options = createOptions({ databaseUrl });
t.deepEqual(options.databaseUrl, databaseUrl);
});
test('should expand databaseUrl from options', t => {
process.env.DATABASE_URL = 'postgres://test:5432/name';
const databaseUrl = 'postgres://u:p@localhost:5432/options';
const { createOptions } = require('./options');
const options = createOptions({ databaseUrl });
t.deepEqual(options.db, {
database: 'options',
driver: 'postgres',
host: 'localhost',
password: 'p',
port: '5432',
user: 'u',
});
});
test('should validate getLogger', t => {
const databaseUrl = 'postgres://u:p@localhost:5432/options';
const getLogger = () => {};
const { createOptions } = require('./options');
t.throws(() => {
createOptions({ databaseUrl, getLogger });
});
});
// Verifies that a caller-supplied getLogger implementing the full
// logger contract (debug/info/warn/error) is accepted and kept as-is.
// Fixed: test title typo 'custome' -> 'custom'.
test('should accept custom log-provider', t => {
    const databaseUrl = 'postgres://u:p@localhost:5432/options';
    const getLogger = () => ({
        debug: console.log,
        info: console.log,
        warn: console.log,
        error: console.log,
    });
    const { createOptions } = require('./options');
    const options = createOptions({ databaseUrl, getLogger });
    t.deepEqual(options.getLogger, getLogger);
});
test('should prefer custom db connection options', t => {
const databaseUrl = 'postgres://u:p@localhost:5432/options';
const db = {
user: 'db_user',
password: 'db_password',
host: 'db-host',
port: 3232,
database: 'db_database',
ssl: false,
driver: 'postgres',
};
const { createOptions } = require('./options');
const options = createOptions({ databaseUrl, db });
t.deepEqual(options.db, db);
});
test('should baseUriPath', t => {
const databaseUrl = 'postgres://u:p@localhost:5432/options';
const baseUriPath = 'some';
const { createOptions } = require('./options');
const options = createOptions({ databaseUrl, baseUriPath });
t.deepEqual(options.baseUriPath, baseUriPath);
});

View File

@ -18,11 +18,13 @@ async function createApp(options) {
const logger = options.getLogger('server-impl.js');
const eventBus = new EventEmitter();
const stores = createStores(options, eventBus);
const secret = await stores.settingStore.get('unleash.secret');
const config = Object.assign(
{
stores,
eventBus,
secret,
logFactory: options.getLogger, // TODO: remove in v4.x
},
options
@ -59,6 +61,7 @@ async function createApp(options) {
server.on('listening', () =>
resolve({
app,
stores,
server,
eventBus,
stateService,

View File

@ -15,6 +15,11 @@ const getApp = proxyquire('./app', {
});
const eventStore = new EventEmitter();
// Test double for the settings store. The arrow body previously wrapped
// Promise.resolve('secret') in braces without a `return`, so get() resolved
// to undefined instead of the secret — fixed by returning the promise.
const settingStore = {
    get: () => Promise.resolve('secret'),
};
const serverImpl = proxyquire('./server-impl', {
'./app': getApp,
@ -27,6 +32,7 @@ const serverImpl = proxyquire('./server-impl', {
createStores() {
return {
eventStore,
settingStore,
};
},
},

View File

@ -0,0 +1,21 @@
/* eslint camelcase: "off" */

'use strict';

const crypto = require('crypto');

// Settings key under which the auto-generated session secret is stored.
const settingsName = 'unleash.secret';

// Seeds the settings table with a random 40-hex-char secret on first run.
// The secret is JSON-encoded (quoted string) to match how setting-store
// persists content. String interpolation into SQL is safe here: both
// interpolated values are a local constant and crypto-generated hex —
// no external input reaches this statement.
exports.up = function(db, cb) {
    const secret = crypto.randomBytes(20).toString('hex');
    db.runSql(
        `
        INSERT INTO settings(name, content)
        VALUES('${settingsName}', '${JSON.stringify(secret)}')`,
        cb
    );
};

// Rollback: remove the generated secret row.
exports.down = function(db, cb) {
    db.runSql(`DELETE FROM settings WHERE name = '${settingsName}'`, cb);
};

View File

@ -3,11 +3,9 @@
require('db-migrate-shared').log.setLogLevel('error');
const { getInstance } = require('db-migrate');
const parseDbUrl = require('parse-database-url');
function migrateDb({ databaseUrl, databaseSchema = 'public' }) {
const custom = parseDbUrl(databaseUrl);
custom.schema = databaseSchema;
function migrateDb({ db, databaseSchema = 'public' }) {
const custom = Object.assign({}, db, { schema: databaseSchema });
const dbmigrate = getInstance(true, {
cwd: __dirname,
config: { custom },

View File

@ -34,7 +34,7 @@
"scripts": {
"start": "node server.js",
"start:google": "node examples/google-auth-unleash.js",
"start:dev": "NODE_ENV=development supervisor --ignore ./node_modules/,website server.js",
"start:dev": "NODE_ENV=development supervisor --ignore ./node_modules/,website server-dev.js",
"start:dev:pg": "pg_virtualenv npm run start:dev:pg-chain",
"start:dev:pg-chain": "export DATABASE_URL=postgres://$PGUSER:$PGPASSWORD@localhost:$PGPORT/postgres ; db-migrate up && npm run start:dev",
"db-migrate": "db-migrate",
@ -70,6 +70,7 @@
"db-migrate": "^0.11.6",
"db-migrate-pg": "^1.0.0",
"deep-diff": "^1.0.2",
"deepmerge": "^4.2.2",
"errorhandler": "^1.5.1",
"express": "^4.17.1",
"gravatar-url": "^3.1.0",

15
server-dev.js Normal file
View File

@ -0,0 +1,15 @@
'use strict';

const unleash = require('./lib/server-impl');

// Local development bootstrap: starts Unleash against the default local
// postgres instance with request logging turned on. Not for production.
const devOptions = {
    db: {
        user: 'unleash_user',
        password: 'passord',
        host: 'localhost',
        port: 5432,
        database: 'unleash',
        ssl: true,
    },
    enableRequestLogger: true,
};

unleash.start(devOptions);

View File

@ -1,13 +1,12 @@
'use strict';
function getDatabaseUrl() {
if (process.env.TEST_DATABASE_URL) {
return process.env.TEST_DATABASE_URL;
} else {
return 'postgres://unleash_user:passord@localhost:5432/unleash_test';
}
}
const parseDbUrl = require('parse-database-url');
module.exports = {
getDatabaseUrl,
getDb: () => {
const url =
process.env.TEST_DATABASE_URL ||
'postgres://unleash_user:passord@localhost:5432/unleash_test';
return parseDbUrl(url);
},
};

View File

@ -58,7 +58,7 @@ function createFeatures(store) {
module.exports = async function init(databaseSchema = 'test', getLogger) {
const options = {
databaseUrl: require('./database-config').getDatabaseUrl(),
db: require('./database-config').getDb(),
databaseSchema,
minPool: 1,
maxPool: 1,

View File

@ -0,0 +1,28 @@
'use strict';

const test = require('ava');
const dbInit = require('../helpers/database-init');
const getLogger = require('../../fixtures/no-logger');

// Shared store handle, initialised once against a dedicated schema so the
// serial tests below don't interfere with other e2e suites.
let stores;

test.before(async () => {
    const db = await dbInit('setting_store_serial', getLogger);
    stores = db.stores;
});

test.after(async () => {
    // Release the knex connection pool so the test process can exit.
    await stores.db.destroy();
});

// The migration seeds 'unleash.secret' automatically; it must be present.
test.serial('should have api secret stored', async t => {
    const secret = await stores.settingStore.get('unleash.secret');
    t.assert(secret);
});

// Fixed: test title typo 'arbitarty' -> 'arbitrary'.
test.serial('should insert arbitrary value', async t => {
    const value = { b: 'hello' };
    await stores.settingStore.insert('unleash.custom', value);
    const ret = await stores.settingStore.get('unleash.custom');
    t.deepEqual(ret, value);
});

6
test/fixtures/fake-setting-store.js vendored Normal file
View File

@ -0,0 +1,6 @@
'use strict';
module.exports = () => ({
insert: () => Promise.resolve(),
get: () => Promise.resolve(),
});

View File

@ -7,6 +7,7 @@ const featureToggleStore = require('./fake-feature-toggle-store');
const eventStore = require('./fake-event-store');
const strategyStore = require('./fake-strategies-store');
const contextFieldStore = require('./fake-context-store');
const settingStore = require('./fake-setting-store');
module.exports = {
createStores: () => {
@ -25,6 +26,7 @@ module.exports = {
eventStore: eventStore(),
strategyStore: strategyStore(),
contextFieldStore: contextFieldStore(),
settingStore: settingStore(),
};
},
};

View File

@ -1725,6 +1725,11 @@ deep-is@~0.1.3:
version "0.1.3"
resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
deepmerge@^4.2.2:
version "4.2.2"
resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==
default-require-extensions@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96"