feat: move secrets to settings (#577)
* feat: move secrets to settings
* feat: Add better support for detailed db options. Added db field in options to allow better control of db-options. Especially important to allow special chars in database password which might lead to an invalid url when defined as a database-url.
* fix: integrate logger with knex logger
* fix: remove secret option from all examples
* fix: more options.js unit tests
* fix: added settings-store e2e tests
commit b912768923
parent 0f73f09675
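As a quick orientation before the file-by-file diff: the commit introduces a `db` option so credentials no longer have to be squeezed into a `databaseUrl`. A minimal sketch of passing it to `unleash.start()`; the values are illustrative and mirror the development defaults in `server-dev.js` further down.

```javascript
// Sketch only: the new `db` option introduced by this commit.
// Credentials are illustrative; special characters in the password no longer
// need URL-encoding, which was the motivation stated in the commit message.
const unleash = require('unleash-server');

unleash.start({
    db: {
        user: 'unleash_user',
        password: 'p@ss:word/with#chars',
        host: 'localhost',
        port: 5432,
        database: 'unleash',
        ssl: false,
    },
});
```
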
@@ -52,7 +52,6 @@ Available unleash options include:
 - **serverMetrics** (boolean) - use this option to turn on/off prometheus metrics.
 - **preHook** (function) - this is a hook if you need to provide any middlewares to express before `unleash` adds any. Express app instance is injected as first argument.
 - **preRouterHook** (function) - use this to register custom express middlewares before the `unleash` specific routers are added. This is typically how you would register custom middlewares to handle authentication.
-- **secret** (string) - set this when you want to secure unleash. Used to encrypt the user session.
 - **adminAuthentication** (string) - use this when implementing custom admin authentication [securing-unleash](./securing-unleash.md). Possible values are:
   - `none` - will disable authentication altogether
   - `unsecure` - (default) will use simple cookie based authentication. UI will require the user to specify an email in order to use unleash.

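The `preHook` option listed above is not exercised anywhere in this changeset. Below is a minimal, hypothetical sketch of how it can be used; the middleware body and the header name are made up for illustration — only the `preHook` signature (express app injected as first argument) comes from the documentation above.

```javascript
// Hypothetical preHook usage; not part of this commit.
const unleash = require('unleash-server');

unleash.start({
    databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
    preHook: app => {
        // `app` is the express instance, injected before unleash registers its own middlewares.
        app.use((req, res, next) => {
            res.setHeader('X-Example', 'true'); // illustrative header only
            next();
        });
    },
});
```
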
@@ -7,11 +7,11 @@ The Unleash API is split into two different paths: `/api/client` and `/api/admin`
 
 ## General settings
 
-Unleash uses an encrypted cookie to maintain a user session. This allows users to be logged in across multiple instances of Unleash. To protect this cookie, you should specify the `secret` option when starting Unleash.
+Unleash uses an encrypted cookie to maintain a user session. This allows users to be logged in across multiple instances of Unleash. To protect this cookie, Unleash will automatically generate a secure token the first time you start Unleash.
 
 ## Securing the Admin API
 
-To secure the Admin API, you have to tell Unleash that you are using a custom admin authentication and implement your authentication logic as a preHook. You should also set the secret option to a protected secret in your system.
+To secure the Admin API, you have to tell Unleash that you are using a custom admin authentication and implement your authentication logic as a preHook.
 
 ```javascript
 const unleash = require('unleash-server');
@@ -20,7 +20,6 @@ const myCustomAdminAuth = require('./auth-hook');
 unleash
   .start({
     databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
-    secret: 'super-duper-secret',
     adminAuthentication: 'custom',
     preRouterHook: myCustomAdminAuth,
   })

@@ -8,7 +8,6 @@ const basicAuth = require('./basic-auth-hook');
 unleash
     .start({
         databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
-        secret: 'super-duper-secret',
         adminAuthentication: 'custom',
         preRouterHook: basicAuth,
     })

@@ -8,7 +8,6 @@ const enableGoogleOauth = require('./google-auth-hook');
 unleash
     .start({
         databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
-        secret: 'super-duper-secret',
         adminAuthentication: 'custom',
         preRouterHook: enableGoogleOauth,
     })

@@ -8,7 +8,6 @@ const enableKeycloak = require('./keycloak-auth-hook');
 unleash
     .start({
         databaseUrl: 'postgres://unleash_user:passord@localhost:5432/unleash',
-        secret: 'super-duper-secret',
         adminAuthentication: 'custom',
         preRouterHook: enableKeycloak,
     })

@@ -3,17 +3,23 @@
 const knex = require('knex');
 
 module.exports.createDb = function({
-    databaseUrl,
+    db,
     poolMin = 2,
     poolMax = 20,
-    databaseSchema = 'public',
+    databaseSchema,
+    getLogger,
 }) {
-    const db = knex({
+    const logger = getLogger('db-pool.js');
+    return knex({
         client: 'pg',
-        connection: databaseUrl,
+        connection: db,
         pool: { min: poolMin, max: poolMax },
         searchPath: databaseSchema,
+        log: {
+            debug: msg => logger.debug(msg),
+            info: msg => logger.info(msg),
+            warn: msg => logger.warn(msg),
+            error: msg => logger.error(msg),
+        },
     });
-
-    return db;
 };

@@ -9,6 +9,7 @@ const ClientMetricsDb = require('./client-metrics-db');
 const ClientMetricsStore = require('./client-metrics-store');
 const ClientApplicationsStore = require('./client-applications-store');
 const ContextFieldStore = require('./context-field-store');
+const SettingStore = require('./setting-store');
 
 module.exports.createStores = (config, eventBus) => {
     const getLogger = config.getLogger;
@@ -38,5 +39,6 @@ module.exports.createStores = (config, eventBus) => {
             eventStore,
             getLogger
         ),
+        settingStore: new SettingStore(db, getLogger),
     };
 };

lib/db/setting-store.js (new file)
@@ -0,0 +1,52 @@
+/* eslint camelcase: "off" */
+'use strict';
+
+const TABLE = 'settings';
+
+class SettingStore {
+    constructor(db, getLogger) {
+        this.db = db;
+        this.logger = getLogger('settings-store.js');
+    }
+
+    updateRow(name, content) {
+        return this.db(TABLE)
+            .where('name', name)
+            .update({
+                content: JSON.stringify(content),
+            });
+    }
+
+    insertNewRow(name, content) {
+        return this.db(TABLE).insert({ name, content });
+    }
+
+    insert(name, content) {
+        return this.db(TABLE)
+            .count('*')
+            .where('name', name)
+            .map(row => ({ count: row.count }))
+            .then(rows => {
+                if (rows[0].count > 0) {
+                    return this.updateRow(name, content);
+                } else {
+                    return this.insertNewRow(name, content);
+                }
+            });
+    }
+
+    async get(name) {
+        const result = await this.db
+            .select()
+            .from(TABLE)
+            .where('name', name);
+
+        if (result.length > 0) {
+            return result[0].content;
+        } else {
+            return undefined;
+        }
+    }
+}
+
+module.exports = SettingStore;

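For context, a hedged usage sketch of the new `SettingStore`. In the real code `lib/db/index.js` constructs it and the e2e test further down exercises it; the knex connection values here are the development credentials used elsewhere in this commit, and the console-based logger is a stand-in.

```javascript
// Illustrative wiring only; not part of the commit itself.
const knex = require('knex');
const SettingStore = require('./lib/db/setting-store');

const db = knex({
    client: 'pg',
    connection: {
        user: 'unleash_user',
        password: 'passord',
        host: 'localhost',
        port: 5432,
        database: 'unleash',
    },
});

const getLogger = () => console; // stand-in for the project's log provider
const settingStore = new SettingStore(db, getLogger);

// insert() upserts a named JSON value; get() resolves to the stored content.
settingStore
    .insert('unleash.custom', { b: 'hello' })
    .then(() => settingStore.get('unleash.custom'))
    .then(value => console.log(value)) // => { b: 'hello' }
    .finally(() => db.destroy());
```
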
@@ -1,16 +1,26 @@
 'use strict';
 
+const { readFileSync } = require('fs');
+const parseDbUrl = require('parse-database-url');
+const merge = require('deepmerge');
 const { publicFolder } = require('unleash-frontend');
 const { defaultLogProvider, validateLogProvider } = require('./logger');
-const fs = require('fs');
 
-const isDev = () => process.env.NODE_ENV === 'development';
 const THIRTY_DAYS = 30 * 24 * 60 * 60 * 1000;
 
 function defaultOptions() {
     return {
         databaseUrl: defaultDatabaseUrl(),
         databaseSchema: 'public',
+        db: {
+            user: process.env.DATABASE_USERNAME,
+            password: process.env.DATABASE_PASSWORD,
+            host: process.env.DATABASE_HOST,
+            port: process.env.DATABASE_PORT || 5432,
+            database: process.env.DATABASE_NAME || 'unleash',
+            ssl: process.env.DATABASE_SSL,
+            driver: 'postgres',
+        },
         port: process.env.HTTP_PORT || process.env.PORT || 4242,
         host: process.env.HTTP_HOST,
         pipe: undefined,
@@ -19,8 +29,7 @@ function defaultOptions() {
         enableLegacyRoutes: true,
         extendedPermissions: false,
         publicFolder,
-        enableRequestLogger: isDev(),
-        secret: 'UNLEASH-SECRET',
+        enableRequestLogger: false,
         sessionAge: THIRTY_DAYS,
         adminAuthentication: 'unsecure',
         ui: {},
@@ -34,29 +43,26 @@ function defaultOptions() {
 
 function defaultDatabaseUrl() {
     if (process.env.DATABASE_URL_FILE) {
-        return fs.readFileSync(process.env.DATABASE_URL_FILE, 'utf8');
+        return readFileSync(process.env.DATABASE_URL_FILE, 'utf8');
     } else if (process.env.DATABASE_URL) {
         return process.env.DATABASE_URL;
-    } else if (isDev() || process.env.DATABASE_HOST) {
-        const dbUsername = process.env.DATABASE_USERNAME || 'unleash_user';
-        const dbPassword = process.env.DATABASE_PASSWORD || 'passord';
-        const dbHost = process.env.DATABASE_HOST || 'localhost';
-        const dbPort = process.env.DATABASE_PORT || 5432;
-        const dbName = process.env.DATABASE_NAME || 'unleash';
-        const sslSupport = process.env.DATABASE_SSL || 'true';
-        return `postgres://${dbUsername}:${dbPassword}@${dbHost}:${dbPort}/${dbName}?ssl=${sslSupport}`;
     } else {
         return undefined;
     }
 }
 
 module.exports = {
-    createOptions: opts => {
-        const options = Object.assign({}, defaultOptions(), opts);
+    createOptions: (opts = {}) => {
+        const options = merge(defaultOptions(), opts);
 
-        if (!options.databaseUrl) {
+        // Use DATABASE_URL when 'db' not defined.
+        if (!opts.db && options.databaseUrl) {
+            options.db = parseDbUrl(options.databaseUrl);
+        }
+
+        if (!options.db.host) {
             throw new Error(
-                'You must either pass databaseUrl option or set environment variable DATABASE_URL || (DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME, DATABASE_PASSWORD, DATABASE_NAME)'
+                `Unleash requires database details to start. See https://unleash.github.io/docs/getting_started`
             );
         }
 

@@ -13,19 +13,6 @@ test('should require DATABASE_URI', t => {
     });
 });
 
-test('should set default databaseUrl for development', t => {
-    delete process.env.NODE_ENV;
-    process.env.NODE_ENV = 'development';
-    const { createOptions } = require('./options');
-
-    const options = createOptions({});
-
-    t.true(
-        options.databaseUrl ===
-            'postgres://unleash_user:passord@localhost:5432/unleash?ssl=true'
-    );
-});
-
 test('should use DATABASE_URL from env', t => {
     const databaseUrl = 'postgres://u:p@localhost:5432/name';
     delete process.env.NODE_ENV;
@@ -60,12 +47,115 @@ test('should use databaseUrl from options', t => {
 });
 
 test('should not override provided options', t => {
-    process.env.DATABASE_URL = 'test';
+    process.env.DATABASE_URL = 'postgres://test:5432/name';
     process.env.NODE_ENV = 'production';
 
     const { createOptions } = require('./options');
-    const options = createOptions({ databaseUrl: 'test', port: 1111 });
+    const options = createOptions({
+        databaseUrl: 'postgres://test:5432/name',
+        port: 1111,
+    });
 
-    t.true(options.databaseUrl === 'test');
+    t.true(options.databaseUrl === 'postgres://test:5432/name');
     t.true(options.port === 1111);
 });
 
+test('should add listen options from host and port', t => {
+    const { createOptions } = require('./options');
+    const options = createOptions({
+        databaseUrl: 'postgres://test:5432/name',
+        port: 1111,
+        host: 'localhost',
+    });
+
+    t.deepEqual(options.listen, { port: 1111, host: 'localhost' });
+});
+
+test('should use pipe to path', t => {
+    const { createOptions } = require('./options');
+    const options = createOptions({
+        databaseUrl: 'postgres://test:5432/name',
+        port: 1111,
+        host: 'localhost',
+        pipe: '\\\\?\\pipe',
+    });
+
+    t.deepEqual(options.listen, { path: options.pipe });
+});
+
+test('should prefer databaseUrl from options', t => {
+    process.env.DATABASE_URL = 'postgres://test:5432/name';
+    const databaseUrl = 'postgres://u:p@localhost:5432/options';
+    const { createOptions } = require('./options');
+
+    const options = createOptions({ databaseUrl });
+
+    t.deepEqual(options.databaseUrl, databaseUrl);
+});
+
+test('should expand databaseUrl from options', t => {
+    process.env.DATABASE_URL = 'postgres://test:5432/name';
+    const databaseUrl = 'postgres://u:p@localhost:5432/options';
+    const { createOptions } = require('./options');
+
+    const options = createOptions({ databaseUrl });
+
+    t.deepEqual(options.db, {
+        database: 'options',
+        driver: 'postgres',
+        host: 'localhost',
+        password: 'p',
+        port: '5432',
+        user: 'u',
+    });
+});
+
+test('should validate getLogger', t => {
+    const databaseUrl = 'postgres://u:p@localhost:5432/options';
+    const getLogger = () => {};
+    const { createOptions } = require('./options');
+
+    t.throws(() => {
+        createOptions({ databaseUrl, getLogger });
+    });
+});
+
+test('should accept custome log-provider', t => {
+    const databaseUrl = 'postgres://u:p@localhost:5432/options';
+    const getLogger = () => ({
+        debug: console.log,
+        info: console.log,
+        warn: console.log,
+        error: console.log,
+    });
+    const { createOptions } = require('./options');
+    const options = createOptions({ databaseUrl, getLogger });
+
+    t.deepEqual(options.getLogger, getLogger);
+});
+
+test('should prefer custom db connection options', t => {
+    const databaseUrl = 'postgres://u:p@localhost:5432/options';
+    const db = {
+        user: 'db_user',
+        password: 'db_password',
+        host: 'db-host',
+        port: 3232,
+        database: 'db_database',
+        ssl: false,
+        driver: 'postgres',
+    };
+    const { createOptions } = require('./options');
+    const options = createOptions({ databaseUrl, db });
+
+    t.deepEqual(options.db, db);
+});
+
+test('should baseUriPath', t => {
+    const databaseUrl = 'postgres://u:p@localhost:5432/options';
+    const baseUriPath = 'some';
+    const { createOptions } = require('./options');
+    const options = createOptions({ databaseUrl, baseUriPath });
+
+    t.deepEqual(options.baseUriPath, baseUriPath);
+});

@@ -18,11 +18,13 @@ async function createApp(options) {
     const logger = options.getLogger('server-impl.js');
     const eventBus = new EventEmitter();
     const stores = createStores(options, eventBus);
+    const secret = await stores.settingStore.get('unleash.secret');
 
     const config = Object.assign(
         {
             stores,
             eventBus,
+            secret,
             logFactory: options.getLogger, // TODO: remove in v4.x
         },
         options
@@ -59,6 +61,7 @@ async function createApp(options) {
         server.on('listening', () =>
             resolve({
                 app,
+                stores,
                 server,
                 eventBus,
                 stateService,

@@ -15,6 +15,11 @@ const getApp = proxyquire('./app', {
 });
 
 const eventStore = new EventEmitter();
+const settingStore = {
+    get: () => {
+        Promise.resolve('secret');
+    },
+};
 
 const serverImpl = proxyquire('./server-impl', {
     './app': getApp,
@@ -27,6 +32,7 @@ const serverImpl = proxyquire('./server-impl', {
         createStores() {
             return {
                 eventStore,
+                settingStore,
             };
         },
     },

migrations/20200329191251-settings-secret.js (new file)
@@ -0,0 +1,21 @@
+/* eslint camelcase: "off" */
+'use strict';
+
+const crypto = require('crypto');
+
+const settingsName = 'unleash.secret';
+
+exports.up = function(db, cb) {
+    const secret = crypto.randomBytes(20).toString('hex');
+
+    db.runSql(
+        `
+        INSERT INTO settings(name, content)
+        VALUES('${settingsName}', '${JSON.stringify(secret)}')`,
+        cb
+    );
+};
+
+exports.down = function(db, cb) {
+    db.runSql(`DELETE FROM settings WHERE name = '${settingsName}'`, cb);
+};

@@ -3,11 +3,9 @@
 require('db-migrate-shared').log.setLogLevel('error');
 
 const { getInstance } = require('db-migrate');
-const parseDbUrl = require('parse-database-url');
 
-function migrateDb({ databaseUrl, databaseSchema = 'public' }) {
-    const custom = parseDbUrl(databaseUrl);
-    custom.schema = databaseSchema;
+function migrateDb({ db, databaseSchema = 'public' }) {
+    const custom = Object.assign({}, db, { schema: databaseSchema });
     const dbmigrate = getInstance(true, {
         cwd: __dirname,
         config: { custom },

@@ -34,7 +34,7 @@
   "scripts": {
     "start": "node server.js",
     "start:google": "node examples/google-auth-unleash.js",
-    "start:dev": "NODE_ENV=development supervisor --ignore ./node_modules/,website server.js",
+    "start:dev": "NODE_ENV=development supervisor --ignore ./node_modules/,website server-dev.js",
     "start:dev:pg": "pg_virtualenv npm run start:dev:pg-chain",
    "start:dev:pg-chain": "export DATABASE_URL=postgres://$PGUSER:$PGPASSWORD@localhost:$PGPORT/postgres ; db-migrate up && npm run start:dev",
     "db-migrate": "db-migrate",
@@ -70,6 +70,7 @@
     "db-migrate": "^0.11.6",
     "db-migrate-pg": "^1.0.0",
     "deep-diff": "^1.0.2",
+    "deepmerge": "^4.2.2",
     "errorhandler": "^1.5.1",
     "express": "^4.17.1",
     "gravatar-url": "^3.1.0",

|
15
server-dev.js
Normal file
15
server-dev.js
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const unleash = require('./lib/server-impl');
|
||||||
|
|
||||||
|
unleash.start({
|
||||||
|
db: {
|
||||||
|
user: 'unleash_user',
|
||||||
|
password: 'passord',
|
||||||
|
host: 'localhost',
|
||||||
|
port: 5432,
|
||||||
|
database: 'unleash',
|
||||||
|
ssl: true,
|
||||||
|
},
|
||||||
|
enableRequestLogger: true,
|
||||||
|
});
|
@@ -1,13 +1,12 @@
 'use strict';
 
-function getDatabaseUrl() {
-    if (process.env.TEST_DATABASE_URL) {
-        return process.env.TEST_DATABASE_URL;
-    } else {
-        return 'postgres://unleash_user:passord@localhost:5432/unleash_test';
-    }
-}
+const parseDbUrl = require('parse-database-url');
 
 module.exports = {
-    getDatabaseUrl,
+    getDb: () => {
+        const url =
+            process.env.TEST_DATABASE_URL ||
+            'postgres://unleash_user:passord@localhost:5432/unleash_test';
+        return parseDbUrl(url);
+    },
 };

@@ -58,7 +58,7 @@ function createFeatures(store) {
 
 module.exports = async function init(databaseSchema = 'test', getLogger) {
     const options = {
-        databaseUrl: require('./database-config').getDatabaseUrl(),
+        db: require('./database-config').getDb(),
         databaseSchema,
         minPool: 1,
         maxPool: 1,

test/e2e/stores/setting-store.e2e.test.js (new file)
@@ -0,0 +1,28 @@
+'use strict';
+
+const test = require('ava');
+const dbInit = require('../helpers/database-init');
+const getLogger = require('../../fixtures/no-logger');
+
+let stores;
+
+test.before(async () => {
+    const db = await dbInit('setting_store_serial', getLogger);
+    stores = db.stores;
+});
+
+test.after(async () => {
+    await stores.db.destroy();
+});
+
+test.serial('should have api secret stored', async t => {
+    const secret = await stores.settingStore.get('unleash.secret');
+    t.assert(secret);
+});
+
+test.serial('should insert arbitarty value', async t => {
+    const value = { b: 'hello' };
+    await stores.settingStore.insert('unleash.custom', value);
+    const ret = await stores.settingStore.get('unleash.custom');
+    t.deepEqual(ret, value);
+});

test/fixtures/fake-setting-store.js (new file, vendored)
@@ -0,0 +1,6 @@
+'use strict';
+
+module.exports = () => ({
+    insert: () => Promise.resolve(),
+    get: () => Promise.resolve(),
+});

test/fixtures/store.js (vendored)
@@ -7,6 +7,7 @@ const featureToggleStore = require('./fake-feature-toggle-store');
 const eventStore = require('./fake-event-store');
 const strategyStore = require('./fake-strategies-store');
 const contextFieldStore = require('./fake-context-store');
+const settingStore = require('./fake-setting-store');
 
 module.exports = {
     createStores: () => {
@@ -25,6 +26,7 @@ module.exports = {
             eventStore: eventStore(),
             strategyStore: strategyStore(),
             contextFieldStore: contextFieldStore(),
+            settingStore: settingStore(),
         };
     },
 };

@@ -1725,6 +1725,11 @@ deep-is@~0.1.3:
   version "0.1.3"
   resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
 
+deepmerge@^4.2.2:
+  version "4.2.2"
+  resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
+  integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==
+
 default-require-extensions@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96"