chore: Create test db from template (#9265)
## About the changes
Based on the first hypothesis from https://github.com/Unleash/unleash/pull/9264, I looked for an alternative way of initializing the DB, with the main goal of running migrations only once and removing them from the actual test run.
I found that [Postgres template databases](https://www.postgresql.org/docs/current/manage-ag-templatedbs.html), combined with a Jest global setup script, are an interesting option.
### Changes to how we use DBs for testing
Previously, we relied on a single DB with multiple schemas to isolate tests, but each schema started out empty and required migrations or custom DB initialization scripts.
With this method we don't need different schema names (apparently there's no templating for schemas); we can use new databases cloned from a migrated template instead, and we can also eliminate the custom initialization code. A minimal sketch of the mechanism follows.
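To make that concrete, here is a minimal sketch of the idea, simplified from the actual `scripts/jest-setup.ts` and test DB helper changes further down in this diff (the function names here are illustrative):

```ts
import { Client, type ClientConfig } from 'pg';

const TEMPLATE = 'unleash_template_db';

// Run once (e.g. in Jest's globalSetup): create the template DB and migrate it.
export async function createTemplateDb(config: ClientConfig): Promise<void> {
    const client = new Client(config);
    await client.connect();
    await client.query(`DROP DATABASE IF EXISTS ${TEMPLATE}`);
    await client.query(`CREATE DATABASE ${TEMPLATE}`);
    await client.end();
    // ...run all migrations against TEMPLATE here, once...
}

// Run per test file: clone the already-migrated template; no per-test migrations needed.
export async function createTestDb(config: ClientConfig, name: string): Promise<void> {
    const client = new Client(config);
    await client.connect();
    await client.query(`CREATE DATABASE ${name} TEMPLATE ${TEMPLATE}`);
    await client.end();
}
```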
### Legacy tests
This method also highlighted some wrong assumptions in existing tests. One example is the `default` environment: it is deprecated and no longer exists after migrations, but because those tests create the expected DB state manually, they were never updated to match the actual, migrated state.
To keep tests green, I've added a configuration option to use the `legacy` test setup (24 tests). Migrating these will speed up the tests further, but their code has to be modified, so I'm leaving that for another PR. The opt-in looks like the sketch below.
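For reference, this is the opt-in pattern used by the updated tests in this diff (`dbInit` is the existing e2e helper; the database name here is illustrative):

```ts
import dbInit, { type ITestDb } from '../../test/e2e/helpers/database-init';
import getLogger from '../../test/fixtures/no-logger';

let db: ITestDb;

beforeAll(async () => {
    // 'legacy' keeps the old schema-based init with manual setup;
    // omitting dbInitMethod (or passing 'template') clones the migrated template DB.
    db = await dbInit('some_legacy_test', getLogger, {
        dbInitMethod: 'legacy' as const,
    });
});

afterAll(async () => {
    await db.destroy();
});
```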
## Downsides
1. The template DB initialization happens at the beginning of every test run, so local development may suffer from slower unit tests. As a workaround, we could define an environment variable that disables the DB migration.
2. Proliferation of test DBs. In ephemeral environments this is not a problem, but for local development we should clean them up from time to time. It's possible to clean up test DBs by matching on the DB name pattern (see 2ed2e1c274/scripts/jest-setup.ts (L13-L18) and the sketch below), but I didn't want to add this code yet. Opinions?
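That cleanup would look roughly like the commented-out block in the new `scripts/jest-setup.ts`: list databases matching the test prefix and drop them (the helper name is illustrative):

```ts
import { Client, type ClientConfig } from 'pg';

// Drop leftover per-test databases; they are all named unleashtestdb_<uuid>.
export async function dropStaleTestDbs(db: ClientConfig): Promise<void> {
    const client = new Client(db);
    await client.connect();
    const result = await client.query(
        `select datname from pg_database where datname like 'unleashtestdb_%';`,
    );
    for (const row of result.rows) {
        console.log(`Dropping test database ${row.datname}`);
        await client.query(`DROP DATABASE ${row.datname}`);
    }
    await client.end();
}
```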
## Benefits
1. It allows us to migrate only once and still get the benefits of a well-known state for tests.
2. It removes some of the custom setup for tests (which in some cases ends up testing something unrealistic).
3. It removes the need for the dedicated migration test (https://github.com/Unleash/unleash/blob/main/src/test/e2e/migrator.e2e.test.ts), as migrations are run at the start; migration-specific tests can still be written as sketched below.
4. It forces us to keep old tests up to date when we modify our database.
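For completeness, migration-specific tests can still stop at a given migration and resume later; this is roughly how the rewritten migration tests in this diff do it:

```ts
import dbInit, { type ITestDb } from '../../test/e2e/helpers/database-init';
import getLogger from '../../test/fixtures/no-logger';
import { migrateDb, resetDb } from '../../migrator';

let db: ITestDb;

test('migration-specific behaviour', async () => {
    // Initialize with the legacy method, stopping before the migration under test.
    db = await dbInit('system_user_migration', getLogger, {
        stopMigrationAt: '20231221143955-feedback-table.js',
        dbInitMethod: 'legacy',
    });
    // ...seed data here, then run up to (and including) the migration under test...
    await migrateDb(db.config, '20231222071533-unleash-system-user.js');
    // ...assertions...
    // Down migrations can still be exercised with resetDb(db.config).
});
```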
This commit is contained in:
parent a8ea174ead
commit 5e9698fe63
@@ -57,9 +57,9 @@
"test:report": "NODE_ENV=test PORT=4243 jest --reporters=\"default\" --reporters=\"jest-junit\"",
"test:docker:cleanup": "docker rm -f unleash-postgres",
"test:watch": "yarn test --watch",
"test:coverage": "NODE_ENV=test PORT=4243 jest --coverage --testLocationInResults --outputFile=\"coverage/report.json\" --forceExit --testTimeout=10000",
"test:coverage:jest": "NODE_ENV=test PORT=4243 jest --silent --ci --json --coverage --testLocationInResults --outputFile=\"report.json\" --forceExit --testTimeout=10000",
"test:updateSnapshot": "NODE_ENV=test PORT=4243 jest --updateSnapshot --testTimeout=10000",
"test:coverage": "NODE_ENV=test PORT=4243 jest --coverage --testLocationInResults --outputFile=\"coverage/report.json\" --forceExit",
"test:coverage:jest": "NODE_ENV=test PORT=4243 jest --silent --ci --json --coverage --testLocationInResults --outputFile=\"report.json\" --forceExit",
"test:updateSnapshot": "NODE_ENV=test PORT=4243 jest --updateSnapshot",
"seed:setup": "ts-node --compilerOptions '{\"strictNullChecks\": false}' src/test/e2e/seed/segment.seed.ts",
"seed:serve": "UNLEASH_DATABASE_NAME=unleash_test UNLEASH_DATABASE_SCHEMA=seed yarn run start:dev",
"clean": "del-cli --force dist",
@@ -81,7 +81,7 @@
"automock": false,
"maxWorkers": 4,
"testTimeout": 20000,
"globalSetup": "./scripts/jest-setup.js",
"globalSetup": "./scripts/jest-setup.ts",
"transform": {
"^.+\\.tsx?$": [
"@swc/jest"
@@ -1,3 +0,0 @@ scripts/jest-setup.js (removed)
module.exports = async () => {
process.env.TZ = 'UTC';
};

scripts/jest-setup.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import { Client, type ClientConfig } from 'pg';
import { migrateDb } from '../src/migrator';
import { getDbConfig } from '../src/test/e2e/helpers/database-config';

let initializationPromise: Promise<void> | null = null;
const initializeTemplateDb = (db: ClientConfig): Promise<void> => {
    if (!initializationPromise) {
        initializationPromise = (async () => {
            const testDBTemplateName = process.env.TEST_DB_TEMPLATE_NAME;
            const client = new Client(db);
            await client.connect();
            console.log(`Initializing template database ${testDBTemplateName}`);
            // code to clean up, but only on next run, we could do it at tear down... but is it really needed?
            // const result = await client.query(`select datname from pg_database where datname like 'unleashtestdb_%';`)
            // result.rows.forEach(async (row: any) => {
            //     console.log(`Dropping test database ${row.datname}`);
            //     await client.query(`DROP DATABASE ${row.datname}`);
            // });
            await client.query(`DROP DATABASE IF EXISTS ${testDBTemplateName}`);
            await client.query(`CREATE DATABASE ${testDBTemplateName}`);
            await client.end();
            await migrateDb({
                db: { ...db, database: testDBTemplateName },
            } as any);
            console.log(`Template database ${testDBTemplateName} migrated`);
        })();
    }
    return initializationPromise;
};

export default async function globalSetup() {
    process.env.TZ = 'UTC';
    process.env.TEST_DB_TEMPLATE_NAME = 'unleash_template_db';
    await initializeTemplateDb(getDbConfig());
}
@ -31,7 +31,7 @@ test(`Should indicate change request enabled status`, async () => {
|
||||
// change request enabled in enabled environment
|
||||
await db.rawDatabase('change_request_settings').insert({
|
||||
project: 'default',
|
||||
environment: 'default',
|
||||
environment: 'development',
|
||||
required_approvals: 1,
|
||||
});
|
||||
const enabledStatus =
|
||||
@ -41,7 +41,7 @@ test(`Should indicate change request enabled status`, async () => {
|
||||
// change request enabled in disabled environment
|
||||
await db.stores.projectStore.deleteEnvironmentForProject(
|
||||
'default',
|
||||
'default',
|
||||
'development',
|
||||
);
|
||||
const disabledStatus =
|
||||
await readModel.isChangeRequestsEnabledForProject('default');
|
||||
|
@ -19,7 +19,9 @@ let db: ITestDb;
|
||||
let eventStore: IEventStore;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('dependent_features', getLogger);
|
||||
db = await dbInit('dependent_features', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -27,7 +27,9 @@ let eventBus: EventEmitter;
|
||||
let featureLifecycleReadModel: IFeatureLifecycleReadModel;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('feature_lifecycle', getLogger);
|
||||
db = await dbInit('feature_lifecycle', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithAuth(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -19,7 +19,9 @@ let db: ITestDb;
|
||||
let stores: IUnleashStores;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('feature_search', getLogger);
|
||||
db = await dbInit('feature_search', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithAuth(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -11,7 +11,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('archive_test_serial', getLogger);
|
||||
db = await dbInit('archive_test_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -7,6 +7,7 @@ import {
|
||||
setupAppWithCustomConfig,
|
||||
} from '../../../../test/e2e/helpers/test-helper';
|
||||
import getLogger from '../../../../test/fixtures/no-logger';
|
||||
import type { IUnleashOptions } from '../../../internals';
|
||||
|
||||
let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
@ -14,13 +15,12 @@ let db: ITestDb;
|
||||
const setupLastSeenAtTest = async (featureName: string) => {
|
||||
await app.createFeature(featureName);
|
||||
|
||||
await insertLastSeenAt(featureName, db.rawDatabase, 'default');
|
||||
await insertLastSeenAt(featureName, db.rawDatabase, 'development');
|
||||
await insertLastSeenAt(featureName, db.rawDatabase, 'production');
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
const config = {
|
||||
const config: Partial<IUnleashOptions> = {
|
||||
experimental: {
|
||||
flags: {
|
||||
strictSchemaValidation: true,
|
||||
@ -34,29 +34,6 @@ beforeAll(async () => {
|
||||
config,
|
||||
);
|
||||
app = await setupAppWithCustomConfig(db.stores, config, db.rawDatabase);
|
||||
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'development',
|
||||
type: 'development',
|
||||
sortOrder: 1,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'production',
|
||||
type: 'production',
|
||||
sortOrder: 2,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
await app.services.projectService.addEnvironmentToProject(
|
||||
'default',
|
||||
'development',
|
||||
);
|
||||
await app.services.projectService.addEnvironmentToProject(
|
||||
'default',
|
||||
'production',
|
||||
);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@ -67,7 +44,7 @@ afterAll(async () => {
|
||||
test('should return last seen at per env for /api/admin/features', async () => {
|
||||
await app.createFeature('lastSeenAtPerEnv');
|
||||
|
||||
await insertLastSeenAt('lastSeenAtPerEnv', db.rawDatabase, 'default');
|
||||
await insertLastSeenAt('lastSeenAtPerEnv', db.rawDatabase, 'development');
|
||||
|
||||
const response = await app.request
|
||||
.get('/api/admin/projects/default/features')
|
||||
@ -94,10 +71,7 @@ test('response should include last seen at per environment for multiple environm
|
||||
|
||||
const featureEnvironments = body.features[1].environments;
|
||||
|
||||
const [def, development, production] = featureEnvironments;
|
||||
|
||||
expect(def.name).toBe('default');
|
||||
expect(def.lastSeenAt).toEqual('2023-10-01T12:34:56.000Z');
|
||||
const [development, production] = featureEnvironments;
|
||||
|
||||
expect(development.name).toBe('development');
|
||||
expect(development.lastSeenAt).toEqual('2023-10-01T12:34:56.000Z');
|
||||
@ -117,10 +91,7 @@ test('response should include last seen at per environment for multiple environm
|
||||
const { body } = await app.request.get(`/api/admin/archive/features`);
|
||||
|
||||
const featureEnvironments = body.features[0].environments;
|
||||
const [def, development, production] = featureEnvironments;
|
||||
|
||||
expect(def.name).toBe('default');
|
||||
expect(def.lastSeenAt).toEqual('2023-10-01T12:34:56.000Z');
|
||||
const [development, production] = featureEnvironments;
|
||||
|
||||
expect(development.name).toBe('development');
|
||||
expect(development.lastSeenAt).toEqual('2023-10-01T12:34:56.000Z');
|
||||
@ -142,10 +113,7 @@ test('response should include last seen at per environment for multiple environm
|
||||
);
|
||||
|
||||
const featureEnvironments = body.features[0].environments;
|
||||
const [def, development, production] = featureEnvironments;
|
||||
|
||||
expect(def.name).toBe('default');
|
||||
expect(def.lastSeenAt).toEqual('2023-10-01T12:34:56.000Z');
|
||||
const [development, production] = featureEnvironments;
|
||||
|
||||
expect(development.name).toBe('development');
|
||||
expect(development.lastSeenAt).toEqual('2023-10-01T12:34:56.000Z');
|
||||
@ -163,13 +131,6 @@ test('response should include last seen at per environment correctly for a singl
|
||||
await setupLastSeenAtTest(`${featureName}4`);
|
||||
await setupLastSeenAtTest(`${featureName}5`);
|
||||
|
||||
await insertLastSeenAt(
|
||||
featureName,
|
||||
db.rawDatabase,
|
||||
'default',
|
||||
'2023-08-01T12:30:56.000Z',
|
||||
);
|
||||
|
||||
await insertLastSeenAt(
|
||||
featureName,
|
||||
db.rawDatabase,
|
||||
@ -189,10 +150,6 @@ test('response should include last seen at per environment correctly for a singl
|
||||
.expect(200);
|
||||
|
||||
const expected = [
|
||||
{
|
||||
name: 'default',
|
||||
lastSeenAt: '2023-08-01T12:30:56.000Z',
|
||||
},
|
||||
{
|
||||
name: 'development',
|
||||
lastSeenAt: '2023-08-01T12:30:56.000Z',
|
||||
|
@ -55,6 +55,7 @@ beforeAll(async () => {
|
||||
db = await dbInit(
|
||||
'feature_toggle_service_v2_service_serial',
|
||||
config.getLogger,
|
||||
{ dbInitMethod: 'legacy' as const },
|
||||
);
|
||||
unleashConfig = config;
|
||||
stores = db.stores;
|
||||
|
@ -17,7 +17,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('feature_strategy_auth_api_serial', getLogger);
|
||||
db = await dbInit('feature_strategy_auth_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithAuth(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -92,7 +92,9 @@ const updateStrategy = async (
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('feature_strategy_api_serial', getLogger);
|
||||
db = await dbInit('feature_strategy_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -23,7 +23,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
let frontendApiService: FrontendApiService;
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('frontend_api', getLogger);
|
||||
db = await dbInit('frontend_api', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithAuth(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -44,12 +44,6 @@ const noEnvironmentEvent = (days: number) => {
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('product_changes_serial', getLogger);
|
||||
await db.rawDatabase('environments').insert({
|
||||
name: 'production',
|
||||
type: 'production',
|
||||
enabled: true,
|
||||
protected: false,
|
||||
});
|
||||
getProductionChanges = createGetProductionChanges(db.rawDatabase);
|
||||
});
|
||||
|
||||
@ -136,12 +130,6 @@ test('five events per day should be counted correctly', async () => {
|
||||
});
|
||||
|
||||
test('Events posted to a non production environment should not be included in count', async () => {
|
||||
await db.rawDatabase('environments').insert({
|
||||
name: 'development',
|
||||
type: 'development',
|
||||
enabled: true,
|
||||
protected: false,
|
||||
});
|
||||
await db.rawDatabase
|
||||
.table('events')
|
||||
.insert(mockRawEventDaysAgo(1, 'development'));
|
||||
|
@ -357,12 +357,6 @@ describe('bulk metrics', () => {
|
||||
enableApiToken: true,
|
||||
},
|
||||
});
|
||||
await authed.db('environments').insert({
|
||||
name: 'development',
|
||||
sort_order: 5000,
|
||||
type: 'development',
|
||||
enabled: true,
|
||||
});
|
||||
const clientToken =
|
||||
await authed.services.apiTokenService.createApiTokenWithProjects({
|
||||
tokenName: 'bulk-metrics-test',
|
||||
|
@ -10,7 +10,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('advanced_playground', getLogger);
|
||||
db = await dbInit('advanced_playground', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -18,7 +18,9 @@ let eventService: EventService;
|
||||
|
||||
beforeAll(async () => {
|
||||
const config = createTestConfig();
|
||||
db = await dbInit('environment_service_serial', config.getLogger);
|
||||
db = await dbInit('environment_service_serial', config.getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
stores = db.stores;
|
||||
eventService = createEventsService(db.rawDatabase, config);
|
||||
service = new EnvironmentService(stores, config, eventService);
|
||||
|
@ -10,7 +10,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('project_environments_api_serial', getLogger);
|
||||
db = await dbInit('project_environments_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -1274,7 +1274,7 @@ test('A newly created project only gets connected to enabled environments', asyn
|
||||
await projectService.createProject(project, user, auditUser);
|
||||
const connectedEnvs =
|
||||
await db.stores.projectStore.getEnvironmentsForProject(project.id);
|
||||
expect(connectedEnvs).toHaveLength(2); // default, connection_test
|
||||
expect(connectedEnvs).toHaveLength(1); // connection_test
|
||||
expect(
|
||||
connectedEnvs.some((e) => e.environment === enabledEnv),
|
||||
).toBeTruthy();
|
||||
@ -1321,7 +1321,6 @@ test('should have environments sorted in order', async () => {
|
||||
await db.stores.projectStore.getEnvironmentsForProject(project.id);
|
||||
|
||||
expect(connectedEnvs.map((e) => e.environment)).toEqual([
|
||||
'default',
|
||||
first,
|
||||
second,
|
||||
third,
|
||||
@ -2809,13 +2808,7 @@ describe('create project with environments', () => {
|
||||
disabledEnv,
|
||||
];
|
||||
|
||||
const allEnabledEnvs = [
|
||||
'QA',
|
||||
'default',
|
||||
'development',
|
||||
'production',
|
||||
'staging',
|
||||
];
|
||||
const allEnabledEnvs = ['QA', 'development', 'production', 'staging'];
|
||||
|
||||
beforeEach(async () => {
|
||||
await Promise.all(
|
||||
|
@ -17,7 +17,9 @@ let projectStore: IProjectStore;
|
||||
const testDate = '2023-10-01T12:34:56.000Z';
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('projects_api_serial', getLogger);
|
||||
db = await dbInit('projects_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -118,6 +118,7 @@ beforeAll(async () => {
|
||||
anonymiseEventLog: true,
|
||||
},
|
||||
},
|
||||
dbInitMethod: 'legacy' as const,
|
||||
};
|
||||
|
||||
db = await dbInit('segments_api_serial', getLogger, customOptions);
|
||||
|
@ -197,7 +197,9 @@ const createTestSegments = async () => {
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('segments', getLogger);
|
||||
db = await dbInit('segments', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -1,20 +1,6 @@
|
||||
import { getDbConfig } from '../../test/e2e/helpers/database-config';
|
||||
import { createTestConfig } from '../../test/config/test-config';
|
||||
import { log } from 'db-migrate-shared';
|
||||
import { Client } from 'pg';
|
||||
import type { IDBOption } from '../../lib/types';
|
||||
import { migrateDb } from '../../migrator';
|
||||
import dbInit, { type ITestDb } from '../../test/e2e/helpers/database-init';
|
||||
import { SYSTEM_USER } from './core';
|
||||
|
||||
log.setLogLevel('error');
|
||||
|
||||
async function initSchema(db: IDBOption): Promise<void> {
|
||||
const client = new Client(db);
|
||||
await client.connect();
|
||||
await client.query(`DROP SCHEMA IF EXISTS ${db.schema} CASCADE`);
|
||||
await client.query(`CREATE SCHEMA IF NOT EXISTS ${db.schema}`);
|
||||
await client.end();
|
||||
}
|
||||
import getLogger from '../../test/fixtures/no-logger';
|
||||
|
||||
describe('System user definitions in code and db', () => {
|
||||
let dbDefinition: {
|
||||
@ -24,36 +10,21 @@ describe('System user definitions in code and db', () => {
|
||||
id: number;
|
||||
image_url: string | null;
|
||||
};
|
||||
let db: ITestDb;
|
||||
beforeAll(async () => {
|
||||
jest.setTimeout(15000);
|
||||
const config = createTestConfig({
|
||||
db: {
|
||||
...getDbConfig(),
|
||||
pool: { min: 1, max: 4 },
|
||||
schema: 'system_user_alignment_test',
|
||||
ssl: false,
|
||||
},
|
||||
});
|
||||
db = await dbInit('system_user_alignment_test', getLogger);
|
||||
|
||||
await initSchema(config.db);
|
||||
|
||||
const e2e = {
|
||||
...config.db,
|
||||
connectionTimeoutMillis: 2000,
|
||||
};
|
||||
|
||||
await migrateDb(config);
|
||||
|
||||
const client = new Client(config.db);
|
||||
await client.connect();
|
||||
|
||||
const query = await client.query(
|
||||
`select * from system_user_alignment_test.users where id = -1337;`,
|
||||
const query = await db.rawDatabase.raw(
|
||||
`select * from users where id = -1337;`,
|
||||
);
|
||||
|
||||
dbDefinition = query.rows[0];
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
test('usernames match', () => {
|
||||
expect(SYSTEM_USER.username).toBe(dbDefinition.username);
|
||||
});
|
||||
|
@@ -5,35 +5,50 @@ import { secondsToMilliseconds } from 'date-fns';
log.setLogLevel('error');

export async function migrateDb({ db }: IUnleashConfig): Promise<void> {
const custom = {
...db,
connectionTimeoutMillis: secondsToMilliseconds(10),
};
async function noDatabaseUrl<T>(fn: () => Promise<T>): Promise<T> {
// unset DATABASE_URL so it doesn't take presedence over the provided db config
const dbUrlEnv = process.env.DATABASE_URL;
delete process.env.DATABASE_URL;
const result = fn();
process.env.DATABASE_URL = dbUrlEnv;
return result;
}
export async function migrateDb(
{ db }: IUnleashConfig,
stopAt?: string,
): Promise<void> {
return noDatabaseUrl(async () => {
const custom = {
...db,
connectionTimeoutMillis: secondsToMilliseconds(10),
};

// disable Intellij/WebStorm from setting verbose CLI argument to db-migrator
process.argv = process.argv.filter((it) => !it.includes('--verbose'));
const dbm = getInstance(true, {
cwd: __dirname,
config: { custom },
env: 'custom',
// disable Intellij/WebStorm from setting verbose CLI argument to db-migrator
process.argv = process.argv.filter((it) => !it.includes('--verbose'));
const dbm = getInstance(true, {
cwd: __dirname,
config: { custom },
env: 'custom',
});

return dbm.up(stopAt);
});

return dbm.up();
}

// This exists to ease testing
export async function resetDb({ db }: IUnleashConfig): Promise<void> {
const custom = {
...db,
connectionTimeoutMillis: secondsToMilliseconds(10),
};
return noDatabaseUrl(async () => {
const custom = {
...db,
connectionTimeoutMillis: secondsToMilliseconds(10),
};

const dbm = getInstance(true, {
cwd: __dirname,
config: { custom },
env: 'custom',
const dbm = getInstance(true, {
cwd: __dirname,
config: { custom },
env: 'custom',
});

return dbm.reset();
});

return dbm.reset();
}
@ -9,7 +9,9 @@ let db: ITestDb;
|
||||
let app: IUnleashTest;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('context_api_serial', getLogger);
|
||||
db = await dbInit('context_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -24,16 +24,6 @@ beforeAll(async () => {
|
||||
},
|
||||
db.rawDatabase,
|
||||
);
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'development',
|
||||
type: 'development',
|
||||
enabled: true,
|
||||
});
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'production',
|
||||
type: 'production',
|
||||
enabled: true,
|
||||
});
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'customenvironment',
|
||||
type: 'production',
|
||||
|
@ -10,7 +10,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('environment_api_serial', getLogger);
|
||||
db = await dbInit('environment_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -14,7 +14,9 @@ let stores: IUnleashStores;
|
||||
let refreshDbMetrics: () => Promise<void>;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('instance_admin_api_serial', getLogger);
|
||||
db = await dbInit('instance_admin_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
stores = db.stores;
|
||||
await stores.settingStore.insert('instanceInfo', { id: 'test-static' });
|
||||
app = await setupAppWithCustomConfig(
|
||||
|
@ -11,9 +11,7 @@ let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('metrics_serial', getLogger, {
|
||||
experimental: {
|
||||
flags: {},
|
||||
},
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
|
@ -12,7 +12,9 @@ let db: ITestDb;
|
||||
let user: IUser;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('project_health_api_serial', getLogger);
|
||||
db = await dbInit('project_health_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(
|
||||
db.stores,
|
||||
{
|
||||
|
@ -19,10 +19,6 @@ beforeAll(async () => {
|
||||
},
|
||||
},
|
||||
});
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'development',
|
||||
type: 'development',
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
|
@ -20,14 +20,6 @@ beforeAll(async () => {
|
||||
},
|
||||
},
|
||||
});
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'development',
|
||||
type: 'development',
|
||||
});
|
||||
await db.stores.environmentStore.create({
|
||||
name: 'production',
|
||||
type: 'production',
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
|
@ -9,7 +9,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('strategy_api_serial', getLogger);
|
||||
db = await dbInit('strategy_api_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(db.stores, {
|
||||
experimental: {
|
||||
flags: {
|
||||
|
@ -11,12 +11,13 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
const featureName = 'feature.default.1';
|
||||
const username = 'test';
|
||||
const userId = -9999;
|
||||
const projectId = 'default';
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('feature_env_api_client', getLogger);
|
||||
db = await dbInit('feature_env_api_client', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithCustomConfig(db.stores, {}, db.rawDatabase);
|
||||
|
||||
await app.services.featureToggleServiceV2.createFeatureToggle(
|
||||
|
@ -13,7 +13,9 @@ let db: ITestDb;
|
||||
|
||||
let defaultToken: IApiToken;
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('metrics_two_api_client', getLogger);
|
||||
db = await dbInit('metrics_two_api_client', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupAppWithAuth(db.stores, {}, db.rawDatabase);
|
||||
defaultToken =
|
||||
await app.services.apiTokenService.createApiTokenWithProjects({
|
||||
|
@ -14,7 +14,9 @@ let app: IUnleashTest;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('register_client', getLogger);
|
||||
db = await dbInit('register_client', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
app = await setupApp(db.stores);
|
||||
});
|
||||
|
||||
|
@@ -11,13 +11,16 @@ import type EnvironmentStore from '../../../lib/features/project-environments/en
import type { IUnleashStores } from '../../../lib/types';
import type { IFeatureEnvironmentStore } from '../../../lib/types/stores/feature-environment-store';
import { DEFAULT_ENV } from '../../../lib/util/constants';
import type { IUnleashOptions, Knex } from '../../../lib/server-impl';
import type {
IUnleashConfig,
IUnleashOptions,
Knex,
} from '../../../lib/server-impl';
import { Client } from 'pg';
import { v4 as uuidv4 } from 'uuid';

// require('db-migrate-shared').log.silence(false);

// because of migrator bug
delete process.env.DATABASE_URL;

// because of db-migrate bug (https://github.com/Unleash/unleash/issues/171)
process.setMaxListeners(0);

@@ -88,22 +91,34 @@ async function setupDatabase(stores) {
}

export interface ITestDb {
config: IUnleashConfig;
stores: IUnleashStores;
reset: () => Promise<void>;
destroy: () => Promise<void>;
rawDatabase: Knex;
}

type DBTestOptions = {
dbInitMethod?: 'legacy' | 'template';
stopMigrationAt?: string; // filename where migration should stop
};

export default async function init(
databaseSchema = 'test',
getLogger: LogProvider = noLoggerProvider,
configOverride: Partial<IUnleashOptions> = {},
configOverride: Partial<IUnleashOptions & DBTestOptions> = {},
): Promise<ITestDb> {
const testDbName = `unleashtestdb_${uuidv4().replace(/-/g, '')}`;
const useDbTemplate =
(configOverride.dbInitMethod ?? 'template') === 'template';
const testDBTemplateName = process.env.TEST_DB_TEMPLATE_NAME;
const config = createTestConfig({
db: {
...getDbConfig(),
pool: { min: 1, max: 4 },
schema: databaseSchema,
...(useDbTemplate
? { database: testDbName }
: { schema: databaseSchema }),
ssl: false,
},
...configOverride,
@@ -111,29 +126,53 @@ export default async function init(
});

log.setLogLevel('error');
const db = createDb(config);

await db.raw(`DROP SCHEMA IF EXISTS ${config.db.schema} CASCADE`);
await db.raw(`CREATE SCHEMA IF NOT EXISTS ${config.db.schema}`);
await migrateDb(config);
await db.destroy();
if (useDbTemplate) {
if (!testDBTemplateName) {
throw new Error(
'TEST_DB_TEMPLATE_NAME environment variable is not set',
);
}
const client = new Client(getDbConfig());
await client.connect();

await client.query(
`CREATE DATABASE ${testDbName} TEMPLATE ${testDBTemplateName}`,
);
await client.end();
} else {
const db = createDb(config);

await db.raw(`DROP SCHEMA IF EXISTS ${config.db.schema} CASCADE`);
await db.raw(`CREATE SCHEMA IF NOT EXISTS ${config.db.schema}`);
await migrateDb(config, configOverride.stopMigrationAt);
await db.destroy();
}

const testDb = createDb(config);
const stores = await createStores(config, testDb);
const stores = createStores(config, testDb);
stores.eventStore.setMaxListeners(0);
const defaultRolePermissions = await getDefaultEnvRolePermissions(testDb);
await resetDatabase(testDb);
await setupDatabase(stores);
await restoreRolePermissions(testDb, defaultRolePermissions);

if (!useDbTemplate) {
const defaultRolePermissions =
await getDefaultEnvRolePermissions(testDb);
await resetDatabase(testDb);
await setupDatabase(stores);
await restoreRolePermissions(testDb, defaultRolePermissions);
}

return {
config,
rawDatabase: testDb,
stores,
reset: async () => {
const defaultRolePermissions =
await getDefaultEnvRolePermissions(testDb);
await resetDatabase(testDb);
await setupDatabase(stores);
await restoreRolePermissions(testDb, defaultRolePermissions);
if (!useDbTemplate) {
const defaultRolePermissions =
await getDefaultEnvRolePermissions(testDb);
await resetDatabase(testDb);
await setupDatabase(stores);
await restoreRolePermissions(testDb, defaultRolePermissions);
}
},
destroy: async () => {
return new Promise<void>((resolve, reject) => {
@ -1,22 +1,14 @@
|
||||
import { getDbConfig } from './helpers/database-config';
|
||||
import { createTestConfig } from '../config/test-config';
|
||||
import { getInstance } from 'db-migrate';
|
||||
import dbInit, { type ITestDb } from '../../test/e2e/helpers/database-init';
|
||||
|
||||
import getLogger from '../../test/fixtures/no-logger';
|
||||
|
||||
import { log } from 'db-migrate-shared';
|
||||
import { Client } from 'pg';
|
||||
import type { IDBOption } from '../../lib/types';
|
||||
import { resetDb } from '../../migrator';
|
||||
|
||||
log.setLogLevel('error');
|
||||
|
||||
const schema = 'up_n_down_migrations_test';
|
||||
|
||||
async function initSchema(db: IDBOption): Promise<void> {
|
||||
const client = new Client(db);
|
||||
await client.connect();
|
||||
await client.query(`DROP SCHEMA IF EXISTS ${db.schema} CASCADE`);
|
||||
await client.query(`CREATE SCHEMA IF NOT EXISTS ${db.schema}`);
|
||||
await client.end();
|
||||
}
|
||||
|
||||
async function validateTablesHavePrimaryKeys(db: IDBOption) {
|
||||
const client = new Client(db);
|
||||
await client.connect();
|
||||
@ -31,7 +23,6 @@ async function validateTablesHavePrimaryKeys(db: IDBOption) {
|
||||
AND tc.constraint_type = 'PRIMARY KEY'
|
||||
WHERE
|
||||
t.table_type = 'BASE TABLE'
|
||||
AND t.table_schema = '${schema}'
|
||||
AND t.table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND tc.constraint_name IS NULL;
|
||||
`,
|
||||
@ -45,34 +36,15 @@ async function validateTablesHavePrimaryKeys(db: IDBOption) {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
test('Up & down migrations work', async () => {
|
||||
jest.setTimeout(15000);
|
||||
const config = createTestConfig({
|
||||
db: {
|
||||
...getDbConfig(),
|
||||
pool: { min: 1, max: 4 },
|
||||
schema: schema,
|
||||
ssl: false,
|
||||
},
|
||||
});
|
||||
|
||||
await initSchema(config.db);
|
||||
|
||||
const e2e = {
|
||||
...config.db,
|
||||
connectionTimeoutMillis: 2000,
|
||||
};
|
||||
|
||||
// disable Intellij/WebStorm from setting verbose CLI argument to db-migrator
|
||||
process.argv = process.argv.filter((it) => !it.includes('--verbose'));
|
||||
const dbm = getInstance(true, {
|
||||
cwd: `${__dirname}/../../`, // relative to src/test/e2e
|
||||
config: { e2e },
|
||||
env: 'e2e',
|
||||
});
|
||||
|
||||
await dbm.up();
|
||||
await validateTablesHavePrimaryKeys(config.db);
|
||||
await dbm.reset();
|
||||
let db: ITestDb;
|
||||
afterAll(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
test('Up & down migrations work', async () => {
|
||||
db = await dbInit('system_user_migration', getLogger);
|
||||
// up migration is performed at the beginning of tests
|
||||
// here we just validate that the tables have primary keys
|
||||
await validateTablesHavePrimaryKeys(db.config.db);
|
||||
// then we test down migrations
|
||||
await resetDb(db.config);
|
||||
});
|
||||
|
@ -8,7 +8,9 @@ let stores: IUnleashStores;
|
||||
let db: ITestDb;
|
||||
|
||||
beforeAll(async () => {
|
||||
db = await dbInit('api_token_store_serial', getLogger);
|
||||
db = await dbInit('api_token_store_serial', getLogger, {
|
||||
dbInitMethod: 'legacy' as const,
|
||||
});
|
||||
stores = db.stores;
|
||||
});
|
||||
|
||||
|
@ -1,70 +1,37 @@
|
||||
import { getDbConfig } from './helpers/database-config';
|
||||
import { createTestConfig } from '../config/test-config';
|
||||
import { getInstance } from 'db-migrate';
|
||||
import { log } from 'db-migrate-shared';
|
||||
import { Client } from 'pg';
|
||||
import type { IDBOption } from '../../lib/types';
|
||||
import getLogger from '../../test/fixtures/no-logger';
|
||||
import dbInit, { type ITestDb } from '../../test/e2e/helpers/database-init';
|
||||
import { migrateDb } from '../../migrator';
|
||||
|
||||
log.setLogLevel('error');
|
||||
|
||||
async function initSchema(db: IDBOption): Promise<void> {
|
||||
const client = new Client(db);
|
||||
await client.connect();
|
||||
await client.query(`DROP SCHEMA IF EXISTS ${db.schema} CASCADE`);
|
||||
await client.query(`CREATE SCHEMA IF NOT EXISTS ${db.schema}`);
|
||||
await client.end();
|
||||
}
|
||||
|
||||
let db: ITestDb;
|
||||
afterAll(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
test('System user creation migration correctly sets is_system', async () => {
|
||||
jest.setTimeout(15000);
|
||||
const config = createTestConfig({
|
||||
db: {
|
||||
...getDbConfig(),
|
||||
pool: { min: 1, max: 4 },
|
||||
schema: 'system_user_migration_test',
|
||||
ssl: false,
|
||||
},
|
||||
db = await dbInit('system_user_migration', getLogger, {
|
||||
stopMigrationAt: '20231221143955-feedback-table.js',
|
||||
dbInitMethod: 'legacy',
|
||||
});
|
||||
|
||||
await initSchema(config.db);
|
||||
|
||||
const e2e = {
|
||||
...config.db,
|
||||
connectionTimeoutMillis: 2000,
|
||||
};
|
||||
|
||||
// disable Intellij/WebStorm from setting verbose CLI argument to db-migrator
|
||||
process.argv = process.argv.filter((it) => !it.includes('--verbose'));
|
||||
const dbm = getInstance(true, {
|
||||
cwd: `${__dirname}/../../`, // relative to src/test/e2e
|
||||
config: { e2e },
|
||||
env: 'e2e',
|
||||
});
|
||||
|
||||
// Run all migrations up to, and including, this one, the last one before the system user migration
|
||||
await dbm.up('20231221143955-feedback-table.js');
|
||||
|
||||
// Set up the test data
|
||||
const client = new Client(config.db);
|
||||
await client.connect();
|
||||
|
||||
await client.query(`
|
||||
INSERT INTO "system_user_migration_test"."users"
|
||||
await db.rawDatabase.raw(`
|
||||
INSERT INTO "users"
|
||||
(name, username, email, created_by_user_id)
|
||||
VALUES
|
||||
('Test Person', 'testperson', 'testperson@getunleash.io', 1);
|
||||
`);
|
||||
|
||||
// Run the migration
|
||||
await dbm.up('20231222071533-unleash-system-user.js');
|
||||
await migrateDb(db.config, '20231222071533-unleash-system-user.js');
|
||||
|
||||
// Check the results
|
||||
const { rows: userResults } = await client.query(`
|
||||
SELECT * FROM "system_user_migration_test"."users" ORDER BY id;
|
||||
const { rows: userResults } = await db.rawDatabase.raw(`
|
||||
SELECT * FROM "users" ORDER BY id;
|
||||
`);
|
||||
|
||||
await client.end();
|
||||
await dbm.reset();
|
||||
console.log(userResults.map((r) => `${r.username} (${r.id})`));
|
||||
|
||||
expect(userResults.length).toEqual(2);
|
||||
expect(userResults[0].is_system).toEqual(true);
|
||||
|