From efe8b97a50c2235f9dda5fbecabf31bfa710d7f6 Mon Sep 17 00:00:00 2001
From: Victor Korzunin
Date: Sat, 21 Aug 2021 03:01:20 +0200
Subject: [PATCH] fix(env): load and parse database url in runtime

---
 .plop/index.ts.hbs                      |  50 +++++------
 .plop/plopfile.js                       |  20 +++++
 .plop/utils/find.ts.hbs                 | 112 ++++++++++++++++++++++++
 .plop/utils/index.ts.hbs                |   2 +
 .plop/utils/parseDatabaseUrl.ts.hbs     |  51 +++++++++++
 prisma/models/index.ts                  |  78 +++++++++++------
 prisma/models/utils/find.ts             | 112 ++++++++++++++++++++++++
 prisma/models/utils/index.ts            |   2 +
 prisma/models/utils/parseDatabaseUrl.ts |  51 +++++++++++
 prisma/schema.prisma                    |   2 +-
 src/index.ts                            |  33 ++++++-
 11 files changed, 458 insertions(+), 55 deletions(-)
 create mode 100644 .plop/utils/find.ts.hbs
 create mode 100644 .plop/utils/index.ts.hbs
 create mode 100644 .plop/utils/parseDatabaseUrl.ts.hbs
 create mode 100644 prisma/models/utils/find.ts
 create mode 100644 prisma/models/utils/index.ts
 create mode 100644 prisma/models/utils/parseDatabaseUrl.ts

diff --git a/.plop/index.ts.hbs b/.plop/index.ts.hbs
index c4cf92a..cd0a7a2 100644
--- a/.plop/index.ts.hbs
+++ b/.plop/index.ts.hbs
@@ -1,35 +1,33 @@
 import { ModelCtor, Sequelize } from 'sequelize';
+import { tryLoadEnvs } from '@prisma/sdk';
+import path from 'path';
+import { findSync, parseDatabaseUrl } from './utils';
+
+const dirname = findSync(process.cwd(), ['{{relativeOutputDir}}', '{{slsRelativeOutputDir}}'], ['d'], ['d'], 1)[0] || __dirname;
 
-import config from '../config/config.json';
 {{#each models}}
 import { {{name}}Factory } from './{{name}}';
 {{/each}}
 
-const env = process.env.NODE_ENV != 'prd' ? 'development' : 'production';
-const {username, password, database, host, port} = config[env];
+const config = {{{config}}};
+
+const loadedEnv = tryLoadEnvs({
+  rootEnvPath: config.relativeEnvPaths.rootEnvPath && path.resolve(dirname, config.relativeEnvPaths.rootEnvPath),
+  schemaEnvPath: config.relativeEnvPaths.schemaEnvPath && path.resolve(dirname, config.relativeEnvPaths.schemaEnvPath),
+});
+const env = loadedEnv ? loadedEnv.parsed : {};
+const databaseUrl = config.datasource.url.fromEnvVar
+  ? env[config.datasource.url.fromEnvVar]
+  : config.datasource.url.value;
+const { driver, user, password, host, port, database } = parseDatabaseUrl(databaseUrl);
 
 export const createInstance = async () => {
-  const sequelize = new Sequelize(
-    database,
-    username,
-    password,
-    {
-      host,
-      port,
-      ssl: true,
-      dialect: 'postgres',
-      dialectModule: pg,
-      pool: {},
-      dialectOptions: {
-        connectTimeout: process.env.CONNECTION_TIMEOUT
-      },
-      define: {
-        freezeTableName: true,
-        timestamps: true,
-        paranoid: true
-      }
-    },
-  );
+  const sequelize = new Sequelize(database, user, password, {
+    host,
+    port,
+    ssl: true,
+    dialect: driver,
+  });
 
   const models = {
     {{#each models}}
@@ -37,7 +35,7 @@ export const createInstance = async () => {
     {{/each}}
   };
 
-  Object.keys(models).forEach(model => {
+  Object.keys(models).forEach((model) => {
     if (models[model].associate) {
       models[model].associate(models);
     }
@@ -51,6 +49,6 @@ export const createInstance = async () => {
 
   return {
     sequelize,
-    models
+    models,
   };
 };
diff --git a/.plop/plopfile.js b/.plop/plopfile.js
index a31404c..d843f09 100644
--- a/.plop/plopfile.js
+++ b/.plop/plopfile.js
@@ -1,6 +1,26 @@
 const path = require('path');
 
 module.exports = function (plop) {
+  plop.setGenerator('utils', {
+    actions: () => [
+      {
+        type: 'add',
+        path: 'utils/find.ts',
+        templateFile: path.join(__dirname, './utils/find.ts.hbs'),
+      },
+      {
+        type: 'add',
+        path: 'utils/parseDatabaseUrl.ts',
+        templateFile: path.join(__dirname, './utils/parseDatabaseUrl.ts.hbs'),
+      },
+      {
+        type: 'add',
+        path: 'utils/index.ts',
+        templateFile: path.join(__dirname, './utils/index.ts.hbs'),
+      },
+    ],
+  });
+
   plop.setGenerator('index.ts', {
     actions: () => [
       {
diff --git a/.plop/utils/find.ts.hbs b/.plop/utils/find.ts.hbs
new file mode 100644
index 0000000..4dde614
--- /dev/null
+++ b/.plop/utils/find.ts.hbs
@@ -0,0 +1,112 @@
+import fs from 'fs';
+import path from 'path';
+
+type ItemType = 'd' | 'f' | 'l';
+type Handler = (base: string, item: string, type: ItemType) => boolean | string;
+
+/**
+ * Transform a dirent to a file type
+ * @param dirent
+ * @returns
+ */
+function direntToType(dirent: fs.Dirent | fs.Stats) {
+  return dirent.isFile() ? 'f' : dirent.isDirectory() ? 'd' : dirent.isSymbolicLink() ? 'l' : undefined;
+}
+
+/**
+ * Is true if at least one matched
+ * @param string to match against
+ * @param regexs to be matched with
+ * @returns
+ */
+function isMatched(string: string, regexs: (RegExp | string)[]) {
+  for (const regex of regexs) {
+    if (typeof regex === 'string') {
+      if (string.includes(regex)) {
+        return true;
+      }
+    } else if (regex.exec(string)) {
+      return true;
+    }
+  }
+
+  return false;
+}
+
+/**
+ * Find paths that match a set of regexes
+ * @param root to start from
+ * @param match to match against
+ * @param types to select files, folders, links
+ * @param deep to recurse in the directory tree
+ * @param limit to limit the results
+ * @param handler to further filter results
+ * @param found to add to already found
+ * @param seen to add to already seen
+ * @returns found paths (symlinks preserved)
+ */
+export function findSync(
+  root: string,
+  match: (RegExp | string)[],
+  types: ('f' | 'd' | 'l')[] = ['f', 'd', 'l'],
+  deep: ('d' | 'l')[] = [],
+  limit: number = Infinity,
+  handler: Handler = () => true,
+  found: string[] = [],
+  seen: Record<string, boolean> = {}
+) {
+  try {
+    const realRoot = fs.realpathSync(root);
+
+    // we make sure not to loop infinitely
+    if (seen[realRoot]) {
+      return found;
+    }
+
+    // we stop if we found enough results
+    if (limit - found.length <= 0) {
+      return found;
+    }
+
+    // we check that the root is a directory
+    if (direntToType(fs.statSync(realRoot)) !== 'd') {
+      return found;
+    }
+
+    // we list the items in the current root
+    const items = fs.readdirSync(root, { withFileTypes: true });
+
+    //seen[realRoot] = true
+    for (const item of items) {
+      // we get the file info for each item
+      const itemName = item.name;
+      const itemType = direntToType(item);
+      const itemPath = path.join(root, item.name);
+
+      // if the item is one of the selected
+      if (itemType && types.includes(itemType)) {
+        // if the path of an item has matched
+        if (isMatched(itemPath, match)) {
+          const value = handler(root, itemName, itemType);
+
+          // if we changed the path value
+          if (typeof value === 'string') {
+            found.push(value);
+          }
+          // if we kept the default path
+          else if (value === true) {
+            found.push(itemPath);
+          }
+        }
+      }
+
+      if (deep.includes(itemType as any)) {
+        // dive within the directory tree
+        // we recurse and continue mutating `found`
+        findSync(itemPath, match, types, deep, limit, handler, found, seen);
+      }
+    }
+  } catch {}
+
+  return found;
+}
diff --git a/.plop/utils/index.ts.hbs b/.plop/utils/index.ts.hbs
new file mode 100644
index 0000000..c30c943
--- /dev/null
+++ b/.plop/utils/index.ts.hbs
@@ -0,0 +1,2 @@
+export * from './find';
+export * from './parseDatabaseUrl';
diff --git a/.plop/utils/parseDatabaseUrl.ts.hbs b/.plop/utils/parseDatabaseUrl.ts.hbs
new file mode 100644
index 0000000..f6cf07b
--- /dev/null
+++ b/.plop/utils/parseDatabaseUrl.ts.hbs
@@ -0,0 +1,51 @@
+import url from 'url';
+import querystring from 'querystring';
+
+export function parseDatabaseUrl(databaseUrl: string) {
+  const parsedUrl = url.parse(databaseUrl, false, true);
+
+  // Query parameters end up directly in the configuration.
+  const config = querystring.parse(parsedUrl.query);
+
+  config.driver = (parsedUrl.protocol || 'sqlite3:')
+    // The protocol coming from url.parse() has a trailing :
+    .replace(/\:$/, '');
+
+  // Cloud Foundry will sometimes set a 'mysql2' scheme instead of 'mysql'.
+  if (config.driver == 'mysql2') config.driver = 'mysql';
+
+  // url.parse() produces an "auth" that looks like "user:password". No
+  // individual fields, unfortunately.
+  if (parsedUrl.auth) {
+    const userPassword = parsedUrl.auth.split(':', 2);
+    config.user = userPassword[0];
+    if (userPassword.length > 1) {
+      config.password = userPassword[1];
+    }
+  }
+
+  if (config.driver === 'sqlite3') {
+    if (parsedUrl.hostname) {
+      if (parsedUrl.pathname) {
+        // Relative path.
+        config.filename = parsedUrl.hostname + parsedUrl.pathname;
+      } else {
+        // Just a filename.
+        config.filename = parsedUrl.hostname;
+      }
+    } else {
+      // Absolute path.
+      config.filename = parsedUrl.pathname;
+    }
+  } else {
+    // Some drivers (e.g., redis) don't have database names.
+    if (parsedUrl.pathname) {
+      config.database = parsedUrl.pathname.replace(/^\//, '').replace(/\/$/, '');
+    }
+
+    if (parsedUrl.hostname) config.host = parsedUrl.hostname;
+    if (parsedUrl.port) config.port = parsedUrl.port;
+  }
+
+  return config;
+}
diff --git a/prisma/models/index.ts b/prisma/models/index.ts
index f19a5cc..8769af8 100644
--- a/prisma/models/index.ts
+++ b/prisma/models/index.ts
@@ -1,41 +1,67 @@
 import { ModelCtor, Sequelize } from 'sequelize';
+import { tryLoadEnvs } from '@prisma/sdk';
+import path from 'path';
+import { findSync, parseDatabaseUrl } from './utils';
+
+const dirname = findSync(process.cwd(), ['prisma/models', 'models'], ['d'], ['d'], 1)[0] || __dirname;
 
-import config from '../config/config.json';
 import { UserFactory } from './User';
 import { PostFactory } from './Post';
 
-const env = process.env.NODE_ENV != 'prd' ? 'development' : 'production';
-const {username, password, database, host, port} = config[env];
+const config = {
+  "generator": {
+    "name": "models",
+    "provider": {
+      "fromEnvVar": null,
+      "value": "node ./dist/generator.js"
+    },
+    "output": {
+      "value": "/Users/victor/Projects/_own/prisma-sequelize-generator/prisma/models",
+      "fromEnvVar": "null"
+    },
+    "config": {},
+    "binaryTargets": [],
+    "previewFeatures": []
+  },
+  "relativeEnvPaths": {
+    "rootEnvPath": "../../.env",
+    "schemaEnvPath": "../../.env"
+  },
+  "datasource": {
+    "name": "db",
+    "provider": "postgresql",
+    "activeProvider": "postgresql",
+    "url": {
+      "fromEnvVar": "DATABASE_URL",
+      "value": null
+    }
+  }
+};
+
+const loadedEnv = tryLoadEnvs({
+  rootEnvPath: config.relativeEnvPaths.rootEnvPath && path.resolve(dirname, config.relativeEnvPaths.rootEnvPath),
+  schemaEnvPath: config.relativeEnvPaths.schemaEnvPath && path.resolve(dirname, config.relativeEnvPaths.schemaEnvPath),
+});
+const env = loadedEnv ? loadedEnv.parsed : {};
+const databaseUrl = config.datasource.url.fromEnvVar
+  ? env[config.datasource.url.fromEnvVar]
+  : config.datasource.url.value;
+const { driver, user, password, host, port, database } = parseDatabaseUrl(databaseUrl);
 
 export const createInstance = async () => {
-  const sequelize = new Sequelize(
-    database,
-    username,
-    password,
-    {
-      host,
-      port,
-      ssl: true,
-      dialect: 'postgres',
-      dialectModule: pg,
-      pool: {},
-      dialectOptions: {
-        connectTimeout: process.env.CONNECTION_TIMEOUT
-      },
-      define: {
-        freezeTableName: true,
-        timestamps: true,
-        paranoid: true
-      }
-    },
-  );
+  const sequelize = new Sequelize(database, user, password, {
+    host,
+    port,
+    ssl: true,
+    dialect: driver,
+  });
 
   const models = {
     User: UserFactory(sequelize),
     Post: PostFactory(sequelize),
   };
 
-  Object.keys(models).forEach(model => {
+  Object.keys(models).forEach((model) => {
     if (models[model].associate) {
       models[model].associate(models);
     }
@@ -49,6 +75,6 @@ export const createInstance = async () => {
 
   return {
     sequelize,
-    models
+    models,
   };
 };
diff --git a/prisma/models/utils/find.ts b/prisma/models/utils/find.ts
new file mode 100644
index 0000000..4dde614
--- /dev/null
+++ b/prisma/models/utils/find.ts
@@ -0,0 +1,112 @@
+import fs from 'fs';
+import path from 'path';
+
+type ItemType = 'd' | 'f' | 'l';
+type Handler = (base: string, item: string, type: ItemType) => boolean | string;
+
+/**
+ * Transform a dirent to a file type
+ * @param dirent
+ * @returns
+ */
+function direntToType(dirent: fs.Dirent | fs.Stats) {
+  return dirent.isFile() ? 'f' : dirent.isDirectory() ? 'd' : dirent.isSymbolicLink() ? 'l' : undefined;
+}
+
+/**
+ * Is true if at least one matched
+ * @param string to match against
+ * @param regexs to be matched with
+ * @returns
+ */
+function isMatched(string: string, regexs: (RegExp | string)[]) {
+  for (const regex of regexs) {
+    if (typeof regex === 'string') {
+      if (string.includes(regex)) {
+        return true;
+      }
+    } else if (regex.exec(string)) {
+      return true;
+    }
+  }
+
+  return false;
+}
+
+/**
+ * Find paths that match a set of regexes
+ * @param root to start from
+ * @param match to match against
+ * @param types to select files, folders, links
+ * @param deep to recurse in the directory tree
+ * @param limit to limit the results
+ * @param handler to further filter results
+ * @param found to add to already found
+ * @param seen to add to already seen
+ * @returns found paths (symlinks preserved)
+ */
+export function findSync(
+  root: string,
+  match: (RegExp | string)[],
+  types: ('f' | 'd' | 'l')[] = ['f', 'd', 'l'],
+  deep: ('d' | 'l')[] = [],
+  limit: number = Infinity,
+  handler: Handler = () => true,
+  found: string[] = [],
+  seen: Record<string, boolean> = {}
+) {
+  try {
+    const realRoot = fs.realpathSync(root);
+
+    // we make sure not to loop infinitely
+    if (seen[realRoot]) {
+      return found;
+    }
+
+    // we stop if we found enough results
+    if (limit - found.length <= 0) {
+      return found;
+    }
+
+    // we check that the root is a directory
+    if (direntToType(fs.statSync(realRoot)) !== 'd') {
+      return found;
+    }
+
+    // we list the items in the current root
+    const items = fs.readdirSync(root, { withFileTypes: true });
+
+    //seen[realRoot] = true
+    for (const item of items) {
+      // we get the file info for each item
+      const itemName = item.name;
+      const itemType = direntToType(item);
+      const itemPath = path.join(root, item.name);
+
+      // if the item is one of the selected
+      if (itemType && types.includes(itemType)) {
+        // if the path of an item has matched
+        if (isMatched(itemPath, match)) {
+          const value = handler(root, itemName, itemType);
+
+          // if we changed the path value
+          if (typeof value === 'string') {
+            found.push(value);
+          }
+          // if we kept the default path
+          else if (value === true) {
+            found.push(itemPath);
+          }
+        }
+      }
+
+      if (deep.includes(itemType as any)) {
+        // dive within the directory tree
+        // we recurse and continue mutating `found`
+        findSync(itemPath, match, types, deep, limit, handler, found, seen);
+      }
+    }
+  } catch {}
+
+  return found;
+}
diff --git a/prisma/models/utils/index.ts b/prisma/models/utils/index.ts
new file mode 100644
index 0000000..c30c943
--- /dev/null
+++ b/prisma/models/utils/index.ts
@@ -0,0 +1,2 @@
+export * from './find';
+export * from './parseDatabaseUrl';
diff --git a/prisma/models/utils/parseDatabaseUrl.ts b/prisma/models/utils/parseDatabaseUrl.ts
new file mode 100644
index 0000000..f6cf07b
--- /dev/null
+++ b/prisma/models/utils/parseDatabaseUrl.ts
@@ -0,0 +1,51 @@
+import url from 'url';
+import querystring from 'querystring';
+
+export function parseDatabaseUrl(databaseUrl: string) {
+  const parsedUrl = url.parse(databaseUrl, false, true);
+
+  // Query parameters end up directly in the configuration.
+  const config = querystring.parse(parsedUrl.query);
+
+  config.driver = (parsedUrl.protocol || 'sqlite3:')
+    // The protocol coming from url.parse() has a trailing :
+    .replace(/\:$/, '');
+
+  // Cloud Foundry will sometimes set a 'mysql2' scheme instead of 'mysql'.
+  if (config.driver == 'mysql2') config.driver = 'mysql';
+
+  // url.parse() produces an "auth" that looks like "user:password". No
+  // individual fields, unfortunately.
+  if (parsedUrl.auth) {
+    const userPassword = parsedUrl.auth.split(':', 2);
+    config.user = userPassword[0];
+    if (userPassword.length > 1) {
+      config.password = userPassword[1];
+    }
+  }
+
+  if (config.driver === 'sqlite3') {
+    if (parsedUrl.hostname) {
+      if (parsedUrl.pathname) {
+        // Relative path.
+        config.filename = parsedUrl.hostname + parsedUrl.pathname;
+      } else {
+        // Just a filename.
+        config.filename = parsedUrl.hostname;
+      }
+    } else {
+      // Absolute path.
+      config.filename = parsedUrl.pathname;
+    }
+  } else {
+    // Some drivers (e.g., redis) don't have database names.
+    if (parsedUrl.pathname) {
+      config.database = parsedUrl.pathname.replace(/^\//, '').replace(/\/$/, '');
+    }
+
+    if (parsedUrl.hostname) config.host = parsedUrl.hostname;
+    if (parsedUrl.port) config.port = parsedUrl.port;
+  }
+
+  return config;
+}
diff --git a/prisma/schema.prisma b/prisma/schema.prisma
index d4b1ae0..6b01e8c 100644
--- a/prisma/schema.prisma
+++ b/prisma/schema.prisma
@@ -1,4 +1,4 @@
-generator client {
+generator models {
   provider = "node ./dist/generator.js"
 }
 
diff --git a/src/index.ts b/src/index.ts
index 80c409d..51568e4 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,10 +1,13 @@
 import { generatorHandler } from '@prisma/generator-helper';
-import { parseEnvValue } from '@prisma/sdk';
+import { parseEnvValue, getEnvPaths } from '@prisma/sdk';
 import nodePlop from 'node-plop';
 import * as path from 'path';
 
 import { PrismaTypeToSequelizeType } from './mappers';
 
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+// const pkg = require('../package.json');
+
 generatorHandler({
   onManifest() {
     return {
@@ -25,11 +28,37 @@ generatorHandler({
     try {
       const plop = nodePlop(path.join(__dirname, '../.plop/plopfile.js'), { destBasePath: outputDir, force: true });
 
+      const utilsGenerator = plop.getGenerator('utils');
       const indexGenerator = plop.getGenerator('index.ts');
       const modelGenerator = plop.getGenerator('Model.ts');
 
+      const schemaDir = options.schemaPath ? path.dirname(options.schemaPath) : process.cwd();
+      const schemaPath = path.join(schemaDir, 'prisma.schema');
+      const envPaths = getEnvPaths(schemaPath, { cwd: outputDir });
+
+      const config = {
+        generator: options.generator,
+        relativeEnvPaths: {
+          rootEnvPath: envPaths.rootEnvPath && path.relative(outputDir, envPaths.rootEnvPath),
+          schemaEnvPath: envPaths.schemaEnvPath && path.relative(outputDir, envPaths.schemaEnvPath),
+        },
+        // relativePath: path.relative(outputDir, schemaDir),
+        // clientVersion: pkg.version,
+        // engineVersion: options.version,
+        // datasourceNames: options.datasources.map((d) => d.name),
+        datasource: options.datasources[0],
+      };
+      const relativeOutputDir = path.relative(process.cwd(), outputDir);
+      const slsRelativeOutputDir = path.relative(process.cwd(), outputDir).split(path.sep).slice(1).join(path.sep);
+
       await Promise.all([
-        indexGenerator.runActions({ models: options.dmmf.datamodel.models }),
+        utilsGenerator.runActions({}),
+        indexGenerator.runActions({
+          models: options.dmmf.datamodel.models,
+          config: JSON.stringify(config, null, 2),
+          relativeOutputDir,
+          slsRelativeOutputDir,
+        }),
         ...options.dmmf.datamodel.models.map((model) =>
           modelGenerator.runActions({
             model,