diff --git a/.eslintrc.base.js b/.eslintrc.base.js new file mode 100644 index 0000000..9684533 --- /dev/null +++ b/.eslintrc.base.js @@ -0,0 +1,435 @@ +"use strict"; +module.exports = (projectRoot, extraRules = {}) => ({ + root: true, // fix possible "Plugin %s was conflicted between %s.json and %s.json" errors + env: { + jest: true, + browser: true, + es6: true, + node: true, + }, + extends: [ + "eslint:recommended", + "plugin:react/recommended", + "plugin:@typescript-eslint/eslint-recommended", + "plugin:import/recommended", + ], + globals: { + Atomics: "readonly", + SharedArrayBuffer: "readonly", + }, + parser: "@typescript-eslint/parser", + parserOptions: { + ecmaFeatures: { + jsx: true, + }, + ecmaVersion: 2018, + sourceType: "module", + tsconfigRootDir: projectRoot, + project: "tsconfig.json", + warnOnUnsupportedTypeScriptVersion: false, + }, + plugins: [ + "@typescript-eslint", + "import", + "lodash", + "node", + "react-hooks", + "react", + "typescript-enum", + "typescript-sort-keys", + "unused-imports", + ], + settings: { + react: { + version: "detect", + }, + "import/parsers": { + "@typescript-eslint/parser": [".ts", ".tsx"], + }, + "import/resolver": { + typescript: { + project: projectRoot, + }, + }, + }, + ignorePatterns: [ + "node_modules", + "dist", + "webpack.config.ts", + "**/bin/**", + "*.d.ts", + "*.js", + ], + rules: { + // TODO: slowly enable no-extraneous-dependencies rule below. For now, it's + // enforced only for some packages. + // + // In an ideal world, the root package.json should have 0 dependencies, and + // all packages/* should define their own dependencies by themselves, + // independently and locally. The rule below is exactly for that: it ensures + // that all package's dependencies are explicitly mentioned in its + // package.json, and no dependencies are borrowed implicitly from the root + // node_modules. + // + // In real life though, enforcing packages independency is dangerous: we may + // e.g. 
start accidentally bundle 2 React or 2 Redux versions if we forget + // to sync their versions in different monorepo packages' package.json + // files. (There must be some other lint rule for this hopefully.) + // + // In all cases, we should treat node_modules folders content as something + // secondary and transient. (It's true even now with the new "yarn + // Plug-n-Play" technology which we don't use yet.) The source of truth is + // always package.json (enforced by lint) and yarn.lock (defines the exact + // contents of all node_modules folders, bit by bit). In this schema, it + // doesn't matter at all, does yarn use hoisting or not. + // + // "import/no-extraneous-dependencies": "error"; + + "node/prefer-global/process": "error", + "node/prefer-global/console": "error", + "node/prefer-global/buffer": "error", + "node/prefer-global/url-search-params": "error", + "node/prefer-global/url": "error", + + "require-atomic-updates": "off", + "no-prototype-builtins": "off", + "react/prop-types": "off", + "react/no-unescaped-entities": "off", + "react-hooks/rules-of-hooks": "error", + "react-hooks/exhaustive-deps": "warn", + "@typescript-eslint/no-misused-promises": "error", + "@typescript-eslint/promise-function-async": "error", + "arrow-body-style": ["error", "as-needed"], + "@typescript-eslint/await-thenable": "error", + "@typescript-eslint/no-floating-promises": ["error", { ignoreVoid: false }], + "@typescript-eslint/unbound-method": ["error", { ignoreStatic: true }], + "@typescript-eslint/return-await": ["error"], + "@typescript-eslint/array-type": ["error", { default: "array-simple" }], + "@typescript-eslint/ban-ts-comment": ["error"], + "@typescript-eslint/no-useless-constructor": ["error"], + "@typescript-eslint/prefer-optional-chain": ["error"], + "@typescript-eslint/consistent-type-imports": ["error"], + eqeqeq: ["error"], + "object-shorthand": ["error", "always"], + "@typescript-eslint/unbound-method": ["error"], + 
"@typescript-eslint/no-implicit-any-catch": [ + "error", + { allowExplicitAny: true }, + ], + "typescript-enum/no-const-enum": ["error"], // not supported in SWC + + "@typescript-eslint/naming-convention": [ + "error", + { + selector: "variable", + format: ["camelCase", "PascalCase", "UPPER_CASE"], + leadingUnderscore: "allow", + trailingUnderscore: "allow", + filter: { + regex: "^__webpack", + match: false, + }, + }, + ], + + // Disable in favour of @typescript-eslint/no-unused-vars. + "no-unused-vars": "off", + "@typescript-eslint/no-unused-vars": [ + "error", + { + args: "all", + argsIgnorePattern: "^_", + varsIgnorePattern: "^_", + ignoreRestSiblings: true, + }, + ], + "@typescript-eslint/member-ordering": [ + "error", + { + // + // ATTENTION: the rules here are not simple, mainly because of this: + // https://github.com/typescript-eslint/typescript-eslint/issues/6133 + // + // Besides that, we also want contradictory things, like: + // + // 1. Having constructor close to fields definition (because people + // often define fields in the constructor arguments), although it + // logically should've been below static methods. + // 2. Having all abstract things in the class grouped, irregardless on + // their public/protected/private modifiers. + // + default: [ + "signature", + "call-signature", + + // Typically, class constants (that's why they're on top). + "public-static-field", + "public-static-get", + "public-static-set", + "protected-static-field", + "protected-static-get", + "protected-static-set", + + // All concrete fields. What's interesting is that the order we + // emotionally want here for properties is private-protected-public, + // which is the opposite to the order of methods (which is + // public-protected-private). This is likely because the methods are + // bulky, and properties are lean. 
+ "private-static-field", + "private-instance-field", + "public-instance-field", + "public-abstract-field", + "public-abstract-get", + "public-abstract-set", + + // Protected fields and methods are grouped, because eslint currently + // doesn't distinguish fields assigned with a lambda FROM methods, and + // we often times expose abstract protected overridable lambdas: + // https://github.com/typescript-eslint/typescript-eslint/issues/6133 + "protected-abstract-field", + "protected-abstract-get", + "protected-abstract-set", + "protected-abstract-method", + "public-abstract-method", // the only exception; it's to group all abstract things too + "protected-instance-field", + "protected-constructor", + "protected-static-method", + "protected-instance-get", + "protected-instance-set", + "protected-instance-method", + + // Public constructor, instance methods, static methods. + "public-constructor", // often defines more public/protected/private properties, so should be close to fields + "public-static-method", + "public-instance-get", + "public-instance-set", + "public-instance-method", + + // Private constructor, instance methods, static methods. + "private-constructor", + "private-static-method", + "private-instance-get", + "private-instance-set", + "private-instance-method", + "private-static-get", + "private-static-set", + ], + }, + ], + + "no-constant-condition": ["error", { checkLoops: false }], + "no-buffer-constructor": ["error"], + "no-console": ["error"], + curly: ["error", "all"], + "no-case-declarations": "off", + + "padding-line-between-statements": "off", + "@typescript-eslint/padding-line-between-statements": [ + "error", + // Force empty lines. 
+ { + blankLine: "always", + prev: ["block", "block-like", "function", "class", "interface", "type"], + next: "*", + }, + { + blankLine: "always", + prev: "import", + next: [ + "const", + "if", + "let", + "var", + "export", + "function", + "class", + "interface", + "type", + ], + }, + { + blankLine: "always", + prev: "*", + next: ["function", "class", "interface", "type"], + }, + // Allow one-liner functions without extra spacing (hacky): + { blankLine: "any", prev: "singleline-const", next: "*" }, + { blankLine: "any", prev: "singleline-var", next: "*" }, + { blankLine: "any", prev: "singleline-let", next: "*" }, + ], + + "no-restricted-properties": [ + "error", + { + object: "window", + property: "location", + message: + "We use React Router and History to control the location of our web or desktop app. Prefer `useLocation` in React components and `historyFromContext` in Redux Saga.", + }, + ...(projectRoot.endsWith("client") + ? [ + { + object: "window", + property: "document", + message: "Please use `useDocument` from `useDocument.tsx`.", + }, + ...[ + "addEventListener", + "removeEventListener", + "getElementById", + "documentElement", + "activeElement", + "querySelectorAll", + ].map((property) => ({ + object: "document", + property, + message: "Please use `useDocument` from `useDocument.tsx`.", + })), + ] + : []), + ], + + "no-restricted-globals": [ + "warn", + { + name: "location", + message: + "We use React Router and History to control the location of our web or desktop app. Prefer `useLocation` in React components and `historyFromContext` in Redux Saga.", + }, + ], + + "no-restricted-syntax": [ + "error", + { + selector: (() => { + const RE_BAD = "/([a-z0-9_ ]|^)E[Ii][Dd]|(^|[-_: ])eid/"; + return [ + `Identifier[name=${RE_BAD}]`, + `Literal[value=${RE_BAD}]`, + `TemplateElement[value.raw=${RE_BAD}]`, + `TSInterfaceDeclaration[id.name=${RE_BAD}]`, + ].join(","); + })(), + message: + 'Do not use "eid" or "EID" as a part of a name/field/type. 
Instead, prefer externalID or external_id.', + }, + ], + + "prefer-const": [ + "error", + { + destructuring: "all", + }, + ], + + "no-var": "error", + "no-void": "error", + + "react/forbid-dom-props": [ + "error", + { + forbid: [ + { + propName: "style", + message: "Please use CSS Modules instead", + }, + ], + }, + ], + "react/forbid-component-props": [ + "error", + { + forbid: [ + { + propName: "style", + message: "Please use CSS Modules instead", + }, + ], + }, + ], + "no-sequences": ["error"], + // Too noisy about `react` and other node_modules + "import/default": 0, + // This complains about React.forwardRef, ReactDOM.render, etc. + "import/no-named-as-default-member": 0, + // This complains about "apollo" exporting ApolloClient as a default and as a + // named import at the same time. + "import/no-named-as-default": 0, + // Does not seem to work well with node_modules + "import/named": 0, + "import/newline-after-import": "error", + "import/order": [ + "error", + { + groups: ["builtin", "external", "index", "parent", "sibling"], + pathGroups: [ + { + pattern: "./**.module.css", + group: "sibling", + position: "after", + }, + { + pattern: "./**.module.scss", + group: "sibling", + position: "after", + }, + ], + alphabetize: { + order: "asc", + caseInsensitive: true, + }, + }, + ], + "unused-imports/no-unused-imports": "error", + "no-restricted-imports": [ + "error", + { + patterns: [ + { + group: ["react-router"], + message: + "Please use react-router-dom instead, since react-router's useLocation() doesn't work properly with StaticRouter on server side.", + }, + ], + }, + ], + // Fixes a common mistake: `a ?? b < c` which feels like `(a ?? b) < c`, but + // actually is `a ?? 
(b < c)` + "no-mixed-operators": [ + "error", + { + allowSamePrecedence: false, + groups: [ + ["??", "+"], + ["??", "-"], + ["??", "*"], + ["??", "/"], + ["??", "%"], + ["??", "**"], + ["??", "&"], + ["??", "|"], + ["??", "^"], + ["??", "~"], + ["??", "<<"], + ["??", ">>"], + ["??", ">>>"], + ["??", "=="], + ["??", "!="], + ["??", "==="], + ["??", "!=="], + ["??", ">"], + ["??", ">="], + ["??", "<"], + ["??", "<="], + ["??", "&&"], + ["??", "||"], + ["??", "in"], + ["??", "instanceof"], + ], + }, + ], + + quotes: ["error", "double", { avoidEscape: true }], + ...extraRules, + }, +}); diff --git a/.eslintrc.js b/.eslintrc.js index a175c7d..3544413 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,5 +1,9 @@ "use strict"; -module.exports = require("../../.eslintrc.base.js")(__dirname, { +module.exports = require("./.eslintrc.base.js")(__dirname, { "import/no-extraneous-dependencies": "error", - "lodash/import-scope": ["error", "method"], + "@typescript-eslint/explicit-function-return-type": [ + "error", + { allowExpressions: true, allowedNames: ["configure"] }, + ], + "lodash/import-scope": ["error", "method"] }); diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..8e9f154 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,26 @@ +name: "CI Full Run" +on: + pull_request: + branches: + - main + - grok/*/* + push: + branches: + - main +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: ["20.x"] + steps: + - uses: actions/checkout@v4 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + - run: npm install -g pnpm --force + - run: pnpm install + - run: pnpm run build + - run: pnpm run lint + - run: pnpm run test diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml new file mode 100644 index 0000000..eda07d1 --- /dev/null +++ b/.github/workflows/semgrep.yml @@ -0,0 +1,36 @@ +# Name of this GitHub 
Actions workflow. +name: Semgrep + +on: + # Scan changed files in PRs (diff-aware scanning): + pull_request: + branches: ['main'] + + # Schedule the CI job (this method uses cron syntax): + schedule: + - cron: '0 0 * * MON-FRI' + +jobs: + semgrep: + # User definable name of this GitHub Actions job. + name: Scan + # If you are self-hosting, change the following `runs-on` value: + runs-on: ubuntu-latest + + container: + # A Docker image with Semgrep installed. Do not change this. + image: returntocorp/semgrep@sha256:6c7ab81e4d1fd25a09f89f1bd52c984ce107c6ff33affef6ca3bc626a4cc479b + + # Skip any PR created by dependabot to avoid permission issues: + if: (github.actor != 'dependabot[bot]') + + steps: + # Fetch project source with GitHub Actions Checkout. + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 + # Run the "semgrep ci" command on the command line of the docker image. + - run: semgrep ci + env: + # Connect to Semgrep Cloud Platform through your SEMGREP_APP_TOKEN. + # Generate a token from Semgrep Cloud Platform > Settings + # and add it to your GitHub secrets. 
+ SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6314675 --- /dev/null +++ b/.gitignore @@ -0,0 +1,11 @@ +dist + +# Common in both .gitignore and .npmignore +node_modules +package-lock.json +yarn.lock +pnpm-lock.yaml +.DS_Store +*.log +*.tmp +*.swp diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..7ec72e7 --- /dev/null +++ b/.npmignore @@ -0,0 +1,14 @@ +dist/__tests__ +dist/**/__tests__ +dist/tsconfig.tsbuildinfo +.npmrc + +# Common in both .gitignore and .npmignore +node_modules +package-lock.json +yarn.lock +pnpm-lock.yaml +.DS_Store +*.log +*.tmp +*.swp diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000..c52ad5f --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +# Published to https://www.npmjs.com diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..69c81d9 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,8 @@ +{ + "recommendations": [ + "dbaeumer.vscode-eslint", + "esbenp.prettier-vscode", + "mhutchie.git-graph", + "trentrand.git-rebase-shortcuts" + ] +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..7076e3d --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,20 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "git grok: push local commits as individual PRs", + "detail": "Install git-grok first: https://github.com/dimikot/git-grok", + "type": "shell", + "command": "git grok", + "problemMatcher": [], + "hide": false + }, + { + "label": "git rebase --interactive", + "detail": "Opens a UI for interactive rebase (install \"Git rebase shortcuts\" extension).", + "type": "shell", + "command": "GIT_EDITOR=\"code --wait\" git rebase -i", + "problemMatcher": [] + } + ] +} diff --git a/README.md b/README.md new file mode 100644 index 0000000..ddde1ac --- /dev/null +++ b/README.md @@ -0,0 +1,126 @@ +# @clickup/pg-mig: PostgreSQL schema migration tool with micro-sharding 
and clustering support + +See also [Full API documentation](https://github.com/clickup/pg-mig/blob/master/docs/modules.md). + +![CI run](https://github.com/clickup/pg-mig/actions/workflows/ci.yml/badge.svg?branch=main) + +The tool allows to create a PostgreSQL database schema (with tables, indexes, +sequences, functions etc.) and apply it consistently across multiple PG hosts +(even more, across multiple micro-shard schemas on multiple hosts). The behavior +is transactional per each micro-shard per version ("all or nothing"). + +In other words, **pg-mig** helps to keep your database clusters' schemas identical +(each micro-shard schema will have exactly the same DDL structure as any other +schema on all other PG hosts). + +# Usage + +``` +pg-mig + [--migdir=path/to/my-migrations/directory] + [--hosts=master1,master2,...] + [--port=5432] + [--user=user-which-can-apply-ddl] + [--pass=password] + [--db=my-database-name] + [--undo=20191107201239.my-migration-name.sh] + [--make=my-migration-name@sh] + [--parallelism=8] + [--dry] + [--list] + [--ci] +``` + +All of the arguments are optional: the tool tries to use `PGHOST`, `PGPORT`, +`PGUSER`, `PGPASSWORD`, `PGDATABASE` environment variables which are standard +for e.g. `psql`. + +It also uses `PGMIGDIR` environment variable as a default value for `--migdir` +option. + +When running in default mode, **pg-mig** tool reads (in order) the migration +versions `*.up.sql` files from the migration directory and applies them to all of +the hostnames passed (of course, checking whether it has already been applied +before or not). See below for more details. 
+ +## Migration Version File Format + +The migration version file name has the following format, examples: + +``` +20191107201239.add-table-abc.sh0000.up.sql +20191107201239.add-table-abc.sh0000.dn.sql +20231317204837.some-other-name.sh.up.sql +20231317204837.some-other-name.sh.dn.sql +20231203493744.anything-works.public.up.sql +20231203493744.anything-works.public.dn.sql +``` + +Here, + +- the 1st part is a UTC timestamp when the migration version file was created, +- the 2nd part is a descriptive name of the migration (can be arbitrary), +- the 3rd part is the "PostgreSQL schema name prefix" (micro-shard name prefix) +- the 4th part is either "up" ("up" migration) or "dn" ("down" migration). + Up-migrations roll the database schema version forward, and down-migrations + allow to undo the changes. + +It is the responsibility of the user to create up- and down-migration SQL files. +Basically, the user provides DDL SQL queries on how to roll the database schema +forward and how to roll it backward. + +You can use any `psql`-specific instructions in `*.sql` files: they are fed to +`psql` tool directly. E.g. you can use environment variables, `\echo`, `\ir` for +inclusion etc. See [psql +documentation](https://www.postgresql.org/docs/current/app-psql.html) for +details. + +## Applying the Migrations + +Each migration version will be applied (in order) to all PG schemas (aka +micro-shards) on all hosts whose names start from the provided prefix (if +multiple migration files match some schema, then only the file with the longest +prefix will be used; in the above example, prefix "sh" effectively works as "sh* +except sh0000" wildcard). + +The main idea is that, if the migration file application succeeds, then it will +be remembered on the corresponding PG host, in the corresponding schema +(micro-shard) itself. So next time when you run the tool, it will understand +that the migration version has already been applied, and won't try to apply it +again. 
+ +When the tool runs, it prints a live-updating progress, which migration version +file is in progress on which PG host in which schema (micro-shard). In the end, +it prints the final versions map across all of the hosts and schemas. + +## Undoing the Migrations + +If `--undo` argument is used, then the tool will try to run the down-migration +for the corresponding version everywhere. If it succeeds, then it will +remember that fact on the corresponding PG host in the corresponding schema. +Only the very latest migration version applied can be undone. + +Undoing migrations in production is not recommended (since the code which uses +the database may rely on its new structure), although you can use it of course. +The main use case for undoing the migrations is while development: you may want +to test your DDL statements multiple times, or you may pull from Git and get +someone else's migration before yours, so you'll need to undo your migration and +recreate its files. + +## Creating the New Migration Files + +If `--make` argument is used, **pg-mig** creates a new pair of empty files in the +migration directory. E.g. if you run: + +``` +pg-mig --migdir=my-dir --make=my-migration-name@sh +``` + +then it will create a pair of files which looks like +`my-dir/20231203493744.my-migration-name.sh.up.sql` and +`my-dir/20231203493744.my-migration-name.sh.dn.sql` which you can edit further. + +New migration version files can only be appended in the end. If **pg-mig** detects +that you try to apply migrations which conflict with the existing migration +versions remembered in the database, it will print the error and refuse to +continue. This is similar to "fast-forward" mode in Git. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..ea94832 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,39 @@ +# Security + +Keeping our clients' data secure is an absolute top priority at ClickUp. 
Our goal is to provide a secure environment, while also being mindful of application performance and the overall user experience. + +ClickUp believes effective disclosure of security vulnerabilities requires mutual trust, respect, transparency and common good between ClickUp and Security Researchers. Together, our vigilant expertise promotes the continued security and privacy of ClickUp customers, products, and services. + +If you think you've found a security vulnerability in any ClickUp-owned repository, please let us know as outlined below. + +ClickUp defines a security vulnerability as an unintended weakness or exposure that could be used to compromise the integrity, availability or confidentiality of our products and services. + +## Our Commitment to Reporters + +- **Trust**. We maintain trust and confidentiality in our professional exchanges with security researchers. +- **Respect**. We treat all researchers with respect and recognize your contribution for keeping our customers safe and secure. +- **Transparency**. We will work with you to validate and remediate reported vulnerabilities in accordance with our commitment to security and privacy. +- **Common Good**. We investigate and remediate issues in a manner consistent with protecting the safety and security of those potentially affected by a reported vulnerability. + +## What We Ask of Reporters + +- **Trust**. We request that you communicate about potential vulnerabilities in a responsible manner, providing sufficient time and information for our team to validate and address potential issues. +- **Respect**. We request that researchers make every effort to avoid privacy violations, degradation of user experience, disruption to production systems, and destruction of data during security testing. +- **Transparency**. We request that researchers provide the technical details and background necessary for our team to identify and validate reported issues, using the form below. +- **Common Good**. 
We request that researchers act for the common good, protecting user privacy and security by refraining from publicly disclosing unverified vulnerabilities until our team has had time to validate and address reported issues. + +## Vulnerability Reporting + +ClickUp recommends that you share the details of any suspected vulnerabilities across any asset owned, controlled, or operated by ClickUp (or that would reasonably impact the security of ClickUp and our users) using our vulnerability disclosure form at . The ClickUp Security team will acknowledge receipt of each valid vulnerability report, conduct a thorough investigation, and then take appropriate action for resolution. + +## Safe Harbor + +When conducting vulnerability research according to this policy, we consider this research to be: + +- Authorized in accordance with the Computer Fraud and Abuse Act (CFAA) (and/or similar state laws), and we will not initiate or support legal action against you for accidental, good faith violations of this policy; +- Exempt from the Digital Millennium Copyright Act (DMCA), and we will not bring a claim against you for circumvention of technology controls; +- Exempt from restrictions in our Terms & Conditions that would interfere with conducting security research, and we waive those restrictions on a limited basis for work done under this policy; and +- Lawful, helpful to the overall security of the Internet, and conducted in good faith. +- You are expected, as always, to comply with all applicable laws. + +If at any time you have concerns or are uncertain whether your security research is consistent with this policy, please inquire via before going any further. diff --git a/docs/.nojekyll b/docs/.nojekyll new file mode 100644 index 0000000..e2ac661 --- /dev/null +++ b/docs/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..014d2dd --- /dev/null +++ b/docs/README.md @@ -0,0 +1,128 @@ +@clickup/pg-mig / [Exports](modules.md) + +# @clickup/pg-mig: PostgreSQL schema migration tool with micro-sharding and clustering support + +See also [Full API documentation](https://github.com/clickup/pg-mig/blob/master/docs/modules.md). + +![CI run](https://github.com/clickup/pg-mig/actions/workflows/ci.yml/badge.svg?branch=main) + +The tool allows to create a PostgreSQL database schema (with tables, indexes, +sequences, functions etc.) and apply it consistently across multiple PG hosts +(even more, across multiple micro-shard schemas on multiple hosts). The behavior +is transactional per each micro-shard per version ("all or nothing"). + +In other words, **pg-mig** helps to keep your database clusters' schemas identical +(each micro-shard schema will have exactly the same DDL structure as any other +schema on all other PG hosts). + +# Usage + +``` +pg-mig + [--migdir=path/to/my-migrations/directory] + [--hosts=master1,master2,...] + [--port=5432] + [--user=user-which-can-apply-ddl] + [--pass=password] + [--db=my-database-name] + [--undo=20191107201239.my-migration-name.sh] + [--make=my-migration-name@sh] + [--parallelism=8] + [--dry] + [--list] + [--ci] +``` + +All of the arguments are optional: the tool tries to use `PGHOST`, `PGPORT`, +`PGUSER`, `PGPASSWORD`, `PGDATABASE` environment variables which are standard +for e.g. `psql`. + +It also uses `PGMIGDIR` environment variable as a default value for `--migdir` +option. + +When running in default mode, **pg-mig** tool reads (in order) the migration +versions `*.up.sql` files from the migration directory and applies them to all of +the hostnames passed (of course, checking whether it has already been applied +before or not). See below for more details. 
+ +## Migration Version File Format + +The migration version file name has the following format, examples: + +``` +20191107201239.add-table-abc.sh0000.up.sql +20191107201239.add-table-abc.sh0000.dn.sql +20231317204837.some-other-name.sh.up.sql +20231317204837.some-other-name.sh.dn.sql +20231203493744.anything-works.public.up.sql +20231203493744.anything-works.public.dn.sql +``` + +Here, + +- the 1st part is a UTC timestamp when the migration version file was created, +- the 2nd part is a descriptive name of the migration (can be arbitrary), +- the 3rd part is the "PostgreSQL schema name prefix" (micro-shard name prefix) +- the 4th part is either "up" ("up" migration) or "dn" ("down" migration). + Up-migrations roll the database schema version forward, and down-migrations + allow to undo the changes. + +It is the responsibility of the user to create up- and down-migration SQL files. +Basically, the user provides DDL SQL queries on how to roll the database schema +forward and how to roll it backward. + +You can use any `psql`-specific instructions in `*.sql` files: they are fed to +`psql` tool directly. E.g. you can use environment variables, `\echo`, `\ir` for +inclusion etc. See [psql +documentation](https://www.postgresql.org/docs/current/app-psql.html) for +details. + +## Applying the Migrations + +Each migration version will be applied (in order) to all PG schemas (aka +micro-shards) on all hosts whose names start from the provided prefix (if +multiple migration files match some schema, then only the file with the longest +prefix will be used; in the above example, prefix "sh" effectively works as "sh* +except sh0000" wildcard). + +The main idea is that, if the migration file application succeeds, then it will +be remembered on the corresponding PG host, in the corresponding schema +(micro-shard) itself. So next time when you run the tool, it will understand +that the migration version has already been applied, and won't try to apply it +again. 
+ +When the tool runs, it prints a live-updating progress, which migration version +file is in progress on which PG host in which schema (micro-shard). In the end, +it prints the final versions map across all of the hosts and schemas. + +## Undoing the Migrations + +If `--undo` argument is used, then the tool will try to run the down-migration +for the corresponding version everywhere. If it succeeds, then it will +remember that fact on the corresponding PG host in the corresponding schema. +Only the very latest migration version applied can be undone. + +Undoing migrations in production is not recommended (since the code which uses +the database may rely on its new structure), although you can use it of course. +The main use case for undoing the migrations is while development: you may want +to test your DDL statements multiple times, or you may pull from Git and get +someone else's migration before yours, so you'll need to undo your migration and +recreate its files. + +## Creating the New Migration Files + +If `--make` argument is used, **pg-mig** creates a new pair of empty files in the +migration directory. E.g. if you run: + +``` +pg-mig --migdir=my-dir --make=my-migration-name@sh +``` + +then it will create a pair of files which looks like +`my-dir/20231203493744.my-migration-name.sh.up.sql` and +`my-dir/20231203493744.my-migration-name.sh.dn.sql` which you can edit further. + +New migration version files can only be appended in the end. If **pg-mig** detects +that you try to apply migrations which conflict with the existing migration +versions remembered in the database, it will print the error and refuse to +continue. This is similar to "fast-forward" mode in Git. 
diff --git a/docs/modules.md b/docs/modules.md new file mode 100644 index 0000000..b8d2051 --- /dev/null +++ b/docs/modules.md @@ -0,0 +1,69 @@ +[@clickup/pg-mig](README.md) / Exports + +# @clickup/pg-mig + +## Functions + +### main + +▸ **main**(): `Promise`\<`boolean`\> + +CLI tool entry point. This function is run when `pg-mig` is called from the +command line. Accepts parameters from process.argv. See `migrate()` for +option names. + +If no options are passed, uses `PGHOST`, `PGPORT`, `PGUSER`, `PGPASSWORD`, +`PGDATABASE` environment variables which are standard for e.g. `psql`. + +You can pass multiple hosts separated by comma or semicolon. + +Examples: +``` +pg-mig --make=my-migration-name@sh +pg-mig --make=other-migration-name@sh0000 +pg-mig --undo 20191107201239.my-migration-name.sh +pg-mig +``` + +#### Returns + +`Promise`\<`boolean`\> + +#### Defined in + +[src/cli.ts:39](https://github.com/clickup/pg-mig/blob/master/src/cli.ts#L39) + +___ + +### migrate + +▸ **migrate**(`options`): `Promise`\<`boolean`\> + +Similar to main(), but accepts options explicitly, not from process.argv. +This function is meant to be called from other tools. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `options` | `Object` | - | +| `options.migDir` | `string` | The directory the migration versions are loaded from. | +| `options.hosts` | `string`[] | List of PostgreSQL master hostnames. The migration versions in `migDir` will be applied to all of them. | +| `options.port` | `number` | PostgreSQL port on each hosts. | +| `options.user` | `string` | PostgreSQL user on each host. | +| `options.pass` | `string` | PostgreSQL password on each host. | +| `options.db` | `string` | PostgreSQL database name on each host. | +| `options.parallelism?` | `number` | How many schemas to process in parallel (defaults to 10). | +| `options.undo?` | `string` | If passed, switches the action to undo the provided migration version. 
| +| `options.make?` | `string` | If passed, switches the action to create a new migration version. | +| `options.dry?` | `boolean` | If true, prints what it plans to do, but doesn't change anything. | +| `options.list?` | `boolean` | Lists all versions in `migDir`. | +| `options.ci?` | `boolean` | If true, then doesn't use logUpdate() and doesn't replace lines; instead, prints logs to stdout line by line. | + +#### Returns + +`Promise`\<`boolean`\> + +#### Defined in + +[src/cli.ts:79](https://github.com/clickup/pg-mig/blob/master/src/cli.ts#L79) diff --git a/internal/clean.sh b/internal/clean.sh new file mode 100644 index 0000000..f82a4a1 --- /dev/null +++ b/internal/clean.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -e + +rm -rf dist yarn.lock package-lock.json pnpm-lock.yaml *.log diff --git a/internal/deploy.sh b/internal/deploy.sh new file mode 100644 index 0000000..7b12dc6 --- /dev/null +++ b/internal/deploy.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +npm run build +npm run lint +npm run test +npm publish --access=public diff --git a/internal/docs.sh b/internal/docs.sh new file mode 100644 index 0000000..ea628f6 --- /dev/null +++ b/internal/docs.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +rm -rf docs +typedoc --plugin typedoc-plugin-markdown --plugin typedoc-plugin-merge-modules +sed -i '' -E 's#packages/[^/]+/##g' $(find docs -type f -name '*.md') diff --git a/internal/lint.sh b/internal/lint.sh new file mode 100644 index 0000000..8815fd6 --- /dev/null +++ b/internal/lint.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -e + +eslint . --ext .ts --cache --cache-location dist/.eslintcache diff --git a/jest.config.js b/jest.config.js index 7ba2d4c..c0ea465 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,5 +1,13 @@ "use strict"; - module.exports = { - ...require("../../jest.config.base")(), + roots: ["/src"], + testMatch: ["**/*.test.ts"], + clearMocks: true, + restoreMocks: true, + ...(process.env.IN_JEST_PROJECT + ? 
{} + : { forceExit: true, testTimeout: 30000, forceExit: true }), + transform: { + "\\.ts$": "ts-jest", + }, }; diff --git a/package.json b/package.json index c1e273d..bccc46c 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { - "name": "pg-mig", - "description": "Postgres database migration tool with multi-schema (sharding) support", - "version": "2.10.291", + "name": "@clickup/pg-mig", + "description": "PostgreSQL schema migration tool with micro-sharding and clustering support", + "version": "2.10.296", "license": "MIT", "main": "dist/cli.js", "types": "dist/cli.d.ts", @@ -9,10 +9,15 @@ "pg-mig": "./dist/cli.js" }, "scripts": { - "build": "tsc.sh", - "dev": "tight-loop.sh tsc.sh --watch", - "lint": "lint.sh", - "test": "test.sh" + "build": "tsc", + "dev": "tsc --watch --preserveWatchOutput", + "lint": "bash internal/lint.sh", + "test": "jest", + "docs": "bash internal/docs.sh", + "clean": "bash internal/clean.sh", + "copy-package-to-public-dir": "copy-package-to-public-dir.sh", + "backport-package-from-public-dir": "backport-package-from-public-dir.sh", + "deploy": "bash internal/deploy.sh" }, "dependencies": { "await-semaphore": "^0.1.3", @@ -26,13 +31,36 @@ "table-layout": "^1.0.2" }, "devDependencies": { - "@types/shell-quote": "^1.7.1" + "@types/jest": "^29.5.5", + "@types/lodash": "^4.14.175", + "@types/minimist": "^1.2.2", + "@types/node": "^20.4.1", + "@types/pg": "^8.6.1", + "@types/prompts": "^2.4.0", + "@types/shell-quote": "^1.7.1", + "@types/sprintf-js": "^1.1.2", + "@typescript-eslint/eslint-plugin": "^5.59.6", + "@typescript-eslint/parser": "^5.59.6", + "eslint-import-resolver-typescript": "^3.5.5", + "eslint-plugin-import": "^2.27.5", + "eslint-plugin-lodash": "^7.4.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-react-hooks": "^4.6.0", + "eslint-plugin-react": "^7.32.2", + "eslint-plugin-typescript-enum": "^2.1.0", + "eslint-plugin-typescript-sort-keys": "^2.3.0", + "eslint-plugin-unused-imports": "^2.0.0", + "eslint": 
"^8.40.0", + "jest": "^29.7.0", + "prettier": "3.2.1", + "ts-jest": "^29.1.1", + "typedoc-plugin-markdown": "^3.16.0", + "typedoc-plugin-merge-modules": "^5.1.0", + "typedoc": "^0.25.2", + "typescript": "^5.2.2" }, "repository": { "type": "git", - "url": "git://github.com/time-loop/github-packages" - }, - "publishConfig": { - "registry": "https://npm.pkg.github.com/" + "url": "git://github.com/clickup/pg-mig" } } diff --git a/src/cli.ts b/src/cli.ts index 3c81767..54c159f 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -2,11 +2,13 @@ import { basename } from "path"; import sortBy from "lodash/sortBy"; import throttle from "lodash/throttle"; import logUpdate from "log-update"; -import { Dest } from "./Dest"; -import { Grid } from "./Grid"; -import type { Chain } from "./Patch"; -import { Patch } from "./Patch"; -import { Registry } from "./Registry"; +import { Dest } from "./internal/Dest"; +import { Grid } from "./internal/Grid"; +import { Args } from "./internal/helpers/Args"; +import { makeMigration } from "./internal/helpers/makeMigration"; +import type { Chain } from "./internal/Patch"; +import { Patch } from "./internal/Patch"; +import { Registry } from "./internal/Registry"; import { printError, printSuccess, @@ -14,65 +16,94 @@ import { renderGrid, renderLatestVersions, renderPatchSummary, -} from "./render"; -import { Args } from "./utils/Args"; -import { makeMigration } from "./utils/makeMigration"; - -// Examples: -// yarn db:migrate --make=space_members_add_email@sh0000 -// yarn db:migrate --undo 20191107201239.space_members.sh0000 -// yarn db:migrate --undo 20191107201238.space_users_remove.sh - -export async function main() { +} from "./internal/render"; + +/** + * CLI tool entry point. This function is run when `pg-mig` is called from the + * command line. Accepts parameters from process.argv. See `migrate()` for + * option names. 
+ * + * If no options are passed, uses `PGHOST`, `PGPORT`, `PGUSER`, `PGPASSWORD`, + * `PGDATABASE` environment variables which are standard for e.g. `psql`. + * + * You can pass multiple hosts separated by comma or semicolon. + * + * Examples: + * ``` + * pg-mig --make=my-migration-name@sh + * pg-mig --make=other-migration-name@sh0000 + * pg-mig --undo 20191107201239.my-migration-name.sh + * pg-mig + * ``` + */ +export async function main(): Promise { const args = new Args( process.argv, // Notice that we use --migdir and not --dir, because @mapbox/node-pre-gyp // used by bcrypt conflicts with --dir option. [ + "migdir", "hosts", "port", "user", "pass", "db", - "migdir", - "parallelism", "undo", "make", + "parallelism", ], ["dry", "ci", "list"], ); return migrate({ + migDir: args.get("migdir", process.env["PGMIGDIR"]), hosts: args - .get("hosts", process.env.PGHOST || "localhost") + .get("hosts", process.env["PGHOST"] || "127.0.0.1") .split(/[\s,;]+/), - port: parseInt(args.get("port", process.env.PGPORT || "5432")), - user: args.get("user", process.env.PGUSER || ""), - pass: args.get("pass", process.env.PGPASSWORD || ""), - db: args.get("db", process.env.PGDATABASE), - undo: args.get("undo", "empty"), + port: parseInt(args.get("port", process.env["PGPORT"] || "5432")), + user: args.get("user", process.env["PGUSER"] || ""), + pass: args.get("pass", process.env["PGPASSWORD"] || ""), + db: args.get("db", process.env["PGDATABASE"]), + undo: args.getOptional("undo"), + make: args.getOptional("make"), + parallelism: parseInt(args.get("parallelism", "0")) || undefined, dry: args.flag("dry"), list: args.flag("list"), - make: args.get("make", ""), - migDir: args.get("migdir"), - parallelism: parseInt(args.get("parallelism", "0")) || 10, ci: args.flag("ci"), }); } +/** + * Similar to main(), but accepts options explicitly, not from process.argv. + * This function is meant to be called from other tools. 
+ */ export async function migrate(options: { + /** The directory the migration versions are loaded from. */ + migDir: string; + /** List of PostgreSQL master hostnames. The migration versions in `migDir` + * will be applied to all of them. */ hosts: string[]; + /** PostgreSQL port on each hosts. */ port: number; + /** PostgreSQL user on each host. */ user: string; + /** PostgreSQL password on each host. */ pass: string; + /** PostgreSQL database name on each host. */ db: string; - undo: string; - dry: boolean; - list: boolean; - make: string; - migDir: string; - parallelism: number; - ci: boolean; -}) { + /** How many schemas to process in parallel (defaults to 10). */ + parallelism?: number; + /** If passed, switches the action to undo the provided migration version. */ + undo?: string; + /** If passed, switches the action to create a new migration version. */ + make?: string; + /** If true, prints what it plans to do, but doesn't change anything. */ + dry?: boolean; + /** Lists all versions in `migDir`. */ + list?: boolean; + /** If true, then doesn't use logUpdate() and doesn't replace lines; instead, + * prints logs to stdout line by line. */ + ci?: boolean; +}): Promise { const hostDests = options.hosts.map( (host) => new Dest( @@ -88,11 +119,11 @@ export async function migrate(options: { printText(`Running on ${options.hosts}:${options.port} ${options.db}`); - if (options.make) { + if (options.make !== undefined) { // example: create_table_x@sh const [migrationName, schemaPrefix] = options.make.split("@"); - const usage = "Format: --make=migration_name@schema_prefix"; + if (!migrationName?.match(/^[a-z0-9_]+$/)) { printError("migration_name is missing or incorrect"); printText(usage); @@ -143,9 +174,7 @@ export async function migrate(options: { return false; } - const patch = new Patch(hostDests, registry, { - undo: options.undo !== "empty" ? 
options.undo : undefined, - }); + const patch = new Patch(hostDests, registry, { undo: options.undo }); const chains = await patch.getChains(); const [summary, hasWork] = renderPatchSummary(chains); @@ -183,7 +212,12 @@ export async function migrate(options: { ], })) : []; - const grid = new Grid(chains, options.parallelism, beforeChains, afterChains); + const grid = new Grid( + chains, + options.parallelism ?? 10, + beforeChains, + afterChains, + ); const success = await grid.run( throttle(() => { diff --git a/src/Dest.ts b/src/internal/Dest.ts similarity index 94% rename from src/Dest.ts rename to src/internal/Dest.ts index 0bf33df..c6cb8c1 100644 --- a/src/Dest.ts +++ b/src/internal/Dest.ts @@ -20,7 +20,7 @@ export class Dest { /** * Returns a Dest switched to a different schema. */ - createSchemaDest(schema: string) { + createSchemaDest(schema: string): Dest { return new Dest( this.host, this.port, @@ -34,7 +34,7 @@ export class Dest { /** * Returns a human-readable representation of the dest. */ - toString() { + toString(): string { return this.host + ":" + this.schema; } @@ -46,7 +46,7 @@ export class Dest { file: string, newVersions: string[] | null, onOut: (proc: Psql) => void = () => {}, - ) { + ): Promise { const psql = new Psql( this, dirname(file), @@ -97,7 +97,7 @@ export class Dest { /** * Returns all the shard-like schemas from the DB. */ - async loadSchemas() { + async loadSchemas(): Promise { return this.queryCol( "SELECT nspname FROM pg_namespace WHERE nspname NOT LIKE '%\\_%'", ); @@ -107,9 +107,11 @@ export class Dest { * Given a list of schemas, extracts versions for each schema * (which is a list of migration names). */ - async loadVersionsBySchema(schemas: string[]) { + async loadVersionsBySchema( + schemas: string[], + ): Promise> { if (!schemas.length) { - return new Map(); + return new Map(); } const inClause = schemas.map((v) => this.escape(v)).join(", "); @@ -139,7 +141,7 @@ export class Dest { /** * SQL value quoting. 
*/ - private escape(v: string) { + private escape(v: string): string { return "'" + ("" + v).replace(/'/g, "''") + "'"; } diff --git a/src/Grid.ts b/src/internal/Grid.ts similarity index 98% rename from src/Grid.ts rename to src/internal/Grid.ts index 11c8d19..c4ceda4 100644 --- a/src/Grid.ts +++ b/src/internal/Grid.ts @@ -213,7 +213,10 @@ class Worker { } } - private async acquireSemaphore(maxWorkers: number, key: string) { + private async acquireSemaphore( + maxWorkers: number, + key: string, + ): Promise<() => void> { let semaphore = this.semaphores[key]; if (!semaphore) { semaphore = this.semaphores[key] = new Semaphore(maxWorkers); diff --git a/src/Patch.ts b/src/internal/Patch.ts similarity index 100% rename from src/Patch.ts rename to src/internal/Patch.ts diff --git a/src/Psql.ts b/src/internal/Psql.ts similarity index 92% rename from src/Psql.ts rename to src/internal/Psql.ts index 4587ec1..6c68134 100644 --- a/src/Psql.ts +++ b/src/internal/Psql.ts @@ -27,27 +27,27 @@ export class Psql { this._cmdline = "psql " + quote(this._args); } - get code() { + get code(): number | null { return this._code; } - get stdout() { + get stdout(): string { return this._stdout; } - get stderr() { + get stderr(): string { return this._stderr; } - get out() { + get out(): string { return this._out; } - get cmdline() { + get cmdline(): string { return this._cmdline; } - get lastOutLine() { + get lastOutLine(): string { let pos = this._out.lastIndexOf("\n"); let end = this._out.length; // Find the 1st non-empty line scanning backward. 
@@ -71,7 +71,7 @@ export class Psql { PGUSER: this.dest.user, PGPASSWORD: this.dest.pass, PGDATABASE: this.dest.db, - PATH: process.env.PATH, + PATH: process.env["PATH"], }, }); diff --git a/src/Registry.ts b/src/internal/Registry.ts similarity index 91% rename from src/Registry.ts rename to src/internal/Registry.ts index 4adaad9..bb69259 100644 --- a/src/Registry.ts +++ b/src/internal/Registry.ts @@ -1,9 +1,9 @@ import { existsSync, lstatSync, readdirSync, readFileSync } from "fs"; import { basename } from "path"; import sortBy from "lodash/sortBy"; -import { DefaultMap } from "./utils/DefaultMap"; -import { extractVars } from "./utils/extractVars"; -import { validateCreateIndexConcurrently } from "./utils/validateCreateIndexConcurrently"; +import { DefaultMap } from "./helpers/DefaultMap"; +import { extractVars } from "./helpers/extractVars"; +import { validateCreateIndexConcurrently } from "./helpers/validateCreateIndexConcurrently"; /** * One migration file (either *.up.* or *.dn.*). @@ -83,7 +83,7 @@ export class Registry { ); } - get prefixes() { + get prefixes(): string[] { return Array.from(this.entriesByPrefix.keys()); } @@ -123,24 +123,24 @@ export class Registry { return entriesBySchema; } - getVersions() { + getVersions(): string[] { return [...this.versions]; } - hasVersion(version: string) { + hasVersion(version: string): boolean { return this.versions.has(version); } - extractVersion(name: string) { + extractVersion(name: string): string { const matches = name.match(/^\d+\.[^.]+\.[^.]+/); return matches ? 
matches[0] : name; } } -function schemaNameMatchesPrefix(schema: string, prefix: string) { +function schemaNameMatchesPrefix(schema: string, prefix: string): boolean { return ( schema.startsWith(prefix) && - schema.substring(prefix.length).match(/^(\d|$)/s) + !!schema.substring(prefix.length).match(/^(\d|$)/s) ); } diff --git a/src/utils/Args.ts b/src/internal/helpers/Args.ts similarity index 78% rename from src/utils/Args.ts rename to src/internal/helpers/Args.ts index f240941..54156ac 100644 --- a/src/utils/Args.ts +++ b/src/internal/helpers/Args.ts @@ -13,16 +13,20 @@ export class Args { }); } + getOptional(name: TStringArgs): string | undefined { + return this.args[name]; + } + get(name: TStringArgs, def?: string): string { const v = this.args[name] !== undefined ? this.args[name] : def; if (v === undefined) { - throw "Parameter " + name + " is missing"; + throw `Parameter ${name} is missing`; } return v; } - flag(name: TFlagArgs) { + flag(name: TFlagArgs): boolean { return !!this.args[name]; } } diff --git a/src/utils/DefaultMap.ts b/src/internal/helpers/DefaultMap.ts similarity index 100% rename from src/utils/DefaultMap.ts rename to src/internal/helpers/DefaultMap.ts diff --git a/src/utils/__tests__/extractVars.test.ts b/src/internal/helpers/__tests__/extractVars.test.ts similarity index 100% rename from src/utils/__tests__/extractVars.test.ts rename to src/internal/helpers/__tests__/extractVars.test.ts diff --git a/src/utils/__tests__/validateCreateIndexConcurrently.test.ts b/src/internal/helpers/__tests__/validateCreateIndexConcurrently.test.ts similarity index 100% rename from src/utils/__tests__/validateCreateIndexConcurrently.test.ts rename to src/internal/helpers/__tests__/validateCreateIndexConcurrently.test.ts diff --git a/src/utils/collapse.ts b/src/internal/helpers/collapse.ts similarity index 90% rename from src/utils/collapse.ts rename to src/internal/helpers/collapse.ts index 1ed2a29..924ea6f 100644 --- a/src/utils/collapse.ts +++ 
b/src/internal/helpers/collapse.ts @@ -1,7 +1,7 @@ import { multirange } from "multi-integer-range"; import { DefaultMap } from "./DefaultMap"; -export function collapse(list: string[]) { +export function collapse(list: string[]): string[] { const res = []; const numberSuffixes = new DefaultMap(); for (const s of list.sort()) { diff --git a/src/utils/extractVars.ts b/src/internal/helpers/extractVars.ts similarity index 100% rename from src/utils/extractVars.ts rename to src/internal/helpers/extractVars.ts diff --git a/src/utils/makeMigration.ts b/src/internal/helpers/makeMigration.ts similarity index 96% rename from src/utils/makeMigration.ts rename to src/internal/helpers/makeMigration.ts index d7942d0..edd5ca7 100644 --- a/src/utils/makeMigration.ts +++ b/src/internal/helpers/makeMigration.ts @@ -6,7 +6,7 @@ export async function makeMigration( migrationDir: string, migrationName: string, schemaPrefix: string, -) { +): Promise { const utcTimestamp = moment(Date.now()).utc().format("YYYYMMDDHHmmss"); const migrationFilenameBase = `${utcTimestamp}.${migrationName}.${schemaPrefix}`; diff --git a/src/utils/validateCreateIndexConcurrently.ts b/src/internal/helpers/validateCreateIndexConcurrently.ts similarity index 100% rename from src/utils/validateCreateIndexConcurrently.ts rename to src/internal/helpers/validateCreateIndexConcurrently.ts diff --git a/src/render.ts b/src/internal/render.ts similarity index 91% rename from src/render.ts rename to src/internal/render.ts index 981f76b..cf7092f 100644 --- a/src/render.ts +++ b/src/internal/render.ts @@ -2,10 +2,10 @@ import chalk from "chalk"; import sortBy from "lodash/sortBy"; import type { Dest } from "./Dest"; import type { Grid } from "./Grid"; +import { collapse } from "./helpers/collapse"; +import { DefaultMap } from "./helpers/DefaultMap"; import type { Chain } from "./Patch"; import type { Registry } from "./Registry"; -import { collapse } from "./utils/collapse"; -import { DefaultMap } from 
"./utils/DefaultMap"; const Table = require("table-layout"); @@ -16,7 +16,7 @@ const TABLE_OPTIONS = { maxWidth: process.stdout.columns - 2, }; -export function renderGrid(grid: Grid) { +export function renderGrid(grid: Grid): string { const activeRows: string[][] = []; const errorRows: string[][] = []; for (const worker of sortBy( @@ -118,7 +118,10 @@ export function renderPatchSummary(chains: Chain[]): [string, boolean] { ]; } -export async function renderLatestVersions(dests: Dest[], registry: Registry) { +export async function renderLatestVersions( + dests: Dest[], + registry: Registry, +): Promise { const destsGrouped = new DefaultMap(); await Promise["all"]( dests.map(async (dest) => { @@ -147,19 +150,19 @@ export async function renderLatestVersions(dests: Dest[], registry: Registry) { ); } -export function printText(text: string) { +export function printText(text: string): void { // eslint-disable-next-line no-console return console.log(text); } -export function printSuccess(text: string) { +export function printSuccess(text: string): void { return printText(chalk.green("" + text)); } -export function printError(error: any) { +export function printError(error: unknown): void { return printText(chalk.red("Error: " + error)); } -function formatHost(host: string) { +function formatHost(host: string): string { return host.match(/^\d+\.\d+\.\d+\.\d+$/) ? 
host : host.replace(/\..*/, ""); } diff --git a/tsconfig.base.json b/tsconfig.base.json new file mode 100644 index 0000000..783953d --- /dev/null +++ b/tsconfig.base.json @@ -0,0 +1,31 @@ +{ + "include": ["src/**/*"], + "compilerOptions": { + "allowJs": true, + "declaration": true, + "declarationMap": true, + "disableReferencedProjectLoad": true, + "disableSourceOfProjectReferenceRedirect": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "incremental": true, + "lib": ["ES2019"], + "module": "Node16", + "noEmitOnError": true, + "noErrorTruncation": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noPropertyAccessFromIndexSignature": true, + "outDir": "dist", + "pretty": true, + "removeComments": false, + "resolveJsonModule": true, + "rootDir": "src", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "ES2019", + "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo", + "types": ["node", "jest"] + } +} diff --git a/tsconfig.json b/tsconfig.json index 8d8828a..8342ac4 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,13 +1,4 @@ { - "extends": "../../tsconfig.base.json", - "include": ["src/**/*"], - "compilerOptions": { - "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo", - "emitDeclarationOnly": true, // turns on SWC - "rootDir": "src", - "outDir": "dist", - "lib": ["es2019"], - "types": ["node", "jest"], - "module": "commonjs" - } + "extends": "./tsconfig.base.json", + "include": ["src/**/*"] } diff --git a/typedoc.config.js b/typedoc.config.js new file mode 100644 index 0000000..bc744b8 --- /dev/null +++ b/typedoc.config.js @@ -0,0 +1,20 @@ +"use strict"; +const { basename } = require("path"); + +module.exports = { + entryPoints: ["src"], + exclude: ["**/internal/**", "**/__tests__/**", "**/node_modules/**"], + entryPointStrategy: "expand", + mergeModulesMergeMode: "project", + sort: ["source-order"], + out: "docs", + logLevel: "Warn", + hideGenerator: true, + excludeInternal: true, + excludePrivate: true, + 
categorizeByGroup: true, + hideInPageTOC: true, + gitRevision: "master", + sourceLinkTemplate: `https://github.com/clickup/${basename(__dirname)}/blob/master/{path}#L{line}`, + basePath: ".", +};