diff --git a/.github/linters/.cspell.json b/.github/linters/.cspell.json
index 91d9784b..ccd3be6c 100644
--- a/.github/linters/.cspell.json
+++ b/.github/linters/.cspell.json
@@ -24,6 +24,7 @@
"FORCEINCLUDE",
"FULLNAME",
"Flexi",
+ "gitdir",
"hardlinks",
"Iframe",
"knip",
@@ -119,6 +120,7 @@
"quotepath",
"recentsha",
"repogitdiff",
+ "revparse",
"rulesets",
"samlssoconfig",
"samlssoconfigs",
diff --git a/README.md b/README.md
index 99392c02..f3af2210 100644
--- a/README.md
+++ b/README.md
@@ -537,9 +537,11 @@ console.log(JSON.stringify(work))
- [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser) - Validate XML, Parse XML to JS/JSON and vise versa, or parse XML to Nimn rapidly without C/C++ based libraries and no callback
- [fs-extra](https://github.com/jprichardson/node-fs-extra) - Node.js: extra methods for the fs object like copy(), remove(), mkdirs().
- [ignore](https://github.com/kaelzhang/node-ignore#readme) - is a manager, filter and parser which implemented in pure JavaScript according to the .gitignore spec 2.22.1.
+- [isomorphic-git](https://github.com/isomorphic-git/isomorphic-git) - A pure JavaScript implementation of git for node and browsers!
- [lodash](https://github.com/lodash/lodash) - A modern JavaScript utility library delivering modularity, performance & extras.
-- [xmlbuilder2](https://github.com/oozcitak/xmlbuilder2) - An XML builder for node.js.
- [MegaLinter](https://megalinter.io) - Open-Source tool for CI/CD workflows that analyzes the consistency of your code, IAC, configuration, and scripts
+- [simple-git](https://github.com/steveukx/git-js) - A light weight interface for running git commands in any node.js application.
+- [xmlbuilder2](https://github.com/oozcitak/xmlbuilder2) - An XML builder for node.js.
## Versioning
diff --git a/__tests__/functional/main.test.ts b/__tests__/functional/main.test.ts
index d3fe9cdf..69d36d0a 100644
--- a/__tests__/functional/main.test.ts
+++ b/__tests__/functional/main.test.ts
@@ -7,7 +7,7 @@ const mockValidateConfig = jest.fn()
jest.mock('../../src/utils/cliHelper', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const actualModule: any = jest.requireActual('../../src/utils/cliHelper')
- return jest.fn().mockImplementation(function () {
+ return jest.fn().mockImplementation(() => {
return {
...actualModule,
validateConfig: mockValidateConfig,
@@ -19,7 +19,7 @@ const mockGetLines = jest.fn()
jest.mock('../../src/utils/repoGitDiff', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const actualModule: any = jest.requireActual('../../src/utils/repoGitDiff')
- return jest.fn().mockImplementation(function () {
+ return jest.fn().mockImplementation(() => {
return {
...actualModule,
getLines: mockGetLines,
@@ -33,7 +33,7 @@ jest.mock('../../src/service/diffLineInterpreter', () => {
const actualModule: any = jest.requireActual(
'../../src/service/diffLineInterpreter'
)
- return jest.fn().mockImplementation(function () {
+ return jest.fn().mockImplementation(() => {
return {
...actualModule,
process: mockProcess,
diff --git a/__tests__/unit/lib/adapter/GitAdapter.test.ts b/__tests__/unit/lib/adapter/GitAdapter.test.ts
new file mode 100644
index 00000000..ee08ea4f
--- /dev/null
+++ b/__tests__/unit/lib/adapter/GitAdapter.test.ts
@@ -0,0 +1,919 @@
+'use strict'
+import { expect, jest, describe, it } from '@jest/globals'
+import { getWork } from '../../../__utils__/globalTestHelper'
+import { Config } from '../../../../src/types/config'
+import GitAdapter, {
+ contentWalker,
+ diffLineWalker,
+ filePathWalker,
+ iterate,
+} from '../../../../src/adapter/GitAdapter'
+import {
+ getLFSObjectContentPath,
+ isLFS,
+} from '../../../../src/utils/gitLfsHelper'
+import { readFile } from 'fs-extra'
+import { WalkerEntry, WalkerIterateCallback } from 'isomorphic-git'
+
+const mockedDirExists = jest.fn()
+const mockedFileExists = jest.fn()
+const mockedRaw = jest.fn()
+const mockedSetConfig = jest.fn()
+const mockedRevParse = jest.fn()
+const mockedReadObject = jest.fn()
+const mockedReadBlob = jest.fn()
+const mockedWalk = jest.fn()
+
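+// Only the simple-git calls GitAdapter relies on here (raw and revparse) are stubbed.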
+jest.mock('simple-git', () => {
+ return {
+ simpleGit: jest.fn(() => ({
+ raw: mockedRaw,
+ revparse: mockedRevParse,
+ })),
+ }
+})
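+// The isomorphic-git functions delegate through plain wrappers so the jest.fn()
+// handles declared above are resolved lazily, at call time.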
+jest.mock('isomorphic-git', () => ({
+ setConfig: function () {
+ // eslint-disable-next-line prefer-rest-params
+ return mockedSetConfig(...arguments)
+ },
+ readObject: function () {
+ // eslint-disable-next-line prefer-rest-params
+ return mockedReadObject(...arguments)
+ },
+ readBlob: function () {
+ // eslint-disable-next-line prefer-rest-params
+ return mockedReadBlob(...arguments)
+ },
+ walk: function () {
+ // eslint-disable-next-line prefer-rest-params
+ return mockedWalk(...arguments)
+ },
+ TREE: jest.fn(),
+}))
+jest.mock('../../../../src/utils/fsUtils', () => {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const actualModule: any = jest.requireActual('../../../../src/utils/fsUtils')
+
+ return {
+ ...actualModule,
+ dirExists: () => mockedDirExists(),
+ fileExists: () => mockedFileExists(),
+ }
+})
+
+jest.mock('../../../../src/utils/gitLfsHelper')
+jest.mock('fs-extra')
+
+const isLFSmocked = jest.mocked(isLFS)
+const getLFSObjectContentPathMocked = jest.mocked(getLFSObjectContentPath)
+const readFileMocked = jest.mocked(readFile)
+
+describe('GitAdapter', () => {
+ let config: Config
+ beforeEach(() => {
+ const work = getWork()
+ config = work.config
+ })
+ describe('getInstance', () => {
+ it('should return an instance of GitAdapter', () => {
+ // Arrange
+
+ // Act
+ const gitAdapter = GitAdapter.getInstance(config)
+
+ // Assert
+ expect(gitAdapter).toBeInstanceOf(GitAdapter)
+ })
+
+ it('should return the same instance of GitAdapter', () => {
+ // Arrange
+
+ // Act
+ const gitAdapter1 = GitAdapter.getInstance(config)
+ const gitAdapter2 = GitAdapter.getInstance(config)
+
+ // Assert
+ expect(gitAdapter1).toBe(gitAdapter2)
+ })
+
+ describe('when different config is passed', () => {
+ it('should return different instance of GitAdapter', () => {
+ // Arrange
+
+ // Act
+ const gitAdapter1 = GitAdapter.getInstance(config)
+ const gitAdapter2 = GitAdapter.getInstance({} as Config)
+
+ // Assert
+ expect(gitAdapter1).not.toBe(gitAdapter2)
+ })
+ })
+ })
+
+ describe('setGitDir', () => {
+ it('should set gitdir with git repository', async () => {
+ // Arrange
+ mockedDirExists.mockImplementation(() => Promise.resolve(true))
+ mockedFileExists.mockImplementation(() => Promise.resolve(false))
+ const gitAdapter = GitAdapter.getInstance({
+ ...config,
+ repo: 'repository',
+ })
+
+ // Act
+ await gitAdapter.setGitDir()
+
+ // Assert
+ expect(mockedDirExists).toBeCalledTimes(1)
+ expect(mockedFileExists).not.toBeCalled()
+ })
+
+ it('should set gitdir with submodules', async () => {
+ // Arrange
+ mockedDirExists.mockImplementation(() => Promise.resolve(false))
+ mockedFileExists.mockImplementation(() => Promise.resolve(true))
+ readFileMocked.mockResolvedValue(Buffer.from('content') as never)
+ const gitAdapter = GitAdapter.getInstance({
+ ...config,
+ repo: 'submodule',
+ })
+
+ // Act
+ await gitAdapter.setGitDir()
+
+ // Assert
+ expect(mockedDirExists).toBeCalledTimes(1)
+ expect(mockedFileExists).toBeCalledTimes(1)
+ })
+
+ it('should throw when no git material is found', async () => {
+ // Arrange
+ expect.assertions(1)
+ mockedDirExists.mockImplementation(() => Promise.resolve(false))
+ mockedFileExists.mockImplementation(() => Promise.resolve(false))
+ const gitAdapter = GitAdapter.getInstance({
+ ...config,
+ repo: 'not git material',
+ })
+
+ // Act
+ try {
+ await gitAdapter.setGitDir()
+ } catch (error) {
+ // Assert
+ expect(error).toBeDefined()
+ }
+ })
+
+ it('should set gitdir once', async () => {
+ // Arrange
+ mockedDirExists.mockImplementation(() => Promise.resolve(true))
+ mockedFileExists.mockImplementation(() => Promise.resolve(false))
+ const gitAdapter = GitAdapter.getInstance({
+ ...config,
+ repo: 'repository',
+ })
+ await gitAdapter.setGitDir()
+
+ // Act
+ await gitAdapter.setGitDir()
+
+ // Assert
+ expect(mockedDirExists).toBeCalledTimes(1)
+ expect(mockedFileExists).not.toBeCalled()
+ })
+ })
+
+ describe('configureRepository', () => {
+ it('should call setConfig', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+
+ // Act
+ await gitAdapter.configureRepository()
+
+ // Assert
+ expect(mockedSetConfig).toBeCalledTimes(1)
+ expect(mockedSetConfig).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ path: 'core.quotepath',
+ value: 'off',
+ })
+ )
+ })
+ })
+
+ describe('parseRev', () => {
+ it('should call resolveRef', async () => {
+ // Arrange
+ const expected = 'sha'
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedRevParse.mockImplementation(() => Promise.resolve(expected))
+
+ // Act
+ const result = await gitAdapter.parseRev('ref')
+
+ // Assert
+ expect(result).toStrictEqual(expected)
+ expect(mockedRevParse).toBeCalledTimes(1)
+ expect(mockedRevParse).toBeCalledWith(expect.arrayContaining(['ref']))
+ })
+ })
+
+ describe('pathExists', () => {
+ describe('when readObject returns a type', () => {
+ it.each(['tree', 'blob'])('returns true when type is %s', async type => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadObject.mockImplementation(() => Promise.resolve({ type }))
+
+ // Act
+ const result = await gitAdapter.pathExists('path')
+
+ // Assert
+ expect(result).toBe(true)
+ expect(mockedReadObject).toBeCalledTimes(1)
+ expect(mockedReadObject).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: 'path',
+ })
+ )
+ })
+ it.each(['test', 'other', null, undefined, -1])(
+        'returns false when type is neither "blob" nor "tree"',
+ async type => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadObject.mockImplementation(() => Promise.resolve({ type }))
+
+ // Act
+ const result = await gitAdapter.pathExists('path')
+
+ // Assert
+ expect(result).toBe(false)
+ expect(mockedReadObject).toBeCalledTimes(1)
+ expect(mockedReadObject).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: 'path',
+ })
+ )
+ }
+ )
+ })
+ describe('when readObject throws', () => {
+ it('returns false', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadObject.mockImplementation(() => Promise.reject())
+
+ // Act
+ const result = await gitAdapter.pathExists('path')
+
+ // Assert
+ expect(result).toBe(false)
+ expect(mockedReadObject).toBeCalledTimes(1)
+ expect(mockedReadObject).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: 'path',
+ })
+ )
+ })
+ })
+ })
+
+ describe('getFirstCommitRef', () => {
+ it('should return the first commit ref', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+ const expected = 'firstCommitRef'
+ mockedRaw.mockImplementation(() => Promise.resolve(expected))
+
+ // Act
+ const result = await gitAdapter.getFirstCommitRef()
+
+ // Assert
+ expect(result).toBe(expected)
+ expect(mockedRaw).toBeCalledTimes(1)
+ })
+ })
+
+ describe('getStringContent', () => {
+ describe('when readBlob returns a blob', () => {
+ describe('when blob references a LFS file', () => {
+ it('returns content from LFS', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadBlob.mockImplementation(() =>
+ Promise.resolve({ blob: Buffer.from('test') })
+ )
+ isLFSmocked.mockReturnValueOnce(true)
+ getLFSObjectContentPathMocked.mockReturnValueOnce('lfs/path')
+ readFileMocked.mockResolvedValue(null as never)
+ // Act
+ const result = await gitAdapter.getStringContent({
+ path: '',
+ oid: config.to,
+ })
+
+ // Assert
+ expect(result).toBe('')
+ expect(mockedReadBlob).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: '',
+ })
+ )
+ })
+ })
+ describe('when blob does not reference a LFS file', () => {
+      it('returns blob as a string', async () => {
+ // Arrange
+ const expected = 'test'
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadBlob.mockImplementation(() =>
+ Promise.resolve({ blob: Buffer.from(expected) })
+ )
+ isLFSmocked.mockReturnValueOnce(false)
+ // Act
+ const result = await gitAdapter.getStringContent({
+ path: '',
+ oid: config.to,
+ })
+
+ // Assert
+ expect(result).toBe(expected)
+ expect(mockedReadBlob).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: '',
+ })
+ )
+ })
+ })
+ })
+ describe('when readBlob throws exception', () => {
+ describe('when error name is NotFoundError', () => {
+ it('returns empty content', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadBlob.mockImplementation(() => {
+ const error = new Error()
+ error.name = 'NotFoundError'
+ return Promise.reject(error)
+ })
+ // Act
+ const result = await gitAdapter.getStringContent({
+ path: '',
+ oid: config.to,
+ })
+
+ // Assert
+ expect(result).toBe('')
+ expect(mockedReadBlob).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: '',
+ })
+ )
+ })
+ })
+ describe('when error name is not NotFoundError', () => {
+ it('throws the exception', async () => {
+ // Arrange
+ expect.assertions(1)
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadBlob.mockImplementation(() =>
+ Promise.reject(new Error('test'))
+ )
+ // Act
+ try {
+ await gitAdapter.getStringContent({
+ path: '',
+ oid: config.to,
+ })
+ } catch {
+ // Assert
+ expect(mockedReadBlob).toBeCalledWith(
+ expect.objectContaining({
+ dir: config.repo,
+ oid: config.to,
+ filepath: '',
+ })
+ )
+ }
+ })
+ })
+ })
+ })
+
+ describe('getFilesPath', () => {
+ it('calls walk', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+
+ // Act
+ await gitAdapter.getFilesPath(config.source)
+
+ // Assert
+ expect(mockedWalk).toBeCalled()
+ })
+ })
+
+ describe('getFilesFrom', () => {
+ describe('when path is a directory', () => {
+ it('returns the list of files under this directory', async () => {
+ // Arrange
+ const content = 'content'
+ const path = 'relative/path'
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadObject.mockImplementation(() =>
+ Promise.resolve({ type: 'tree' })
+ )
+ mockedWalk.mockImplementation(() =>
+ Promise.resolve([
+ {
+ path,
+ content: new TextEncoder().encode(content),
+ },
+ ])
+ )
+
+ // Act
+ const result = await gitAdapter.getFilesFrom('directory/path')
+
+ // Assert
+
+ expect(result).toEqual(
+ expect.arrayContaining([{ path, content: Buffer.from(content) }])
+ )
+ })
+ })
+ describe('when path is a file', () => {
+ it('returns the file content', async () => {
+ // Arrange
+ const content = 'content'
+ const path = 'file/path'
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadObject.mockImplementation(() =>
+ Promise.resolve({
+ type: 'blob',
+ object: new TextEncoder().encode(content),
+ })
+ )
+
+ // Act
+ const result = await gitAdapter.getFilesFrom('file/path')
+
+ // Assert
+
+ expect(result).toEqual(
+ expect.arrayContaining([{ path, content: Buffer.from(content) }])
+ )
+ })
+ })
+ describe('when path is not a directory nor a file', () => {
+      it('throws an exception', async () => {
+ // Arrange
+ expect.assertions(1)
+ const gitAdapter = GitAdapter.getInstance(config)
+ mockedReadObject.mockImplementation(() => Promise.resolve({}))
+
+ // Act
+ try {
+ await gitAdapter.getFilesFrom('wrong/path')
+ } catch (error) {
+ // Assert
+ const err = error as Error
+ expect(err.message).toBe(
+ `Path wrong/path does not exist in ${config.to}`
+ )
+ }
+ })
+ })
+ })
+
+ describe('getDiffLines', () => {
+ it('calls walk', async () => {
+ // Arrange
+ const gitAdapter = GitAdapter.getInstance(config)
+
+ // Act
+ await gitAdapter.getDiffLines()
+
+ // Assert
+ expect(mockedWalk).toBeCalled()
+ })
+ })
+
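+  // The walkers below are curried: they take their configuration first and
+  // return an isomorphic-git walk callback of the form (filepath, entries) => result.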
+ describe('filePathWalker', () => {
+ describe('when filepath should be ignored', () => {
+ describe('when filepath is "."', () => {
+ it('returns undefined', async () => {
+ // Arrange
+
+ // Act
+ const result = await filePathWalker('')('.', [null])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+ describe('when filepath is not subfolder of path', () => {
+ it('returns undefined', async () => {
+ // Arrange
+
+ // Act
+ const result = await filePathWalker('dir')('another-dir/file', [null])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+ describe('when type is not blob', () => {
+ it('returns undefined', async () => {
+ // Arrange
+ const entry = {
+ type: jest.fn(() => Promise.resolve('not-blob')),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await filePathWalker('dir')('dir/file', [entry])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+ })
+  describe('when path is a file', () => {
+    it('returns the normalized file path', async () => {
+ // Arrange
+ const entry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await filePathWalker('dir')('dir/file', [entry])
+
+ // Assert
+ expect(result).toBe('dir/file')
+ })
+ })
+ })
+
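+  // diffLineWalker is expected to produce "git diff --name-status" style lines:
+  // an A/D/M prefix, a tab, then the normalized file path.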
+ describe('diffLineWalker', () => {
+ describe('when filepath should be ignored', () => {
+ describe('when filepath is "."', () => {
+ it('returns undefined', async () => {
+ // Arrange
+
+ // Act
+ const result = await diffLineWalker(config)('.', [null])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+ describe(`when filepath does not start with "config.source"`, () => {
+ it.each(['not-force-app', 'force-app-extended'])(
+ 'returns undefined',
+ async root => {
+ // Arrange
+
+ // Act
+ const result = await diffLineWalker({
+ ...config,
+ source: 'force-app',
+ })(`${root}/test.file`, [null])
+
+ // Assert
+ expect(result).toBe(undefined)
+ }
+ )
+ })
+
+ describe('when first version of the file is not a blob', () => {
+ it('returns undefined', async () => {
+ // Arrange
+ const entry = {
+ type: jest.fn(() => Promise.resolve('not-blob')),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker(config)('file/path', [entry])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+ describe('when second version of the file is not a blob', () => {
+ it('returns undefined', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('not-blob')),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker(config)('file/path', [
+ firstEntry,
+ secondEntry,
+ ])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+      describe('when both oids are equal', () => {
+ it('returns undefined', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker(config)('file/path', [
+ firstEntry,
+ secondEntry,
+ ])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+ })
+
+ describe('when filepath should be treated', () => {
+ describe(`when filepath starts with "config.source"`, () => {
+ it('returns the normalized path', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => undefined),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker({
+ ...config,
+ source: 'force-app',
+ })('force-app/test.file', [firstEntry, secondEntry])
+
+ // Assert
+ expect(result).toBe('A\tforce-app/test.file')
+ })
+ })
+ describe('when file is added', () => {
+ it('returns the addition type and normalized path', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => undefined),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker(config)('file/path', [
+ firstEntry,
+ secondEntry,
+ ])
+
+ // Assert
+ expect(result).toBe('A\tfile/path')
+ })
+ })
+ describe('when file is deleted', () => {
+ it('returns the deletion type and normalized path', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => undefined),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker(config)('file/path', [
+ firstEntry,
+ secondEntry,
+ ])
+
+ // Assert
+ expect(result).toBe('D\tfile/path')
+ })
+ })
+ describe('when file is modified', () => {
+ it('returns the modification type and normalized path', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 11),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker(config)('file/path', [
+ firstEntry,
+ secondEntry,
+ ])
+
+ // Assert
+ expect(result).toBe('M\tfile/path')
+ })
+
+ describe('when whitespace should be ignored', () => {
+          describe('when files contain only whitespace differences', () => {
+ it('returns undefined', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ content: jest.fn(() =>
+                    Promise.resolve(Buffer.from(' \t\n\r '))
+ ),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 11),
+ content: jest.fn(() =>
+                    Promise.resolve(Buffer.from(' \t\n\r \t\n'))
+ ),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker({
+ ...config,
+ ignoreWhitespace: true,
+ })('file/path', [firstEntry, secondEntry])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+          describe('when files contain whitespace and non-whitespace differences', () => {
+ it('returns the modification type and normalized path', async () => {
+ // Arrange
+ const firstEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 10),
+ content: jest.fn(() =>
+                    Promise.resolve(Buffer.from(' \t\n\r '))
+ ),
+ } as unknown as WalkerEntry
+ const secondEntry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ oid: jest.fn(() => 11),
+ content: jest.fn(() =>
+ Promise.resolve(
+                      Buffer.from(' \t\ninformation\r \t\n')
+ )
+ ),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await diffLineWalker({
+ ...config,
+ ignoreWhitespace: true,
+ })('file/path', [firstEntry, secondEntry])
+
+ // Assert
+ expect(result).toBe('M\tfile/path')
+ })
+ })
+ })
+ })
+ })
+ })
+
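+  // contentWalker collects { path, content } pairs for blob entries under the requested directory.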
+ describe('contentWalker', () => {
+ describe('when filepath should be ignored', () => {
+ describe('when filepath is "."', () => {
+ it('returns undefined', async () => {
+ // Arrange
+
+ // Act
+ const result = await contentWalker('')('.', [null])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+ describe('when filepath is not subfolder of path', () => {
+ it('returns undefined', async () => {
+ // Arrange
+
+ // Act
+ const result = await contentWalker('dir')('another-dir/file', [null])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+
+ describe('when type is not blob', () => {
+ it('returns undefined', async () => {
+ // Arrange
+ const entry = {
+ type: jest.fn(() => Promise.resolve('not-blob')),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await contentWalker('dir')('dir/file', [entry])
+
+ // Assert
+ expect(result).toBe(undefined)
+ })
+ })
+ })
+  describe('when path is a file', () => {
+    it('returns the file path and its content', async () => {
+ // Arrange
+ const content = new TextEncoder().encode('content')
+ const entry = {
+ type: jest.fn(() => Promise.resolve('blob')),
+ content: jest.fn(() => Promise.resolve(content)),
+ } as unknown as WalkerEntry
+
+ // Act
+ const result = await contentWalker('dir')('dir/file', [entry])
+
+ // Assert
+ expect(result).toStrictEqual({
+ path: 'dir/file',
+ content,
+ })
+ })
+ })
+ })
+
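+  // iterate applies the walk callback to each tuple of WalkerEntry children.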
+ describe('iterate', () => {
+    it('calls walk on every child', async () => {
+ // Arrange
+ const children = [
+ Array.from(
+ { length: 2 },
+ (_, index) => ({ type: 'blob', oid: index }) as unknown as WalkerEntry
+ ),
+ ]
+ const walkMock = jest.fn()
+
+ // Act
+      const iterable: IterableIterator<Array<WalkerEntry>> =
+ children[Symbol.iterator]()
+ const result = await iterate(walkMock, iterable)
+
+ // Assert
+ expect(result.length).toBe(children.length)
+ expect(walkMock).toBeCalledTimes(children.length)
+ })
+
+    it('does not walk without children', async () => {
+ // Arrange
+      const children = [] as Array<WalkerEntry[]>
+ const walkMock = jest.fn()
+
+ // Act
+      const iterable: IterableIterator<Array<WalkerEntry>> =
+ children[Symbol.iterator]()
+ const result = await iterate(walkMock, iterable)
+
+ // Assert
+ expect(result.length).toBe(0)
+ expect(walkMock).not.toBeCalled()
+ })
+ })
+})
diff --git a/__tests__/unit/lib/metadata/MetadataRepositoryImpl.test.ts b/__tests__/unit/lib/metadata/MetadataRepositoryImpl.test.ts
index ab9ef21f..38e50cb7 100644
--- a/__tests__/unit/lib/metadata/MetadataRepositoryImpl.test.ts
+++ b/__tests__/unit/lib/metadata/MetadataRepositoryImpl.test.ts
@@ -8,6 +8,19 @@ describe('MetadataRepositoryImpl', () => {
let sut: MetadataRepository
beforeEach(() => {
sut = new MetadataRepositoryImpl([
+ {
+ directoryName: 'aura',
+ inFolder: false,
+ metaFile: false,
+ xmlName: 'AuraDefinitionBundle',
+ },
+ {
+ directoryName: 'applications',
+ inFolder: false,
+ metaFile: false,
+ suffix: 'app',
+ xmlName: 'CustomApplication',
+ },
{
directoryName: 'documents',
inFolder: true,
@@ -251,6 +264,20 @@ describe('MetadataRepositoryImpl', () => {
expect(result).toBeUndefined()
})
})
+
+ describe('when it should not match on extension', () => {
+ it('matches on folder', () => {
+ // Act
+ const result = sut.get(
+ 'Z force-app/main/folder/aura/TestApp/TestApp.app'
+ )
+
+ // Assert
+ expect(result).toStrictEqual(
+ expect.objectContaining({ directoryName: 'aura' })
+ )
+ })
+ })
})
describe('getFullyQualifiedName', () => {
diff --git a/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.ts b/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.ts
index e85249d1..7db25214 100644
--- a/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.ts
+++ b/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.ts
@@ -6,12 +6,10 @@ import FlowTranslationProcessor from '../../../../src/post-processor/flowTransla
import { parseXmlFileToJson } from '../../../../src/utils/fxpHelper'
import {
FLOW_XML_NAME,
- METAFILE_SUFFIX,
- TRANSLATION_EXTENSION,
TRANSLATION_TYPE,
} from '../../../../src/constant/metadataConstants'
-import { writeFile, scanExtension } from '../../../../src/utils/fsHelper'
-import { isSubDir, readFile } from '../../../../src/utils/fsUtils'
+import { isSubDir, readFile, treatPathSep } from '../../../../src/utils/fsUtils'
+import { writeFile, readDir } from '../../../../src/utils/fsHelper'
import { Work } from '../../../../src/types/work'
import { MetadataRepository } from '../../../../src/metadata/MetadataRepository'
@@ -19,11 +17,13 @@ jest.mock('fs-extra')
jest.mock('../../../../src/utils/fsHelper')
jest.mock('../../../../src/utils/fsUtils')
-const mockedScanExtension = jest.mocked(scanExtension)
+const mockedReadDir = jest.mocked(readDir)
const mockedParseXmlFileToJson = jest.mocked(parseXmlFileToJson)
const mockedIsSubDir = jest.mocked(isSubDir)
const mockedPathExists = jest.mocked(pathExists)
const mockedReadFile = jest.mocked(readFile)
+const mockTreatPathSep = jest.mocked(treatPathSep)
+mockTreatPathSep.mockImplementation(data => data)
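+// Identity implementation: paths are passed through unchanged so assertions can use the literal test values.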
const mockIgnores = jest.fn()
jest.mock('../../../../src/utils/ignoreHelper', () => ({
@@ -48,7 +48,6 @@ jest.mock('../../../../src/utils/fxpHelper', () => {
const FR = 'fr'
const EN = 'en'
const flowFullName = 'test-flow'
-const EXTENSION = `${TRANSLATION_EXTENSION}${METAFILE_SUFFIX}`
describe('FlowTranslationProcessor', () => {
let work: Work
@@ -64,7 +63,7 @@ describe('FlowTranslationProcessor', () => {
mockIgnores.mockReset()
work = getWork()
sut = new FlowTranslationProcessor(work, metadata)
- mockedScanExtension.mockResolvedValue([`${FR}.translation-meta.xml`])
+ mockedReadDir.mockResolvedValue([`${FR}.translation-meta.xml`])
})
describe('when no flow have been modified', () => {
@@ -73,7 +72,7 @@ describe('FlowTranslationProcessor', () => {
await sut.process()
// Assert
- expect(mockedScanExtension).not.toHaveBeenCalled()
+ expect(mockedReadDir).not.toHaveBeenCalled()
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeFalsy()
})
})
@@ -90,17 +89,16 @@ describe('FlowTranslationProcessor', () => {
describe('when there is no translation file', () => {
beforeEach(() => {
// Arrange
- mockedScanExtension.mockResolvedValue([])
+ mockedReadDir.mockResolvedValue([])
})
it('should not add translation file', async () => {
// Act
await sut.process()
// Assert
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).not.toHaveBeenCalled()
@@ -120,10 +118,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeFalsy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalledTimes(1)
@@ -144,10 +141,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeTruthy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalledTimes(1)
@@ -172,10 +168,9 @@ describe('FlowTranslationProcessor', () => {
await sut.process()
// Assert
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalled()
@@ -215,10 +210,9 @@ describe('FlowTranslationProcessor', () => {
await sut.process()
// Assert
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalled()
@@ -234,7 +228,7 @@ describe('FlowTranslationProcessor', () => {
describe('when there is multiple translation file with multiple flow def', () => {
beforeEach(() => {
// Arrange
- mockedScanExtension.mockResolvedValue([
+ mockedReadDir.mockResolvedValue([
`${FR}.translation-meta.xml`,
`${EN}.translation-meta.xml`,
])
@@ -253,10 +247,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeFalsy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalledTimes(2)
@@ -290,10 +283,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeTruthy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalledTimes(2)
@@ -317,10 +309,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeFalsy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).not.toHaveBeenCalled()
@@ -343,10 +334,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeTruthy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).toHaveBeenCalledTimes(1)
@@ -368,10 +358,9 @@ describe('FlowTranslationProcessor', () => {
// Assert
expect(work.diffs.package.has(TRANSLATION_TYPE)).toBeFalsy()
- expect(mockedScanExtension).toHaveBeenCalledTimes(1)
- expect(mockedScanExtension).toHaveBeenCalledWith(
+ expect(mockedReadDir).toHaveBeenCalledTimes(1)
+ expect(mockedReadDir).toHaveBeenCalledWith(
work.config.source,
- EXTENSION,
work.config
)
expect(parseXmlFileToJson).not.toHaveBeenCalled()
diff --git a/__tests__/unit/lib/post-processor/includeProcessor.test.ts b/__tests__/unit/lib/post-processor/includeProcessor.test.ts
index 197e37e1..d0011b19 100644
--- a/__tests__/unit/lib/post-processor/includeProcessor.test.ts
+++ b/__tests__/unit/lib/post-processor/includeProcessor.test.ts
@@ -18,15 +18,13 @@ jest.mock('../../../../src/service/diffLineInterpreter', () => {
})
})
-const mockGetAllFilesAsLineStream = jest.fn()
-jest.mock('../../../../src/utils/repoSetup', () => {
- return jest.fn().mockImplementation(() => {
- return {
- getAllFilesAsLineStream: mockGetAllFilesAsLineStream,
- getFirstCommitRef: jest.fn(),
- }
- })
-})
+const mockGetFilesPath = jest.fn()
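+// Stub only the GitAdapter methods exercised by these tests.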
+jest.mock('../../../../src/adapter/GitAdapter', () => ({
+ getInstance: jest.fn(() => ({
+ getFilesPath: mockGetFilesPath,
+ getFirstCommitRef: jest.fn(),
+ })),
+}))
jest.mock('../../../../src/utils/ignoreHelper')
const mockedBuildIncludeHelper = jest.mocked(buildIncludeHelper)
@@ -65,7 +63,7 @@ describe('IncludeProcessor', () => {
describe('when include is configured', () => {
beforeAll(() => {
- mockGetAllFilesAsLineStream.mockImplementation(() => ['test'])
+ mockGetFilesPath.mockImplementation(() => Promise.resolve(['test']))
})
describe('when no file matches the patterns', () => {
@@ -107,7 +105,7 @@ describe('IncludeProcessor', () => {
describe('when includeDestructive is configured', () => {
beforeAll(() => {
- mockGetAllFilesAsLineStream.mockImplementation(() => ['test'])
+ mockGetFilesPath.mockImplementation(() => Promise.resolve(['test']))
})
describe('when no file matches the patterns', () => {
beforeEach(() => {
diff --git a/__tests__/unit/lib/service/diffLineInterpreter.test.ts b/__tests__/unit/lib/service/diffLineInterpreter.test.ts
index c356ed48..0ea0ebbc 100644
--- a/__tests__/unit/lib/service/diffLineInterpreter.test.ts
+++ b/__tests__/unit/lib/service/diffLineInterpreter.test.ts
@@ -9,9 +9,9 @@ const mockHandle = jest.fn()
jest.mock('../../../../src/service/typeHandlerFactory', () => {
return jest.fn().mockImplementation(() => {
return {
- getTypeHandler: jest.fn().mockImplementation(() => {
- return { handle: mockHandle }
- }),
+ getTypeHandler: jest
+ .fn()
+ .mockImplementation(() => ({ handle: mockHandle })),
}
})
})
@@ -35,25 +35,25 @@ describe('DiffLineInterpreter', () => {
})
describe('when called with lines', () => {
- it('process each lines', () => {
+    it('processes each line', async () => {
// Arrange
const lines = ['test']
// Act
- sut.process(lines)
+ await sut.process(lines)
// Assert
- expect(mockHandle).toBeCalledTimes(1)
+ expect(mockHandle).toBeCalledTimes(lines.length)
})
})
describe('when called without lines', () => {
- it('it does not process anything', () => {
+    it('does not process anything', async () => {
// Arrange
const lines: string[] = []
// Act
- sut.process(lines)
+ await sut.process(lines)
// Assert
expect(mockHandle).not.toBeCalled()
diff --git a/__tests__/unit/lib/utils/childProcessUtils.test.ts b/__tests__/unit/lib/utils/childProcessUtils.test.ts
deleted file mode 100644
index 78a26303..00000000
--- a/__tests__/unit/lib/utils/childProcessUtils.test.ts
+++ /dev/null
@@ -1,262 +0,0 @@
-'use strict'
-import { expect, describe, it } from '@jest/globals'
-import {
- EOLRegex,
- getSpawnContent,
- getSpawnContentByLine,
- treatPathSep,
- sanitizePath,
-} from '../../../../src/utils/childProcessUtils'
-import { EventEmitter, Readable } from 'stream'
-import { sep } from 'path'
-import { ChildProcess, spawn } from 'child_process'
-
-jest.mock('child_process')
-
-const mockedSpawn = jest.mocked(spawn)
-
-const cmd = 'command'
-const args = ['arg1', 'arg2']
-
-const arrangeStream = (
- data: Buffer | string,
- error: string | null,
- isError: boolean
-) => {
- const getReadable = (content: Buffer | string | null) => {
- return new Readable({
- read() {
- if (content) this.push(content)
- this.push(null)
- },
- })
- }
- const stream: ChildProcess = new EventEmitter() as ChildProcess
- stream.stdout = getReadable(data)
- stream.stderr = getReadable(error)
- setTimeout(() => stream.emit('close', isError ? 1 : 0), 0)
- return stream
-}
-
-describe('childProcessUtils', () => {
- describe('getSpawnContent', () => {
- describe.each([Buffer.from('text'), 'text'])(
- 'when spawn returns %o',
- content => {
- it('returns Buffer', async () => {
- // Arrange
- const stream = arrangeStream(content, null, false)
- mockedSpawn.mockReturnValue(stream)
-
- // Act
- const result = await getSpawnContent(cmd, args)
-
- // Assert
- expect(result).toEqual(Buffer.from('text'))
- })
- }
- )
-
- describe('when stream emits error', () => {
- it('throws the error', async () => {
- // Arrange
- expect.assertions(1)
- const mockedStream = arrangeStream(
- 'irrelevant std out output',
- 'error',
- true
- )
- mockedSpawn.mockReturnValue(mockedStream)
-
- // Act
- try {
- await getSpawnContent(cmd, args)
-
- // Assert
- } catch (error) {
- expect((error as Error).message).toEqual('error')
- }
- })
- })
-
- describe('when stream emits error but no stderr output', () => {
- it('throws an empty error', async () => {
- // Arrange
- expect.assertions(1)
- const stream = arrangeStream('irrelevant std out output', null, true)
- mockedSpawn.mockReturnValue(stream)
-
- // Act
- try {
- await getSpawnContent(cmd, args)
-
- // Assert
- } catch (error) {
- expect((error as Error).message).toEqual('')
- }
- })
- })
- })
- describe('getSpawnContentByLine', () => {
- describe('when called with lines', () => {
- it('gives an array containing those lines', async () => {
- // Arrange
- const input = 'multiline\ntext'
-
- const stream = arrangeStream(input, null, false)
- mockedSpawn.mockReturnValue(stream)
-
- // Act
- const lines = await getSpawnContentByLine(cmd, args)
-
- // Assert
- expect(lines).toEqual(expect.arrayContaining(input.split('\n')))
- })
- })
-
- describe('when stream has no content in stdout', () => {
- it('returns no lines', async () => {
- // Arrange
- const stream = arrangeStream('', null, false)
- mockedSpawn.mockReturnValue(stream)
-
- // Act
- const lines = await getSpawnContentByLine(cmd, args)
-
- // Assert
- expect(lines).toEqual([])
- })
- })
-
- describe('when stream emits error', () => {
- it('throws the error', async () => {
- // Arrange
- expect.assertions(1)
- const mockedStream = arrangeStream(
- 'irrelevant std out output',
- 'error',
- true
- )
- mockedSpawn.mockReturnValue(mockedStream)
-
- // Act
- try {
- await getSpawnContentByLine(cmd, args)
-
- // Assert
- } catch (error) {
- expect((error as Error).message).toEqual('error')
- }
- })
- })
-
- describe('when stream emits error but no stderr output', () => {
- it('throws an empty error', async () => {
- // Arrange
- expect.assertions(1)
- const stream = arrangeStream('irrelevant std out output', null, true)
- mockedSpawn.mockReturnValue(stream)
-
- // Act
- try {
- await getSpawnContentByLine(cmd, args)
-
- // Assert
- } catch (error) {
- expect((error as Error).message).toEqual('')
- }
- })
- })
- })
- describe('treatPathSep', () => {
- it(`replace / by ${sep}`, () => {
- // Arrange
- const input = 'test///test//test/test'
-
- // Act
- const result = treatPathSep(input)
-
- // Assert
- expect(result).toBe(`test${sep}test${sep}test${sep}test`)
- })
-
- it(`replace \\ by ${sep}`, () => {
- // Arrange
- const input = 'test\\\\\\test\\\\test\\test'
-
- // Act
- const result = treatPathSep(input)
-
- // Assert
- expect(result).toBe(`test${sep}test${sep}test${sep}test`)
- })
- })
- describe('sanitizePath', () => {
- it(`returns path with '${sep}' separator`, () => {
- // Arrange
- const input = 'test\\test/test'
-
- // Act
- const result = sanitizePath(input)
-
- // Assert
- expect(result).toBe(`test${sep}test${sep}test`)
- })
-
- it(`normalize path`, () => {
- // Arrange
- const input = 'test/test\\../test'
-
- // Act
- const result = sanitizePath(input)
-
- // Assert
- expect(result).toBe(`test${sep}test`)
- })
- })
- describe('EOLRegex', () => {
- it('matches CR LF', () => {
- // Arrange
- const input = 'test\r\ntest'
-
- // Act
- const matches = EOLRegex.test(input)
-
- // Assert
- expect(matches).toBe(true)
- })
-
- it('matches LF', () => {
- // Arrange
- const input = 'testtest\n'
-
- // Act
- const matches = EOLRegex.test(input)
-
- // Assert
- expect(matches).toBe(true)
- })
-
- it('does not matches CR only', () => {
- // Arrange
- const input = 'test\rtest'
-
- // Act
- const matches = EOLRegex.test(input)
-
- // Assert
- expect(matches).toBe(false)
- })
-
- it('does not matches any string ', () => {
- // Arrange
- const input = 'test,test'
-
- // Act
- const matches = EOLRegex.test(input)
-
- // Assert
- expect(matches).toBe(false)
- })
- })
-})
diff --git a/__tests__/unit/lib/utils/cliHelper.test.ts b/__tests__/unit/lib/utils/cliHelper.test.ts
index 73f4ffcc..fce30391 100644
--- a/__tests__/unit/lib/utils/cliHelper.test.ts
+++ b/__tests__/unit/lib/utils/cliHelper.test.ts
@@ -1,59 +1,48 @@
'use strict'
import { expect, jest, describe, it } from '@jest/globals'
import { getWork } from '../../../__utils__/globalTestHelper'
-import {
- COMMIT_REF_TYPE,
- TAG_REF_TYPE,
-} from '../../../../src/constant/gitConstants'
import CLIHelper from '../../../../src/utils/cliHelper'
import { getLatestSupportedVersion } from '../../../../src/metadata/metadataManager'
import messages from '../../../../src/locales/en'
import { Work } from '../../../../src/types/work'
-import { isGit } from '../../../../src/utils/fsHelper'
-import { readFile, dirExists, fileExists } from '../../../../src/utils/fsUtils'
+import {
+ readFile,
+ dirExists,
+ fileExists,
+ sanitizePath,
+} from '../../../../src/utils/fsUtils'
import { format } from 'util'
-jest.mock('../../../../src/utils/childProcessUtils', () => {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const actualModule: any = jest.requireActual(
- '../../../../src/utils/childProcessUtils'
- )
- return {
- ...actualModule,
- getSpawnContent: jest.fn(),
- }
-})
-
-const mockGetCommitRefType = jest.fn()
-jest.mock('../../../../src/utils/repoSetup', () => {
- return jest.fn().mockImplementation(function () {
- return {
- repoConfiguration: jest.fn(),
- getCommitRefType: mockGetCommitRefType,
- }
- })
-})
+const mockParseRev = jest.fn()
+const mockConfigureRepository = jest.fn()
+const setGitDirMock = jest.fn()
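+// Only the GitAdapter methods used during CLIHelper validation are stubbed.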
+jest.mock('../../../../src/adapter/GitAdapter', () => ({
+ getInstance: () => ({
+ parseRev: mockParseRev,
+ configureRepository: mockConfigureRepository,
+ setGitDir: setGitDirMock,
+ }),
+}))
-jest.mock('../../../../src/utils/fsHelper')
jest.mock('../../../../src/utils/fsUtils')
-
const mockedReadFile = jest.mocked(readFile)
+const mockedSanitizePath = jest.mocked(sanitizePath)
const mockedDirExists = jest.mocked(dirExists)
const mockedFileExists = jest.mocked(fileExists)
-const mockedIsGit = jest.mocked(isGit)
+
+mockedSanitizePath.mockImplementation(data => data)
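+// sanitizePath is a pass-through here so the config paths in these tests stay as written.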
describe(`test if the application`, () => {
let work: Work
beforeEach(() => {
work = getWork()
+ work.config.repo = '.'
work.config.to = 'test'
work.config.apiVersion = 46
mockedFileExists.mockImplementation(() => Promise.resolve(true))
mockedDirExists.mockImplementation(() => Promise.resolve(true))
- mockedIsGit.mockImplementation(() => Promise.resolve(true))
- mockGetCommitRefType.mockImplementation(() =>
- Promise.resolve(COMMIT_REF_TYPE)
- )
+ setGitDirMock.mockImplementation(() => Promise.resolve(true))
+ mockParseRev.mockImplementation(() => Promise.resolve('ref'))
})
it('resume nicely when everything is well configured', async () => {
@@ -75,7 +64,7 @@ describe(`test if the application`, () => {
})
it('throws errors when repo is not a git repository', async () => {
- mockedIsGit.mockResolvedValueOnce(false)
+ setGitDirMock.mockImplementationOnce(() => Promise.reject(new Error()))
const cliHelper = new CLIHelper({
...work,
config: {
@@ -332,11 +321,8 @@ describe(`test if the application`, () => {
})
it('throws errors when "-t" is not a valid sha pointer', async () => {
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve('not a valid sha pointer')
- )
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve(TAG_REF_TYPE)
+ mockParseRev.mockImplementationOnce(() =>
+ Promise.reject(new Error('not a valid sha pointer'))
)
const notHeadSHA = 'test'
const cliHelper = new CLIHelper({
@@ -354,11 +340,9 @@ describe(`test if the application`, () => {
})
it('throws errors when "-f" is not a valid sha pointer', async () => {
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve('not a valid sha pointer')
- )
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve(COMMIT_REF_TYPE)
+ mockParseRev.mockImplementationOnce(() => Promise.resolve('ref'))
+ mockParseRev.mockImplementationOnce(() =>
+ Promise.reject(new Error('not a valid sha pointer'))
)
const notHeadSHA = 'test'
const cliHelper = new CLIHelper({
@@ -376,11 +360,12 @@ describe(`test if the application`, () => {
})
it('throws errors when "-t" and "-f" are not a valid sha pointer', async () => {
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve('not a valid sha pointer')
+ expect.assertions(2)
+ mockParseRev.mockImplementationOnce(() =>
+ Promise.reject(new Error('not a valid sha pointer'))
)
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve('not a valid sha pointer')
+ mockParseRev.mockImplementationOnce(() =>
+ Promise.reject(new Error('not a valid sha pointer'))
)
const notHeadSHA = 'test'
const cliHelper = new CLIHelper({
@@ -393,23 +378,21 @@ describe(`test if the application`, () => {
},
})
- await expect(cliHelper.validateConfig()).rejects.toThrow(
- format(messages.errorParameterIsNotGitSHA, 'from', notHeadSHA)
- )
-
- await expect(cliHelper.validateConfig()).rejects.toThrow(
- format(messages.errorParameterIsNotGitSHA, 'to', notHeadSHA)
- )
+ try {
+ await cliHelper.validateConfig()
+ } catch (err) {
+ const error = err as Error
+ expect(error.message).toContain(
+ format(messages.errorParameterIsNotGitSHA, 'from', notHeadSHA)
+ )
+ expect(error.message).toContain(
+ format(messages.errorParameterIsNotGitSHA, 'to', notHeadSHA)
+ )
+ }
})
it('do not throw errors when "-t" and "-f" are valid sha pointer', async () => {
// Arrange
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve(TAG_REF_TYPE)
- )
- mockGetCommitRefType.mockImplementationOnce(() =>
- Promise.resolve(COMMIT_REF_TYPE)
- )
const notHeadSHA = 'test'
const cliHelper = new CLIHelper({
@@ -430,7 +413,7 @@ describe(`test if the application`, () => {
it('do not throw errors when repo contains submodule git file', async () => {
expect.assertions(1)
- mockedIsGit.mockResolvedValueOnce(true)
+ setGitDirMock.mockImplementationOnce(() => Promise.resolve(true))
const cliHelper = new CLIHelper({
...work,
config: {
@@ -445,7 +428,7 @@ describe(`test if the application`, () => {
it('do not throw errors when repo submodule git folder', async () => {
expect.assertions(1)
- mockedIsGit.mockResolvedValueOnce(true)
+ setGitDirMock.mockImplementationOnce(() => Promise.resolve(true))
const cliHelper = new CLIHelper({
...work,
config: {
diff --git a/__tests__/unit/lib/utils/fsHelper.test.ts b/__tests__/unit/lib/utils/fsHelper.test.ts
index caa5df63..6e6671af 100644
--- a/__tests__/unit/lib/utils/fsHelper.test.ts
+++ b/__tests__/unit/lib/utils/fsHelper.test.ts
@@ -3,54 +3,38 @@ import { expect, jest, describe, it } from '@jest/globals'
import { getWork } from '../../../__utils__/globalTestHelper'
import {
copyFiles,
- gitPathSeparatorNormalizer,
- isGit,
pathExists,
readDir,
readPathFromGit,
- scan,
- scanExtension,
writeFile,
} from '../../../../src/utils/fsHelper'
import {
IgnoreHelper,
buildIgnoreHelper,
} from '../../../../src/utils/ignoreHelper'
-import {
- getSpawnContent,
- treatPathSep,
-} from '../../../../src/utils/childProcessUtils'
-import { readFile as fsReadFile, Stats, outputFile, stat } from 'fs-extra'
-import {
- isLFS,
- getLFSObjectContentPath,
-} from '../../../../src/utils/gitLfsHelper'
-import { EOL } from 'os'
+import { outputFile } from 'fs-extra'
import { Work } from '../../../../src/types/work'
import { Config } from '../../../../src/types/config'
import { Ignore } from 'ignore'
jest.mock('fs-extra')
-jest.mock('../../../../src/utils/gitLfsHelper')
-jest.mock('../../../../src/utils/childProcessUtils', () => {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const actualModule: any = jest.requireActual(
- '../../../../src/utils/childProcessUtils'
- )
- return {
- ...actualModule,
- getSpawnContent: jest.fn(),
- treatPathSep: jest.fn(),
- }
-})
+
jest.mock('../../../../src/utils/ignoreHelper')
const mockBuildIgnoreHelper = jest.mocked(buildIgnoreHelper)
-const mockedGetStreamContent = jest.mocked(getSpawnContent)
-const mockedTreatPathSep = jest.mocked(treatPathSep)
-const mockedStat = jest.mocked(stat)
-const mockedIsLFS = jest.mocked(isLFS)
-const mockedGetLFSObjectContentPath = jest.mocked(getLFSObjectContentPath)
+
+const mockGetStringContent = jest.fn()
+const mockGetFilesFrom = jest.fn()
+const mockGetFilesPath = jest.fn()
+const mockPathExists = jest.fn()
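+// fsHelper reads git content through GitAdapter; stub the methods used in these tests.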
+jest.mock('../../../../src/adapter/GitAdapter', () => ({
+ getInstance: () => ({
+ getStringContent: mockGetStringContent,
+ getFilesFrom: mockGetFilesFrom,
+ getFilesPath: mockGetFilesPath,
+ pathExists: mockPathExists,
+ }),
+}))
let work: Work
beforeEach(() => {
@@ -60,19 +44,6 @@ beforeEach(() => {
work.config.to = 'recentsha'
})
-describe('gitPathSeparatorNormalizer', () => {
- it('replaces every instance of \\', async () => {
- // Arrange
- const windowsPath = 'path\\to\\a\\\\file'
-
- // Act
- const result = gitPathSeparatorNormalizer(windowsPath)
-
- // Assert
- expect(result).toEqual('path/to/a/file')
- })
-})
-
describe('readPathFromGit', () => {
describe.each([
['windows', 'force-app\\main\\default\\classes\\myClass.cls'],
@@ -80,59 +51,36 @@ describe('readPathFromGit', () => {
])('when path is %s format', (_, path) => {
beforeEach(() => {
// Arrange
- mockedGetStreamContent.mockResolvedValue(Buffer.from(''))
+ mockGetStringContent.mockImplementation(() =>
+ Promise.resolve(Buffer.from(''))
+ )
})
- it('should use "config.to" and "normalized path" to get git history', async () => {
+ it('returns the file content at `config.to` ref', async () => {
// Act
- await readPathFromGit(path, work.config)
+ const forRef = { path, oid: work.config.to }
+ await readPathFromGit(forRef, work.config)
// Assert
- const normalizedPath = path.replace(/\\+/g, '/')
- expect(getSpawnContent).toHaveBeenCalledWith(
- 'git',
- expect.arrayContaining([`${work.config.to}:${normalizedPath}`]),
- expect.anything()
- )
+ expect(mockGetStringContent).toHaveBeenCalledWith(forRef)
})
})
- describe.each([undefined, null])('when path returned "%s"', value => {
+  describe.each([undefined, null])('when git content is "%s"', value => {
beforeEach(() => {
// Arrange
- mockedGetStreamContent.mockResolvedValue(value as unknown as Buffer)
+ mockGetStringContent.mockImplementation(() => Promise.resolve(value))
})
- it('should use "config.to" and "normalized path" to get git history', async () => {
+ it('returns the file content at `config.to` ref', async () => {
// Act
- const content = await readPathFromGit('path/file', work.config)
-
- // Assert
- expect(content).toBe('')
- })
- })
- describe('when file is LSF', () => {
- const bufferContent =
- Buffer.from(`version https://git-lfs.github.com/spec/v1
- oid sha256:0a4ca7e5eca75024197fff96ef7e5de1b2ca35d6c058ce76e7e0d84bee1c8b14
- size 72`)
- beforeEach(async () => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(bufferContent)
- mockedIsLFS.mockReturnValueOnce(true)
- mockedGetLFSObjectContentPath.mockImplementationOnce(
- () => 'lfs/objects/oid'
+ const content = await readPathFromGit(
+ { path: 'path/file', oid: work.config.to },
+ work.config
)
- })
- it('should copy the file', async () => {
- // Act
- await readPathFromGit('path/lfs/file', work.config)
// Assert
- expect(getSpawnContent).toBeCalled()
- expect(getLFSObjectContentPath).toBeCalledTimes(1)
- expect(getLFSObjectContentPath).toHaveBeenCalledWith(bufferContent)
- expect(fsReadFile).toBeCalledWith('lfs/objects/oid')
+ expect(content).toBe(value)
})
})
})
@@ -154,7 +102,7 @@ describe('copyFile', () => {
await copyFiles(work.config, 'source/file')
// Assert
- expect(getSpawnContent).not.toBeCalled()
+ expect(mockGetStringContent).not.toBeCalled()
expect(outputFile).not.toBeCalled()
})
})
@@ -162,7 +110,6 @@ describe('copyFile', () => {
describe('when file is already written', () => {
it('should not copy file', async () => {
// Arrange
- mockedTreatPathSep.mockReturnValueOnce('file')
await writeFile('source/file', 'content', work.config)
jest.resetAllMocks()
@@ -170,25 +117,29 @@ describe('copyFile', () => {
await copyFiles(work.config, 'source/file')
// Assert
- expect(getSpawnContent).not.toBeCalled()
expect(outputFile).not.toBeCalled()
})
})
- describe('when content is not a git location', () => {
- it('should ignore this path', async () => {
+ describe('when source location is empty', () => {
+ it('should copy file', async () => {
// Arrange
- const sourcePath = 'source/warning'
- mockedGetStreamContent.mockRejectedValue(
- `fatal: path '${sourcePath}' does not exist in 'HEAD'`
+ const sourcePath = 'source/copyFile'
+ mockGetFilesFrom.mockImplementation(() =>
+ Promise.resolve([
+ {
+ path: sourcePath,
+ content: Buffer.from(''),
+ },
+ ])
)
// Act
await copyFiles(work.config, sourcePath)
// Assert
- expect(getSpawnContent).toBeCalled()
- expect(outputFile).not.toBeCalled()
+ expect(mockGetFilesFrom).toBeCalled()
+ expect(outputFile).toBeCalledWith(`output/${sourcePath}`, Buffer.from(''))
})
})
@@ -205,7 +156,7 @@ describe('copyFile', () => {
await copyFiles(work.config, 'source/ignored')
// Assert
- expect(getSpawnContent).not.toBeCalled()
+ expect(mockGetFilesFrom).not.toBeCalled()
expect(outputFile).not.toBeCalled()
})
})
@@ -214,385 +165,228 @@ describe('copyFile', () => {
describe('when source location is empty', () => {
it('should copy file', async () => {
// Arrange
-
- mockedTreatPathSep.mockReturnValueOnce('source/copyFile')
- mockedGetStreamContent.mockResolvedValue(Buffer.from(''))
+ mockGetFilesFrom.mockImplementation(() =>
+ Promise.resolve([
+ {
+ path: 'source/emptyFile',
+ content: Buffer.from(''),
+ },
+ ])
+ )
// Act
- await copyFiles(work.config, 'source/doNotCopy')
+ await copyFiles(work.config, 'source/emptyFile')
// Assert
- expect(getSpawnContent).toBeCalled()
+ expect(mockGetFilesFrom).toBeCalled()
expect(outputFile).toBeCalledWith(
- 'output/source/copyFile',
+ 'output/source/emptyFile',
Buffer.from('')
)
})
})
- describe('when content is a folder', () => {
- it('should copy the folder', async () => {
- // Arrange
- mockedTreatPathSep.mockReturnValueOnce('copyDir/copyFile')
- mockedGetStreamContent.mockResolvedValueOnce(
- Buffer.from('tree HEAD:folder\n\ncopyFile')
- )
- mockedGetStreamContent.mockResolvedValue(Buffer.from('content'))
+ describe('when source location is not empty', () => {
+ describe('when content is a folder', () => {
+ it('should copy the folder', async () => {
+ // Arrange
+ mockGetFilesFrom.mockImplementation(() =>
+ Promise.resolve([
+ {
+ path: 'copyDir/copyFile',
+ content: Buffer.from('content'),
+ },
+ ])
+ )
+
+ // Act
+ await copyFiles(work.config, 'source/copyDir')
+
+ // Assert
+ expect(mockGetFilesFrom).toBeCalledTimes(1)
+ expect(outputFile).toBeCalledTimes(1)
+ expect(outputFile).toHaveBeenCalledWith(
+ 'output/copyDir/copyFile',
+ Buffer.from('content')
+ )
+ })
+ })
- // Act
- await copyFiles(work.config, 'source/copyDir')
+ describe('when content is not a git location', () => {
+ it('should ignore this path', async () => {
+ // Arrange
+ const sourcePath = 'source/warning'
+ mockGetFilesFrom.mockImplementation(() => Promise.reject())
- // Assert
- expect(getSpawnContent).toBeCalledTimes(2)
- expect(outputFile).toBeCalledTimes(1)
- expect(outputFile).toHaveBeenCalledWith(
- 'output/copyDir/copyFile',
- Buffer.from('content')
- )
- expect(treatPathSep).toBeCalledTimes(1)
+ // Act
+ await copyFiles(work.config, sourcePath)
+
+ // Assert
+ expect(mockGetFilesFrom).toBeCalled()
+ expect(outputFile).not.toBeCalled()
+ })
+ })
+ describe('when content is a file', () => {
+ beforeEach(async () => {
+ // Arrange
+ mockGetFilesFrom.mockImplementation(() =>
+ Promise.resolve([
+ { path: 'source/copyFile', content: Buffer.from('content') },
+ ])
+ )
+ })
+ it('should copy the file', async () => {
+ // Act
+ await copyFiles(work.config, 'source/copyfile')
+
+ // Assert
+ expect(mockGetFilesFrom).toBeCalled()
+ expect(outputFile).toBeCalledTimes(1)
+ expect(outputFile).toHaveBeenCalledWith(
+ 'output/source/copyFile',
+ Buffer.from('content')
+ )
+ })
})
})
+ })
- describe('when content is a file', () => {
- beforeEach(async () => {
+ describe('readDir', () => {
+ describe('when path exist', () => {
+ const dir = 'dir/'
+ const file = 'test.js'
+ beforeEach(() => {
// Arrange
- mockedGetStreamContent.mockResolvedValue(Buffer.from('content'))
- mockedTreatPathSep.mockReturnValueOnce('source/copyFile')
- mockedIsLFS.mockReturnValue(false)
+ mockGetFilesPath.mockImplementation(() =>
+ Promise.resolve([`${dir}${file}`])
+ )
})
- it('should copy the file', async () => {
+ it('should return the file', async () => {
// Act
- await copyFiles(work.config, 'source/copyfile')
+ const dirContent = await readDir(dir, work.config)
// Assert
- expect(getSpawnContent).toBeCalled()
- expect(treatPathSep).toBeCalledTimes(1)
- expect(outputFile).toBeCalledTimes(1)
- expect(outputFile).toHaveBeenCalledWith(
- 'output/source/copyFile',
- Buffer.from('content')
- )
+ expect(dirContent).toEqual(expect.arrayContaining([`${dir}${file}`]))
+ expect(mockGetFilesPath).toHaveBeenCalled()
})
})
- })
-})
-
-describe('readDir', () => {
- describe('when getSpawnContent succeed', () => {
- const dir = 'dir/'
- const file = 'test.js'
- beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from([`tree HEAD:${dir}`, '', file].join(EOL))
- )
- })
- it('should return the file', async () => {
- // Act
- const dirContent = await readDir(dir, work.config)
-
- // Assert
- expect(dirContent).toEqual(expect.arrayContaining([`${file}`]))
- expect(getSpawnContent).toHaveBeenCalled()
- })
- })
- describe('when getSpawnContent throw', () => {
- beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockRejectedValue(new Error('mock'))
- })
- it('should throw', async () => {
- // Act
- try {
- await readDir('path', work.config)
- } catch (err) {
- // Assert
- expect(err).toBeTruthy()
- expect(getSpawnContent).toHaveBeenCalled()
- }
+ describe('when path does not exist', () => {
+ beforeEach(() => {
+ // Arrange
+ mockGetFilesPath.mockImplementation(() =>
+ Promise.reject(new Error('test'))
+ )
+ })
+ it('should throw', async () => {
+ // Act
+ try {
+ await readDir('path', work.config)
+ } catch (err) {
+ // Assert
+ expect(err).toBeTruthy()
+ expect(mockGetFilesPath).toHaveBeenCalled()
+ }
+ })
})
})
-})
-describe('scan', () => {
- describe('when getSpawnContent throw', () => {
- beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockRejectedValue(new Error('mock'))
- })
- it('should not throw', async () => {
+ describe('pathExists', () => {
+ it('returns true when path is folder', async () => {
// Arrange
- const res = await scan('dir', work.config)
+ mockPathExists.mockImplementation(() => Promise.resolve(true))
- // Assert
- expect(res).toMatchObject({})
- })
- })
- describe('when getSpawnContent returns nothing', () => {
- beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(Buffer.from(''))
- })
- it('should return nothing', async () => {
- // Arrange
- const g = scan('dir', work.config)
// Act
- const result = await g.next()
+ const result = await pathExists('path', work.config)
// Assert
- expect(result.value).toBeFalsy()
- })
- })
- describe('when getSpawnContent returns a file', () => {
- const dir = 'dir/'
- const file = 'test.js'
- beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from([`tree HEAD:${dir}`, '', file].join(EOL))
- )
+ expect(result).toBe(true)
})
- it('should return a file', async () => {
+ it('returns true when path is file', async () => {
// Arrange
- const g = scan(dir, work.config)
+ mockPathExists.mockImplementation(() => Promise.resolve(true))
+
// Act
- const result = await g.next()
+ const result = await pathExists('path', work.config)
// Assert
- expect(result.value).toEqual(`${dir}${file}`)
+ expect(result).toBe(true)
})
- })
- describe('when getSpawnContent returns an empty directory', () => {
- const dir = 'dir/'
- const subDir = 'subDir/'
- it('should return nothing', async () => {
+ it('returns false when path does not exist', async () => {
// Arrange
- mockedGetStreamContent.mockResolvedValueOnce(
- Buffer.from([`tree HEAD:${dir}`, '', subDir].join(EOL))
- )
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from([`tree HEAD:${dir}${subDir}`].join(EOL))
- )
- const g = scan('dir', work.config)
+ mockPathExists.mockImplementation(() => Promise.resolve(false))
// Act
- const result = await g.next()
+ const result = await pathExists('path', work.config)
// Assert
- expect(result.value).toBeFalsy()
+ expect(result).toBe(false)
})
- })
- describe('when getSpawnContent returns a directory with a file', () => {
- const dir = 'dir/'
- const subDir = 'subDir/'
- const subFile = 'test.js'
- beforeEach(() => {
+ it('returns false when sub service throws', async () => {
+ expect.assertions(1)
// Arrange
- mockedGetStreamContent.mockResolvedValueOnce(
- Buffer.from([`tree HEAD:${dir}`, '', subDir].join(EOL))
- )
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from([`tree HEAD:${dir}${subDir}`, '', subFile].join(EOL))
- )
- })
- it('should return a file', async () => {
- // Arrange
- const g = scan('dir', work.config)
- // Act
- const result = await g.next()
+ mockPathExists.mockImplementation(() => Promise.reject(new Error('test')))
- // Assert
- expect(result.value).toBe(`${dir}${subDir}${subFile}`)
- })
- })
-})
-describe('scanExtension', () => {
- describe('when directory does not contains a file with the extension', () => {
- const dir = 'dir/'
- const file = 'test.js'
- beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from([`tree HEAD:${dir}`, '', file].join(EOL))
- )
- })
- it('should return', async () => {
- // Arrange
// Act
- const result = await scanExtension(dir, 'txt', work.config)
+ const exist = await pathExists('path', work.config)
// Assert
- expect(result).toEqual([])
+ expect(exist).toBe(false)
})
})
- describe('when directory contains a file with the extension', () => {
- const dir = 'dir/'
- const file = 'test.js'
+ describe('writeFile', () => {
beforeEach(() => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from([`tree HEAD:${dir}`, '', file].join(EOL))
- )
- })
- it('should return a file', async () => {
- // Arrange
- // Act
- const result = await scanExtension(dir, 'js', work.config)
-
- // Assert
- expect(result).toEqual([`${dir}${file}`])
+ mockBuildIgnoreHelper.mockResolvedValue({
+ globalIgnore: {
+ ignores: () => false,
+ } as unknown as Ignore,
+ } as unknown as IgnoreHelper)
})
- })
-})
-
-describe('pathExists', () => {
- it('returns true when path is folder', async () => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(Buffer.from('tree path\n\nfolder'))
+ it.each(['folder/file', 'folder\\file'])(
+ 'write the content to the file system',
+ async path => {
+ // Arrange
+ const config: Config = work.config
+ config.output = 'root'
+ const content = 'content'
- // Act
- const result = await pathExists('path', work.config)
+ // Act
+ await writeFile(path, content, config)
- // Assert
- expect(result).toBe(true)
- })
- it('returns true when path is file', async () => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(
- Buffer.from('{"attribute":"content"}')
+ // Assert
+ expect(outputFile).toHaveBeenCalledWith('root/folder/file', content)
+ }
)
- // Act
- const result = await pathExists('path', work.config)
-
- // Assert
- expect(result).toBe(true)
- })
- it('returns false when path does not exist', async () => {
- // Arrange
- mockedGetStreamContent.mockResolvedValue(Buffer.from(''))
-
- // Act
- const result = await pathExists('path', work.config)
-
- // Assert
- expect(result).toBe(false)
- })
- it('do not throws when getSpawnContent throws', async () => {
- expect.assertions(1)
- // Arrange
- mockedGetStreamContent.mockRejectedValueOnce(new Error('spawn issue'))
-
- // Act
- const exist = await pathExists('path', work.config)
-
- // Assert
- expect(exist).toBe(false)
- })
-})
-
-describe('writeFile', () => {
- beforeEach(() => {
- mockedTreatPathSep.mockReturnValue('folder/file')
- mockBuildIgnoreHelper.mockResolvedValue({
- globalIgnore: {
- ignores: () => false,
- } as unknown as Ignore,
- } as unknown as IgnoreHelper)
- })
-
- it.each(['folder/file', 'folder\\file'])(
- 'write the content to the file system',
- async path => {
+ it('call only once for the same path', async () => {
// Arrange
const config: Config = work.config
config.output = 'root'
const content = 'content'
+ const path = 'other/path/file'
+ await writeFile(path, content, config)
// Act
await writeFile(path, content, config)
// Assert
- expect(outputFile).toHaveBeenCalledWith('root/folder/file', content)
- }
- )
-
- it('call only once for the same path', async () => {
- // Arrange
- const config: Config = work.config
- config.output = 'root'
- const content = 'content'
- const path = 'other/path/file'
- await writeFile(path, content, config)
-
- // Act
- await writeFile(path, content, config)
-
- // Assert
- expect(outputFile).toBeCalledTimes(1)
- })
-
- it('should not copy ignored path', async () => {
- // Arrange
- mockBuildIgnoreHelper.mockResolvedValue({
- globalIgnore: {
- ignores: () => true,
- } as unknown as Ignore,
- } as unknown as IgnoreHelper)
-
- // Act
- await writeFile('', '', {} as Config)
-
- // Assert
- expect(outputFile).not.toBeCalled()
- })
-})
-
-describe('isGit', () => {
- it('returns true when it is a git file', async () => {
- // Arrange
- mockedStat.mockImplementation((() =>
- Promise.resolve({
- isFile: () => true,
- isDirectory: () => false,
- } as unknown as Stats)) as unknown as typeof stat)
-
- // Act
- const exist = await isGit('test')
-
- // Assert
- expect(exist).toBe(true)
- })
-
- it('returns true when it is a git folder', async () => {
- // Arrange
- mockedStat.mockImplementation((() =>
- Promise.resolve({
- isFile: () => false,
- isDirectory: () => true,
- } as unknown as Stats)) as unknown as typeof stat)
-
- // Act
- const exist = await isGit('test')
-
- // Assert
- expect(exist).toBe(true)
- })
+ expect(outputFile).toBeCalledTimes(1)
+ })
- it('returns false when it is neither a git folder nor a git file', async () => {
- // Arrange
- mockedStat.mockImplementation((() =>
- Promise.resolve({
- isFile: () => false,
- isDirectory: () => false,
- } as unknown as Stats)) as unknown as typeof stat)
+ it('should not copy ignored path', async () => {
+ // Arrange
+ mockBuildIgnoreHelper.mockResolvedValue({
+ globalIgnore: {
+ ignores: () => true,
+ } as unknown as Ignore,
+ } as unknown as IgnoreHelper)
- // Act
- const exist = await isGit('test')
+ // Act
+ await writeFile('', '', {} as Config)
- // Assert
- expect(exist).toBe(false)
+ // Assert
+ expect(outputFile).not.toBeCalled()
+ })
})
})
diff --git a/__tests__/unit/lib/utils/fsUtils.test.ts b/__tests__/unit/lib/utils/fsUtils.test.ts
index f9b4620c..a139ced5 100644
--- a/__tests__/unit/lib/utils/fsUtils.test.ts
+++ b/__tests__/unit/lib/utils/fsUtils.test.ts
@@ -5,8 +5,11 @@ import {
fileExists,
isSubDir,
readFile,
+ sanitizePath,
+ treatPathSep,
} from '../../../../src/utils/fsUtils'
import { Stats, stat, readFile as fsReadFile } from 'fs-extra'
+import { sep } from 'path'
jest.mock('fs-extra')
@@ -186,3 +189,62 @@ describe('readFile', () => {
})
})
})
+
+describe('treatPathSep', () => {
+ it(`replace / by ${sep}`, () => {
+ // Arrange
+ const input = 'test///test//test/test'
+
+ // Act
+ const result = treatPathSep(input)
+
+ // Assert
+ expect(result).toBe(`test${sep}test${sep}test${sep}test`)
+ })
+
+ it(`replace \\ by ${sep}`, () => {
+ // Arrange
+ const input = 'test\\\\\\test\\\\test\\test'
+
+ // Act
+ const result = treatPathSep(input)
+
+ // Assert
+ expect(result).toBe(`test${sep}test${sep}test${sep}test`)
+ })
+})
+
+describe('sanitizePath', () => {
+ it(`returns path with '${sep}' separator`, () => {
+ // Arrange
+ const input = 'test\\test/test'
+
+ // Act
+ const result = sanitizePath(input)
+
+ // Assert
+ expect(result).toBe(`test${sep}test${sep}test`)
+ })
+
+ it(`normalize path`, () => {
+ // Arrange
+ const input = 'test/test\\../test'
+
+ // Act
+ const result = sanitizePath(input)
+
+ // Assert
+ expect(result).toBe(`test${sep}test`)
+ })
+
+ it('return empty string when data is empty string', () => {
+ // Arrange
+ const input = ''
+
+ // Act
+ const result = sanitizePath(input)
+
+ // Assert
+ expect(result).toBe('')
+ })
+})
diff --git a/__tests__/unit/lib/utils/fxpHelper.test.ts b/__tests__/unit/lib/utils/fxpHelper.test.ts
index 9c695e9c..c79ca73d 100644
--- a/__tests__/unit/lib/utils/fxpHelper.test.ts
+++ b/__tests__/unit/lib/utils/fxpHelper.test.ts
@@ -63,7 +63,7 @@ describe('fxpHelper', () => {
it('returns empty object', async () => {
// Act
const jsonResult = await parseXmlFileToJson(
- 'path/to/empty/file',
+ { path: 'path/to/empty/file', oid: config.to },
config
)
@@ -81,7 +81,7 @@ describe('fxpHelper', () => {
it('returns json content', async () => {
// Act
const jsonContent = await parseXmlFileToJson(
- 'path/to/empty/file',
+ { path: 'path/to/empty/file', oid: config.to },
config
)
@@ -97,7 +97,7 @@ describe('fxpHelper', () => {
it('returns empty object', async () => {
// Act
const jsonContent = await parseXmlFileToJson(
- 'path/to/empty/file',
+ { path: 'path/to/empty/file', oid: config.to },
config
)
diff --git a/__tests__/unit/lib/utils/packageHelper.test.ts b/__tests__/unit/lib/utils/packageHelper.test.ts
index f0b1c092..7cd5a14d 100644
--- a/__tests__/unit/lib/utils/packageHelper.test.ts
+++ b/__tests__/unit/lib/utils/packageHelper.test.ts
@@ -1,8 +1,6 @@
'use strict'
import { expect, describe, it } from '@jest/globals'
-import PackageBuilder from '../../../../src/utils/packageHelper'
-import {
- cleanUpPackageMember,
+import PackageBuilder, {
fillPackageWithParameter,
} from '../../../../src/utils/packageHelper'
import { Config } from '../../../../src/types/config'
@@ -159,17 +157,4 @@ describe('fillPackageWithParameter', () => {
})
})
})
-
- describe('cleanUpPackageMember', () => {
- it(`package member path delimiter with "/"`, () => {
- // Arrange
- const example = `Package\\Member`
-
- // Act
- const result = cleanUpPackageMember(example).split('/')
-
- // Assert
- expect(result.length).toBe(2)
- })
- })
})
diff --git a/__tests__/unit/lib/utils/repoGitDiff.test.ts b/__tests__/unit/lib/utils/repoGitDiff.test.ts
index 6f95ead6..826a7c47 100644
--- a/__tests__/unit/lib/utils/repoGitDiff.test.ts
+++ b/__tests__/unit/lib/utils/repoGitDiff.test.ts
@@ -1,34 +1,29 @@
'use strict'
import { expect, jest, describe, it } from '@jest/globals'
import { getGlobalMetadata } from '../../../__utils__/globalTestHelper'
-import { resetIgnoreInstance } from '../../../../src/utils/ignoreHelper'
import RepoGitDiff from '../../../../src/utils/repoGitDiff'
import {
ADDITION,
DELETION,
MODIFICATION,
} from '../../../../src/constant/gitConstants'
-import {
- getSpawnContent,
- getSpawnContentByLine,
-} from '../../../../src/utils/childProcessUtils'
import { Config } from '../../../../src/types/config'
import { MetadataRepository } from '../../../../src/metadata/MetadataRepository'
-jest.mock('../../../../src/utils/childProcessUtils', () => {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const actualModule: any = jest.requireActual(
- '../../../../src/utils/childProcessUtils'
- )
- return {
- ...actualModule,
- getSpawnContent: jest.fn(),
- getSpawnContentByLine: jest.fn(),
- }
-})
-
-const mockedGetSpawnContent = jest.mocked(getSpawnContent)
-const mockedGetSpawnContentByLine = jest.mocked(getSpawnContentByLine)
+const mockGetDiffLines = jest.fn()
+jest.mock('../../../../src/adapter/GitAdapter', () => ({
+ getInstance: jest.fn(() => ({
+ getDiffLines: mockGetDiffLines,
+ })),
+}))
+
+const mockKeep = jest.fn()
+jest.mock('../../../../src/utils/ignoreHelper', () => ({
+ buildIgnoreHelper: jest.fn(() => ({
+ keep: mockKeep,
+ })),
+}))
+mockKeep.mockReturnValue(true)
const FORCEIGNORE_MOCK_PATH = '__mocks__/.forceignore'
@@ -43,7 +38,6 @@ describe(`test if repoGitDiff`, () => {
})
beforeEach(() => {
- resetIgnoreInstance()
config = {
to: '',
from: '',
@@ -61,7 +55,7 @@ describe(`test if repoGitDiff`, () => {
})
it('can parse git correctly', async () => {
const output: string[] = []
- mockedGetSpawnContentByLine.mockResolvedValue([])
+ mockGetDiffLines.mockImplementation(() => Promise.resolve([]))
config.ignore = FORCEIGNORE_MOCK_PATH
config.ignoreWhitespace = true
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
@@ -71,7 +65,7 @@ describe(`test if repoGitDiff`, () => {
it('can parse git permissively', async () => {
const output: string[] = []
- mockedGetSpawnContentByLine.mockResolvedValue([])
+ mockGetDiffLines.mockImplementation(() => Promise.resolve([]))
config.ignore = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -82,11 +76,9 @@ describe(`test if repoGitDiff`, () => {
const output: string[] = [
'force-app/main/default/objects/Account/fields/awesome.field-meta.xml',
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce(
- output.map(x => `1${TAB}1${TAB}${x}`)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${DELETION}${TAB}${x}`))
)
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
config.ignore = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -97,11 +89,9 @@ describe(`test if repoGitDiff`, () => {
const output: string[] = [
'force-app/main/default/objects/Account/fields/awesome.field-meta.xml',
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce(
- output.map(x => `1${TAB}1${TAB}${x}`)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${DELETION}${TAB}${x}`))
)
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
expect(work).toStrictEqual(output.map(x => `${DELETION}${TAB}${x}`))
@@ -111,11 +101,9 @@ describe(`test if repoGitDiff`, () => {
const output: string[] = [
'force-app/main/default/objects/Account/fields/awesome.field-meta.xml',
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce(
- output.map(x => `1${TAB}1${TAB}${x}`)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
)
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
expect(work).toStrictEqual(output.map(x => `${ADDITION}${TAB}${x}`))
@@ -125,11 +113,9 @@ describe(`test if repoGitDiff`, () => {
const output: string[] = [
'force-app/main/default/objects/Account/fields/awesome.field-meta.xml',
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce(
- output.map(x => `1${TAB}1${TAB}${x}`)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
)
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
expect(work).toStrictEqual(output.map(x => `${ADDITION}${TAB}${x}`))
@@ -139,10 +125,8 @@ describe(`test if repoGitDiff`, () => {
const output: string[] = [
'force-app/main/default/objects/Account/fields/awesome.field-meta.xml',
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce(
- output.map(x => `1${TAB}1${TAB}${x}`)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `M${TAB}${x}`))
)
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -153,10 +137,8 @@ describe(`test if repoGitDiff`, () => {
const output: string[] = [
'force-app/main/default/objects/Account/fields/awesome.field-meta.xml',
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce(
- output.map(x => `1${TAB}1${TAB}${x}`)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `M${TAB}${x}`))
)
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -164,12 +146,11 @@ describe(`test if repoGitDiff`, () => {
})
it('can filter ignored files', async () => {
- const output = 'force-app/main/default/pages/test.page-meta.xml'
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ const output = ['force-app/main/default/pages/test.page-meta.xml']
+ mockKeep.mockReturnValueOnce(false)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
+ )
config.ignore = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -179,12 +160,11 @@ describe(`test if repoGitDiff`, () => {
})
it('can filter ignored destructive files', async () => {
- const output = 'force-app/main/default/pages/test.page-meta.xml'
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContent.mockResolvedValueOnce(
- Buffer.from(`1${TAB}1${TAB}${output}`)
+ const output = ['force-app/main/default/pages/test.page-meta.xml']
+ mockKeep.mockReturnValueOnce(false)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
)
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
config.ignoreDestructive = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -194,13 +174,10 @@ describe(`test if repoGitDiff`, () => {
})
it('can filter ignored and ignored destructive files', async () => {
- const output = 'force-app/main/default/lwc/jsconfig.json'
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContent.mockResolvedValueOnce(
- Buffer.from(`1${TAB}1${TAB}${output}`)
- )
- mockedGetSpawnContent.mockResolvedValueOnce(
- Buffer.from(`1${TAB}1${TAB}${output}`)
+ const output = ['force-app/main/default/lwc/jsconfig.json']
+ mockKeep.mockReturnValueOnce(false)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
)
config.ignore = FORCEIGNORE_MOCK_PATH
config.ignoreDestructive = FORCEIGNORE_MOCK_PATH
@@ -212,12 +189,11 @@ describe(`test if repoGitDiff`, () => {
})
it('can filter deletion if only ignored is specified files', async () => {
- const output = 'force-app/main/default/pages/test.page-meta.xml'
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ const output = ['force-app/main/default/pages/test.page-meta.xml']
+ mockKeep.mockReturnValueOnce(false)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
+ )
config.ignore = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -227,12 +203,10 @@ describe(`test if repoGitDiff`, () => {
})
it('cannot filter non deletion if only ignored destructive is specified files', async () => {
- const output = 'force-app/main/default/pages/test.page-meta.xml'
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ const output = ['force-app/main/default/pages/test.page-meta.xml']
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
+ )
config.ignoreDestructive = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
@@ -242,12 +216,11 @@ describe(`test if repoGitDiff`, () => {
})
it('can filter sub folders', async () => {
- const output = 'force-app/main/default/pages/test.page-meta.xml'
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ const output = ['force-app/main/default/pages/test.page-meta.xml']
+ mockKeep.mockReturnValueOnce(false)
+ mockGetDiffLines.mockImplementation(() =>
+ Promise.resolve(output.map(x => `${ADDITION}${TAB}${x}`))
+ )
config.ignore = FORCEIGNORE_MOCK_PATH
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
@@ -258,85 +231,52 @@ describe(`test if repoGitDiff`, () => {
it('can filter moved files', async () => {
const output: string[] = [
- 'force-app/main/default/classes/Account.cls',
- 'force-app/account/domain/classes/Account.cls',
+ `${DELETION}${TAB}force-app/main/default/classes/Account.cls`,
+ `${ADDITION}${TAB}force-app/account/domain/classes/Account.cls`,
]
-
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[1]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[0]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ mockGetDiffLines.mockImplementation(() => Promise.resolve(output))
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
- const expected: string[] = [`${ADDITION}${TAB}${output[1]}`]
+ const expected: string[] = [`${output[1]}`]
expect(work).toStrictEqual(expected)
})
it('can filter case changed files', async () => {
const output: string[] = [
- 'force-app/main/default/objects/Account/fields/TEST__c.field-meta.xml',
- 'force-app/main/default/objects/Account/fields/Test__c.field-meta.xml',
+ `${DELETION}${TAB}force-app/main/default/objects/Account/fields/TEST__c.field-meta.xml`,
+ `${ADDITION}${TAB}force-app/main/default/objects/Account/fields/Test__c.field-meta.xml`,
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[1]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[0]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ mockGetDiffLines.mockImplementation(() => Promise.resolve(output))
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
- const expected: string[] = [`${ADDITION}${TAB}${output[1]}`]
+ const expected: string[] = [`${output[1]}`]
expect(work).toStrictEqual(expected)
})
it('cannot filter renamed files', async () => {
const output: string[] = [
- 'force-app/main/default/classes/Account.cls',
- 'force-app/main/default/classes/RenamedAccount.cls',
+ `${DELETION}${TAB}force-app/main/default/classes/Account.cls`,
+ `${ADDITION}${TAB}force-app/main/default/classes/RenamedAccount.cls`,
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[1]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[0]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ mockGetDiffLines.mockImplementation(() => Promise.resolve(output))
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
- const expected: string[] = [
- `${ADDITION}${TAB}${output[1]}`,
- `${DELETION}${TAB}${output[0]}`,
- ]
- expect(work).toStrictEqual(expected)
+ expect(work).toStrictEqual(output)
})
it('cannot filter same name file with different metadata', async () => {
const output: string[] = [
- 'force-app/main/default/objects/Account/fields/CustomField__c.field-meta.xml',
- 'force-app/main/default/objects/Opportunity/fields/CustomField__c.field-meta.xml',
+ `${DELETION}${TAB}force-app/main/default/objects/Account/fields/CustomField__c.field-meta.xml`,
+ `${ADDITION}${TAB}force-app/main/default/objects/Opportunity/fields/CustomField__c.field-meta.xml`,
]
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[1]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([
- `1${TAB}1${TAB}${output[0]}`,
- ])
- mockedGetSpawnContentByLine.mockResolvedValueOnce([])
+ mockGetDiffLines.mockImplementation(() => Promise.resolve(output))
const repoGitDiff = new RepoGitDiff(config, globalMetadata)
const work = await repoGitDiff.getLines()
- const expected: string[] = [
- `${ADDITION}${TAB}${output[1]}`,
- `${DELETION}${TAB}${output[0]}`,
- ]
- expect(work).toStrictEqual(expected)
+ expect(work).toStrictEqual(output)
})
it('can reject in case of error', async () => {
- mockedGetSpawnContent.mockRejectedValue(new Error('test'))
+ mockGetDiffLines.mockImplementation(() => Promise.reject(new Error('test')))
try {
const repoGitDiff = new RepoGitDiff(
config,
@@ -362,7 +302,7 @@ describe(`test if repoGitDiff`, () => {
const result = sut['_extractComparisonName'](line)
// Assert
- expect(result).toBe('Test.cls')
+ expect(result).toBe('test.cls')
})
})
@@ -379,7 +319,7 @@ describe(`test if repoGitDiff`, () => {
const result = sut['_extractComparisonName'](line)
// Assert
- expect(result).toBe(elPath.replace(/\//g, ''))
+ expect(result).toBe(elPath.replace(/\//g, '').toLocaleLowerCase())
})
})
})
diff --git a/__tests__/unit/lib/utils/repoSetup.test.ts b/__tests__/unit/lib/utils/repoSetup.test.ts
deleted file mode 100644
index d40cc7e8..00000000
--- a/__tests__/unit/lib/utils/repoSetup.test.ts
+++ /dev/null
@@ -1,106 +0,0 @@
-'use strict'
-import { expect, jest, describe, it } from '@jest/globals'
-import RepoSetup from '../../../../src/utils/repoSetup'
-import {
- getSpawnContent,
- getSpawnContentByLine,
-} from '../../../../src/utils/childProcessUtils'
-import { Config } from '../../../../src/types/config'
-jest.mock('../../../../src/utils/childProcessUtils')
-
-const mockedGetSpawnContent = jest.mocked(getSpawnContent)
-const mockedGetSpawnContentByLine = jest.mocked(getSpawnContentByLine)
-
-describe(`test if repoSetup`, () => {
- const config: Config = {
- to: '',
- from: '',
- output: '',
- source: '',
- ignore: '',
- ignoreDestructive: '',
- apiVersion: 0,
- repo: '',
- ignoreWhitespace: false,
- generateDelta: false,
- include: '',
- includeDestructive: '',
- }
- describe('repoConfiguration', () => {
- it('can set core.quotepath to off', async () => {
- config.repo = './'
- config.from = 'HEAD~1'
- mockedGetSpawnContent.mockResolvedValue(Buffer.from(''))
- const repoSetup = new RepoSetup(config)
- await repoSetup.repoConfiguration()
- expect(mockedGetSpawnContent).toBeCalledTimes(1)
- })
- })
-
- describe('getCommitRefType', () => {
- it('returns "commit" when commitRef is a commit', async () => {
- const shaRef = 'HEAD'
- config.repo = './'
- config.to = shaRef
- mockedGetSpawnContent.mockResolvedValue(Buffer.from('commit'))
- const repoSetup = new RepoSetup(config)
- const commitRef = await repoSetup.getCommitRefType(shaRef)
-
- expect(commitRef).toBe('commit')
- })
-
- it('returns "tag" when commitRef is a tag', async () => {
- const shaRef = 'tag'
- config.repo = './'
- config.to = shaRef
- mockedGetSpawnContent.mockResolvedValue(Buffer.from('tag'))
- const repoSetup = new RepoSetup(config)
- const commitRef = await repoSetup.getCommitRefType(shaRef)
-
- expect(commitRef).toBe('tag')
- })
-
- it('return empty string when commitRef is a not a git sha', async () => {
- const shaRef = 'wrong sha'
- config.repo = './'
- config.to = shaRef
- mockedGetSpawnContent.mockResolvedValue(Buffer.from(''))
- const repoSetup = new RepoSetup(config)
- const commitRef = await repoSetup.getCommitRefType(shaRef)
-
- expect(commitRef).toBe('')
- })
- })
-
- describe('getFirstCommitRef', () => {
- it('returns the first commit SHA', async () => {
- // Arrange
- config.repo = './'
- mockedGetSpawnContent.mockResolvedValue(Buffer.from('firstsha'))
-
- // Act
- const repoSetup = new RepoSetup(config)
- const commitRef = await repoSetup.getFirstCommitRef()
-
- // Assert
-
- expect(commitRef).toBe('firstsha')
- })
- })
-
- describe('getAllFilesAsLineStream', () => {
- it('returns all the file at sha', async () => {
- // Arrange
- const expected = ['file/path/name.ext', 'other/file/path/name.ext']
- config.repo = './'
- mockedGetSpawnContentByLine.mockResolvedValue(expected)
-
- // Act
- const repoSetup = new RepoSetup(config)
- const lines = await repoSetup.getAllFilesAsLineStream()
-
- // Assert
- expect(lines).toStrictEqual(expected)
- })
- })
-})
diff --git a/package.json b/package.json
index b48add22..f2d9c2a0 100644
--- a/package.json
+++ b/package.json
@@ -26,10 +26,13 @@
"author": "Sebastien Colladon ",
"dependencies": {
"@salesforce/command": "^5.3.9",
- "fast-xml-parser": "^4.3.2",
+ "async": "^3.2.5",
+ "fast-xml-parser": "^4.3.3",
"fs-extra": "^11.2.0",
"ignore": "^5.3.0",
+ "isomorphic-git": "^1.25.3",
"lodash": "^4.17.21",
+ "simple-git": "^3.22.0",
"xmlbuilder2": "^3.1.1"
},
"license": "MIT",
@@ -68,30 +71,31 @@
"@commitlint/config-conventional": "^18.4.4",
"@jest/globals": "^29.7.0",
"@oclif/dev-cli": "^1.26.10",
- "@salesforce/cli-plugins-testkit": "^5.1.3",
+ "@salesforce/cli-plugins-testkit": "^5.1.7",
"@salesforce/dev-config": "^4.1.0",
"@salesforce/ts-sinon": "^1.4.19",
"@stryker-mutator/core": "^8.0.0",
"@stryker-mutator/jest-runner": "^8.0.0",
- "@swc/core": "^1.3.102",
+ "@swc/core": "^1.3.105",
+ "@types/async": "^3.2.24",
"@types/jest": "^29.5.11",
"@types/mocha": "^10.0.6",
- "@types/node": "^20.10.6",
- "@typescript-eslint/eslint-plugin": "^6.17.0",
- "@typescript-eslint/parser": "^6.17.0",
+ "@types/node": "^20.11.5",
+ "@typescript-eslint/eslint-plugin": "^6.19.0",
+ "@typescript-eslint/parser": "^6.19.0",
"benchmark": "^2.1.4",
"chai": "^4.3.10",
"depcheck": "^1.4.7",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import": "^2.29.1",
- "eslint-plugin-prettier": "^5.1.2",
+ "eslint-plugin-prettier": "^5.1.3",
"husky": "^8.0.3",
"jest": "^29.7.0",
"lint-staged": "^15.2.0",
"mocha": "^10.2.0",
"nyc": "^15.1.0",
- "prettier": "^3.1.1",
+ "prettier": "^3.2.4",
"shx": "^0.3.4",
"sinon": "^17.0.1",
"ts-jest": "^29.1.1",
diff --git a/src/adapter/GitAdapter.ts b/src/adapter/GitAdapter.ts
new file mode 100644
index 00000000..e284d577
--- /dev/null
+++ b/src/adapter/GitAdapter.ts
@@ -0,0 +1,310 @@
+import git, { TREE, WalkerEntry, WalkerIterateCallback } from 'isomorphic-git'
+import { simpleGit, SimpleGit } from 'simple-git'
+import { readFile } from 'fs-extra'
+import fs from 'fs'
+import { Config } from '../types/config'
+import {
+ UTF8_ENCODING,
+ GIT_FOLDER,
+ ADDITION,
+ DELETION,
+ MODIFICATION,
+} from '../constant/gitConstants'
+import { SOURCE_DEFAULT_VALUE } from '../utils/cliConstants'
+import { dirExists, fileExists, treatPathSep } from '../utils/fsUtils'
+import { DOT, PATH_SEP } from '../constant/fsConstants'
+import { join } from 'path'
+import { getLFSObjectContentPath, isLFS } from '../utils/gitLfsHelper'
+import { FileGitRef } from '../types/git'
+
+const firstCommitParams = ['rev-list', '--max-parents=0', 'HEAD']
+const BLOB_TYPE = 'blob'
+const TREE_TYPE = 'tree'
+
+const stripWhiteChar = (content: string) => content?.replace(/\s+/g, '')
+
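+// Custom walk iterator passed to isomorphic-git walk(): awaits each child entry sequentially instead of in parallel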
+export const iterate = async (
+ walk: WalkerIterateCallback,
+ children: IterableIterator<Array<WalkerEntry>>
+) => {
+ const result = []
+ for (const child of children) {
+ const walkedChildResult = await walk(child)
+ result.push(walkedChildResult)
+ }
+ return result
+}
+
+type GitBaseConfig = {
+ fs: typeof fs
+ dir: string
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ cache: any
+ gitdir?: string
+}
+
+export default class GitAdapter {
+ private static instances: Map<Config, GitAdapter> = new Map()
+ private static sharedCache = {}
+
+ public static getInstance(config: Config): GitAdapter {
+ if (!GitAdapter.instances.has(config)) {
+ const instance = new GitAdapter(config)
+ GitAdapter.instances.set(config, instance)
+ }
+
+ return GitAdapter.instances.get(config)!
+ }
+
+ private async getBufferFromBlob(blob: Uint8Array): Promise<Buffer> {
+ let bufferData: Buffer = Buffer.from(blob)
+ if (isLFS(bufferData)) {
+ const lsfPath = getLFSObjectContentPath(bufferData)
+ bufferData = await readFile(join(this.config.repo, lsfPath))
+ }
+
+ return bufferData
+ }
+
+ protected readonly isoGit = git
+ protected readonly simpleGit: SimpleGit
+ protected readonly gitConfig: GitBaseConfig
+
+ private constructor(
+ // eslint-disable-next-line no-unused-vars
+ protected readonly config: Config
+ ) {
+ this.simpleGit = simpleGit(config.repo)
+ this.gitConfig = {
+ fs: fs,
+ dir: config.repo,
+ cache: GitAdapter.sharedCache,
+ }
+ }
+
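+ // Sets core.quotepath to 'off' so git does not escape non-ASCII characters in file paths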
+ public async configureRepository() {
+ const quotepathOff = {
+ path: 'core.quotepath',
+ value: 'off',
+ }
+ await this.isoGit.setConfig({
+ ...this.gitConfig,
+ ...quotepathOff,
+ })
+ }
+
+ public async setGitDir(): Promise<void> {
+ if (this.gitConfig.gitdir) {
+ return
+ }
+ if (await dirExists(join(this.config.repo, GIT_FOLDER))) {
+ this.gitConfig.gitdir = join(this.config.repo, GIT_FOLDER)
+ } else if (await fileExists(join(this.config.repo, GIT_FOLDER))) {
+ const gitFileContent = await readFile(join(this.config.repo, GIT_FOLDER))
+ this.gitConfig.gitdir = gitFileContent.toString().trim().substring(8)
+ } else {
+ throw new Error('Not a git repository')
+ }
+ }
+
+ public async parseRev(ref: string) {
+ const parsedRev = await this.simpleGit.revparse([ref])
+ return parsedRev
+ }
+
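+ // Returns true when the path resolves to a tree or a blob at the 'to' ref, false otherwise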
+ public async pathExists(path: string) {
+ try {
+ const { type } = await this.isoGit.readObject({
+ ...this.gitConfig,
+ oid: this.config.to,
+ filepath: treatPathSep(path),
+ })
+ return [TREE_TYPE, BLOB_TYPE].includes(type)
+ } catch {
+ return false
+ }
+ }
+
+ public async getFirstCommitRef() {
+ const sha = await this.simpleGit.raw(firstCommitParams)
+ return sha
+ }
+
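+ // Reads the file content at the given ref as UTF-8, resolving LFS pointers and returning '' when the path is not found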
+ public async getStringContent(forRef: FileGitRef): Promise<string> {
+ try {
+ const { blob } = await this.isoGit.readBlob({
+ ...this.gitConfig,
+ oid: forRef.oid,
+ filepath: treatPathSep(forRef.path),
+ })
+ const bufferData = await this.getBufferFromBlob(blob)
+ return bufferData?.toString(UTF8_ENCODING) ?? ''
+ } catch (error) {
+ const err = error as Error
+ if (err.name === 'NotFoundError') {
+ return ''
+ } else {
+ throw error
+ }
+ }
+ }
+
+ public async getFilesPath(path: string) {
+ const walker = filePathWalker(path)
+ return await this.isoGit.walk({
+ ...this.gitConfig,
+ dir: treatPathSep(path),
+ trees: [TREE({ ref: this.config.to })],
+ map: walker,
+ iterate,
+ })
+ }
+
+ public async getFilesFrom(path: string) {
+ const treatedPath = treatPathSep(path)
+ const object = await this.isoGit.readObject({
+ ...this.gitConfig,
+ oid: this.config.to,
+ filepath: treatedPath,
+ })
+ // Return object exposing async getContent
+ // Iterate over and output file using the getContent API when needed
+ const blobFiles: { path: string; content: Uint8Array }[] = []
+ if (object.type === TREE_TYPE) {
+ const filesContent = await this.isoGit.walk({
+ ...this.gitConfig,
+ dir: treatedPath,
+ trees: [TREE({ ref: this.config.to })],
+ map: contentWalker(treatedPath),
+ iterate,
+ })
+ blobFiles.push(...filesContent)
+ } else if (object.type === BLOB_TYPE) {
+ blobFiles.push({
+ path,
+ content: object.object as Uint8Array,
+ })
+ } else {
+ throw new Error(`Path ${path} does not exist in ${this.config.to}`)
+ }
+ return await this.getContentFromFiles(blobFiles)
+ }
+
+ protected async getContentFromFiles(
+ blobFiles: { path: string; content: Uint8Array }[]
+ ) {
+ const bufferFiles: { path: string; content: Buffer }[] = []
+ for (const file of blobFiles) {
+ const content = await this.getBufferFromBlob(file.content)
+ bufferFiles.push({
+ path: treatPathSep(file.path),
+ content,
+ })
+ }
+ return bufferFiles
+ }
+
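+ // Walks the 'from' and 'to' trees and produces git-diff-like lines ('A', 'D' or 'M' + tab + path) for changed files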
+ public async getDiffLines() {
+ const walker = diffLineWalker(this.config)
+ return this.isoGit.walk({
+ ...this.gitConfig,
+ dir: join(this.config.repo, this.config.source),
+ trees: [TREE({ ref: this.config.from }), TREE({ ref: this.config.to })],
+ map: walker,
+ iterate,
+ })
+ }
+}
+
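+// Walker that collects the normalized path of every blob under the given folder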
+export const filePathWalker = (path: string) => {
+ const shouldSkip = evaluateShouldSkip(path)
+ return async (filepath: string, trees: (WalkerEntry | null)[]) => {
+ if (await shouldSkip(filepath, trees)) {
+ return
+ }
+ return treatPathSep(filepath)
+ }
+}
+
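+// Walker that collects the normalized path and raw blob content of every file under the given folder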
+export const contentWalker = (path: string) => {
+ const shouldSkip = evaluateShouldSkip(path)
+ return async (filepath: string, trees: (WalkerEntry | null)[]) => {
+ if (await shouldSkip(filepath, trees)) {
+ return
+ }
+
+ const [tree] = trees
+ const blob: Uint8Array = (await tree!.content()) as Uint8Array
+ return {
+ path: treatPathSep(filepath),
+ content: blob,
+ }
+ }
+}
+
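+// Classifies each path as added (A), deleted (D) or modified (M) by comparing blob oids between both trees, skipping unchanged files and, optionally, whitespace-only changes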
+export const diffLineWalker = (config: Config) => {
+ const shouldSkip = evaluateShouldSkip(config.source)
+
+ return async (filepath: string, trees: (WalkerEntry | null)[]) => {
+ if (await shouldSkip(filepath, trees)) {
+ return
+ }
+
+ const [fromOID, toOID] = await Promise.all(trees.map(tree => tree?.oid()))
+ if (fromOID === toOID) {
+ return
+ }
+ let type
+ if (fromOID === undefined) {
+ type = ADDITION
+ } else if (toOID === undefined) {
+ type = DELETION
+ } else {
+ if (
+ config.ignoreWhitespace &&
+ (await isContentsEqualIgnoringWhiteChars(trees))
+ ) {
+ return
+ }
+ type = MODIFICATION
+ }
+
+ const result = `${type}\t${treatPathSep(filepath)}`
+ return result
+ }
+}
+
+const isContentsEqualIgnoringWhiteChars = async (
+ trees: (WalkerEntry | null)[]
+) => {
+ const [fromContent, toContent] = await Promise.all(
+ trees.map(async tree => {
+ const content = (await tree!.content()) as Uint8Array
+ return stripWhiteChar(Buffer.from(content).toString())
+ })
+ )
+ return fromContent === toContent
+}
+
+const pathDoesNotStartsWith = (root: string) => {
+ const gitFormattedRoot = treatPathSep(root) + PATH_SEP
+
+ return (path: string) =>
+ gitFormattedRoot !== SOURCE_DEFAULT_VALUE &&
+ !path.startsWith(gitFormattedRoot)
+}
+
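+// Skips the repository root ('.'), anything outside the base folder (unless the default source is used) and entries that are not plain blobs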
+const evaluateShouldSkip = (base: string) => {
+ const checkPath = pathDoesNotStartsWith(base)
+ return async (path: string, trees: (WalkerEntry | null)[]) => {
+ if (path === DOT || checkPath(path)) {
+ return true
+ }
+
+ const types = await Promise.all(
+ trees.filter(Boolean).map(tree => tree!.type())
+ )
+
+ return types.some(type => type !== BLOB_TYPE)
+ }
+}
diff --git a/src/commands/sgd/source/delta.ts b/src/commands/sgd/source/delta.ts
index 42a6ce7d..f28bbd31 100644
--- a/src/commands/sgd/source/delta.ts
+++ b/src/commands/sgd/source/delta.ts
@@ -6,7 +6,7 @@ import {
REPO_DEFAULT_VALUE,
SOURCE_DEFAULT_VALUE,
OUTPUT_DEFAULT_VALUE,
-} from '../../../utils/cliHelper'
+} from '../../../utils/cliConstants'
import { Config } from '../../../types/config'
import { Output } from '../../../types/output'
diff --git a/src/constant/fsConstants.ts b/src/constant/fsConstants.ts
index 4d67a0b3..250b201d 100644
--- a/src/constant/fsConstants.ts
+++ b/src/constant/fsConstants.ts
@@ -1,3 +1,8 @@
'use strict'
+
+import { posix } from 'path'
+
export const DOT = '.'
export const UTF8_ENCODING = 'utf8'
+export const PATH_SEPARATOR_REGEX = /[/\\]+/
+export const PATH_SEP = posix.sep
diff --git a/src/constant/gitConstants.ts b/src/constant/gitConstants.ts
index cb070f3a..051488eb 100644
--- a/src/constant/gitConstants.ts
+++ b/src/constant/gitConstants.ts
@@ -2,16 +2,12 @@
export const ADDITION = 'A'
export const DELETION = 'D'
export const MODIFICATION = 'M'
-export const COMMIT_REF_TYPE = 'commit'
-export const TAG_REF_TYPE = 'tag'
-export const POINTER_REF_TYPES = [COMMIT_REF_TYPE, TAG_REF_TYPE]
export const GIT_DIFF_TYPE_REGEX = /^.\s+/u
export const GIT_FOLDER = '.git'
-export const GIT_PATH_SEP = '/'
export const IGNORE_WHITESPACE_PARAMS = [
'--ignore-all-space',
'--ignore-blank-lines',
'--ignore-cr-at-eol',
'--word-diff-regex=|[^[:space:]]',
]
-export const GIT_COMMAND = 'git'
+export const UTF8_ENCODING = 'utf8'
diff --git a/src/constant/metadataConstants.ts b/src/constant/metadataConstants.ts
index cd798298..226a7b1f 100644
--- a/src/constant/metadataConstants.ts
+++ b/src/constant/metadataConstants.ts
@@ -1,5 +1,6 @@
'use strict'
+export const CUSTOM_APPLICATION_TYPE = 'applications'
export const FIELD_DIRECTORY_NAME = 'fields'
export const FLOW_XML_NAME = 'Flow'
export const INFOLDER_SUFFIX = `Folder`
@@ -31,4 +32,3 @@ export const SUB_OBJECT_TYPES = [
export const TERRITORY_MODEL_TYPE = 'territory2Models'
export const TRANSLATION_EXTENSION = 'translation'
export const TRANSLATION_TYPE = 'Translations'
-export const XML_HEADER_TAG_END = '?>'
diff --git a/src/metadata/MetadataRepositoryImpl.ts b/src/metadata/MetadataRepositoryImpl.ts
index 6a374d8d..3fc04d18 100644
--- a/src/metadata/MetadataRepositoryImpl.ts
+++ b/src/metadata/MetadataRepositoryImpl.ts
@@ -1,8 +1,9 @@
'use strict'
-import { parse, sep } from 'path'
+import { parse } from 'path'
import { Metadata } from '../types/metadata'
import {
+ CUSTOM_APPLICATION_TYPE,
METAFILE_SUFFIX,
OBJECT_TRANSLATION_TYPE,
OBJECT_TYPE,
@@ -11,7 +12,7 @@ import {
TERRITORY_MODEL_TYPE,
} from '../constant/metadataConstants'
import { MetadataRepository } from './MetadataRepository'
-import { DOT } from '../constant/fsConstants'
+import { DOT, PATH_SEP } from '../constant/fsConstants'
export class MetadataRepositoryImpl implements MetadataRepository {
protected readonly metadataPerExt: Map<string, Metadata>
@@ -36,7 +37,7 @@ export class MetadataRepositoryImpl implements MetadataRepository {
}
public get(path: string): Metadata | undefined {
- const parts = path.split(sep)
+ const parts = path.split(PATH_SEP)
const metadata = this.searchByExtension(parts)
return metadata ?? this.searchByDirectory(parts)
}
@@ -81,11 +82,11 @@ export class MetadataRepositoryImpl implements MetadataRepository {
MetadataRepositoryImpl.COMPOSED_TYPES.includes(type.directoryName)
) {
const parentType = path
- .split(sep)
+ .split(PATH_SEP)
.find(part => this.metadataPerDir.get(part))!
fullyQualifiedName = path
.slice(path.indexOf(parentType))
- .replaceAll(sep, '')
+ .replaceAll(PATH_SEP, '')
}
return fullyQualifiedName
}
@@ -95,7 +96,10 @@ export class MetadataRepositoryImpl implements MetadataRepository {
}
private static TYPES_WITH_SUB_TYPES = [OBJECT_TYPE, TERRITORY_MODEL_TYPE, '']
- private static EXTENSION_MATCHING_EXCEPTION = [RESTRICTION_RULE_TYPE]
+ private static EXTENSION_MATCHING_EXCEPTION = [
+ CUSTOM_APPLICATION_TYPE,
+ RESTRICTION_RULE_TYPE,
+ ]
private static COMPOSED_TYPES = [
OBJECT_TYPE,
diff --git a/src/post-processor/flowTranslationProcessor.ts b/src/post-processor/flowTranslationProcessor.ts
index ddc46a54..37a37722 100644
--- a/src/post-processor/flowTranslationProcessor.ts
+++ b/src/post-processor/flowTranslationProcessor.ts
@@ -8,8 +8,8 @@ import {
TRANSLATION_EXTENSION,
TRANSLATION_TYPE,
} from '../constant/metadataConstants'
-import { writeFile, scanExtension } from '../utils/fsHelper'
-import { isSubDir, readFile } from '../utils/fsUtils'
+import { writeFile, readDir } from '../utils/fsHelper'
+import { isSubDir, readFile, treatPathSep } from '../utils/fsUtils'
import { pathExists } from 'fs-extra'
import { parse, join } from 'path'
import { buildIgnoreHelper } from '../utils/ignoreHelper'
@@ -20,11 +20,10 @@ import {
convertJsonToXml,
} from '../utils/fxpHelper'
import { fillPackageWithParameter } from '../utils/packageHelper'
-import { treatPathSep } from '../utils/childProcessUtils'
import { Work } from '../types/work'
import { MetadataRepository } from '../metadata/MetadataRepository'
-const EXTENSION = `${TRANSLATION_EXTENSION}${METAFILE_SUFFIX}`
+const EXTENSION = `.${TRANSLATION_EXTENSION}${METAFILE_SUFFIX}`
const getTranslationName = (translationPath: string) =>
parse(translationPath.replace(META_REGEX, '')).name
@@ -38,11 +37,11 @@ const getDefaultTranslation = () => ({
})
export default class FlowTranslationProcessor extends BaseProcessor {
- protected readonly translationPaths: Map<string, any[]>
+ protected readonly translations: Map<string, any[]>
constructor(work: Work, metadata: MetadataRepository) {
super(work, metadata)
- this.translationPaths = new Map()
+ this.translations = new Map()
}
public override async process() {
@@ -53,17 +52,15 @@ export default class FlowTranslationProcessor extends BaseProcessor {
}
async _buildFlowDefinitionsMap() {
- this.translationPaths.clear()
-
- const translationsIterator = await scanExtension(
- this.config.source,
- EXTENSION,
- this.work.config
- )
+ this.translations.clear()
+ const allFiles = await readDir(this.config.source, this.work.config)
const ignoreHelper = await buildIgnoreHelper(this.config)
+ const translationPaths = allFiles.filter((file: string) =>
+ file.endsWith(EXTENSION)
+ )
- for (const translationPath of translationsIterator) {
+ for (const translationPath of translationPaths) {
if (
!ignoreHelper.globalIgnore.ignores(translationPath) &&
!isSubDir(this.config.output, translationPath)
@@ -74,7 +71,7 @@ export default class FlowTranslationProcessor extends BaseProcessor {
}
protected async _handleFlowTranslation() {
- for (const translationPath of this.translationPaths.keys()) {
+ for (const translationPath of this.translations.keys()) {
fillPackageWithParameter({
store: this.work.diffs.package,
type: TRANSLATION_TYPE,
@@ -85,7 +82,7 @@ export default class FlowTranslationProcessor extends BaseProcessor {
await this._getTranslationAsJSON(translationPath)
this._scrapTranslationFile(
jsonTranslation,
- this.translationPaths.get(translationPath)
+ this.translations.get(translationPath)
)
const scrappedTranslation = convertJsonToXml(jsonTranslation)
await writeFile(translationPath, scrappedTranslation, this.config)
@@ -114,7 +111,7 @@ export default class FlowTranslationProcessor extends BaseProcessor {
protected async _parseTranslationFile(translationPath: string) {
const translationJSON = await parseXmlFileToJson(
- translationPath,
+ { path: translationPath, oid: this.config.to },
this.config
)
const flowDefinitions = asArray(
@@ -137,10 +134,10 @@ export default class FlowTranslationProcessor extends BaseProcessor {
}) {
const packagedElements = this.work.diffs.package.get(FLOW_XML_NAME)
if (packagedElements?.has(flowDefinition?.fullName)) {
- if (!this.translationPaths.has(translationPath)) {
- this.translationPaths.set(translationPath, [])
+ if (!this.translations.has(translationPath)) {
+ this.translations.set(translationPath, [])
}
- this.translationPaths.get(translationPath).push(flowDefinition)
+ this.translations.get(translationPath).push(flowDefinition)
}
}
diff --git a/src/post-processor/includeProcessor.ts b/src/post-processor/includeProcessor.ts
index 4f70a4e8..97ff7128 100644
--- a/src/post-processor/includeProcessor.ts
+++ b/src/post-processor/includeProcessor.ts
@@ -1,21 +1,21 @@
'use strict'
import BaseProcessor from './baseProcessor'
import { buildIncludeHelper, IgnoreHelper } from '../utils/ignoreHelper'
-import RepoSetup from '../utils/repoSetup'
import DiffLineInterpreter from '../service/diffLineInterpreter'
-import { treatPathSep } from '../utils/childProcessUtils'
+import { treatPathSep } from '../utils/fsUtils'
import { ADDITION, DELETION } from '../constant/gitConstants'
import { Work } from '../types/work'
+import GitAdapter from '../adapter/GitAdapter'
import { MetadataRepository } from '../metadata/MetadataRepository'
const TAB = '\t'
export default class IncludeProcessor extends BaseProcessor {
- protected readonly gitHelper: RepoSetup
+ protected readonly gitAdapter: GitAdapter
protected from: string
protected includeHelper!: IgnoreHelper
constructor(work: Work, metadata: MetadataRepository) {
super(work, metadata)
- this.gitHelper = new RepoSetup(this.config)
+ this.gitAdapter = GitAdapter.getInstance(this.config)
this.from = this.config.from
}
@@ -32,7 +32,7 @@ export default class IncludeProcessor extends BaseProcessor {
}
protected async _prepare() {
- const firstSha = await this.gitHelper.getFirstCommitRef()
+ const firstSha = await this.gitAdapter.getFirstCommitRef()
this.config.from = firstSha
this.includeHelper = await buildIncludeHelper(this.config)
@@ -46,7 +46,9 @@ export default class IncludeProcessor extends BaseProcessor {
[ADDITION]: [],
[DELETION]: [],
}
- const lines: string[] = await this.gitHelper.getAllFilesAsLineStream()
+ const lines: string[] = await this.gitAdapter.getFilesPath(
+ this.config.source
+ )
for (const line of lines) {
Object.keys(includeHolder).forEach(changeType => {
const changedLine = `${changeType}${TAB}${treatPathSep(line)}`
diff --git a/src/post-processor/packageGenerator.ts b/src/post-processor/packageGenerator.ts
index edef776a..fa815516 100644
--- a/src/post-processor/packageGenerator.ts
+++ b/src/post-processor/packageGenerator.ts
@@ -53,12 +53,12 @@ export default class PackageGenerator extends BaseProcessor {
folder: DESTRUCTIVE_CHANGES_FILE_NAME,
manifest: new Map(),
},
- ].map(async op =>
- outputFile(
+ ].map(op => {
+ return outputFile(
join(this.config.output, op.folder, op.filename),
pc.buildPackage(op.manifest) as string
)
- )
+ })
)
}
}
diff --git a/src/service/botHandler.ts b/src/service/botHandler.ts
index 8f662c69..48141ef0 100644
--- a/src/service/botHandler.ts
+++ b/src/service/botHandler.ts
@@ -1,8 +1,8 @@
'use strict'
import ShareFolderHandler from './sharedFolderHandler'
import { fillPackageWithParameter } from '../utils/packageHelper'
-import { parse, sep } from 'path'
-import { DOT } from '../utils/fsHelper'
+import { parse } from 'path'
+import { DOT, PATH_SEP } from '../constant/fsConstants'
const BOT_TYPE = 'Bot'
const BOT_EXTENSION = 'bot'
@@ -11,7 +11,7 @@ export default class BotHandler extends ShareFolderHandler {
protected override _getElementName() {
const parsedPath = this._getParsedPath()
const elementName = new Set([
- parsedPath.dir.split(sep).pop(),
+ parsedPath.dir.split(PATH_SEP).pop(),
parsedPath.name,
])
return [...elementName].join(DOT)
@@ -22,7 +22,7 @@ export default class BotHandler extends ShareFolderHandler {
}
protected async _addParentBot() {
- const botName = this.parentFolder.split(sep).pop() as string
+ const botName = this.parentFolder.split(PATH_SEP).pop() as string
fillPackageWithParameter({
store: this.diffs.package,
type: BOT_TYPE,
@@ -31,7 +31,9 @@ export default class BotHandler extends ShareFolderHandler {
if (!this.config.generateDelta) return
- const botPath = `${parse(this.line).dir}${sep}${botName}.${BOT_EXTENSION}`
+ const botPath = `${
+ parse(this.line).dir
+ }${PATH_SEP}${botName}.${BOT_EXTENSION}`
await this._copyWithMetaFile(botPath)
}
diff --git a/src/service/customObjectHandler.ts b/src/service/customObjectHandler.ts
index 6b511dc9..8217a0bf 100644
--- a/src/service/customObjectHandler.ts
+++ b/src/service/customObjectHandler.ts
@@ -27,9 +27,9 @@ export default class CustomObjectHandler extends StandardHandler {
const fields = await readDir(fieldsFolder, this.config)
const masterDetailsFields = await asyncFilter(
fields,
- async (fieldPath: string) => {
+ async (path: string) => {
const content = await readPathFromGit(
- join(fieldsFolder, fieldPath),
+ { path, oid: this.config.to },
this.config
)
return content.includes(MASTER_DETAIL_TAG)
@@ -37,9 +37,7 @@ export default class CustomObjectHandler extends StandardHandler {
)
await Promise.all(
- masterDetailsFields.map((field: string) =>
- this._copyWithMetaFile(join(fieldsFolder, field))
- )
+ masterDetailsFields.map((field: string) => this._copyWithMetaFile(field))
)
}
}
diff --git a/src/service/diffLineInterpreter.ts b/src/service/diffLineInterpreter.ts
index c1a47657..7dcd14e3 100644
--- a/src/service/diffLineInterpreter.ts
+++ b/src/service/diffLineInterpreter.ts
@@ -2,6 +2,11 @@
import { MetadataRepository } from '../metadata/MetadataRepository'
import { Work } from '../types/work'
import TypeHandlerFactory from './typeHandlerFactory'
+import { availableParallelism } from 'os'
+import { queue } from 'async'
+import StandardHandler from './standardHandler'
+
+const MAX_PARALLELISM = Math.min(availableParallelism(), 6)
export default class DiffLineInterpreter {
constructor(
@@ -13,8 +18,18 @@ export default class DiffLineInterpreter {
public async process(lines: string[]) {
const typeHandlerFactory = new TypeHandlerFactory(this.work, this.metadata)
+ const processor = queue(
+ async (handler: StandardHandler) => await handler.handle(),
+ MAX_PARALLELISM
+ )
+
for (const line of lines) {
- await typeHandlerFactory.getTypeHandler(line).handle()
+ const handler: StandardHandler = typeHandlerFactory.getTypeHandler(line)
+ processor.push(handler)
+ }
+
+ if (processor.length() > 0) {
+ await processor.drain()
}
}
}
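
For context, a minimal standalone sketch of the bounded-concurrency pattern the diffLineInterpreter change introduces, assuming the `async` v3 API where `drain()` returns a promise when called without a callback; the worker body is a placeholder for `handler.handle()`.

```ts
import { availableParallelism } from 'os'
import { queue } from 'async'

// Cap concurrency the same way the new interpreter does
const MAX_PARALLELISM = Math.min(availableParallelism(), 6)

export const processInParallel = async (lines: string[]) => {
  // Each pushed item is handled by this worker, at most MAX_PARALLELISM at a time
  const processor = queue(async (line: string) => {
    // placeholder for typeHandlerFactory.getTypeHandler(line).handle()
    await Promise.resolve(line)
  }, MAX_PARALLELISM)

  for (const line of lines) {
    processor.push(line)
  }

  // drain() never resolves on an empty queue, hence the length() guard
  if (processor.length() > 0) {
    await processor.drain()
  }
}
```
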
diff --git a/src/service/inBundleHandler.ts b/src/service/inBundleHandler.ts
index 2c2ae738..6fbe87bb 100644
--- a/src/service/inBundleHandler.ts
+++ b/src/service/inBundleHandler.ts
@@ -1,8 +1,7 @@
'use strict'
import InResourceHandler from './inResourceHandler'
-import { sep } from 'path'
import { META_REGEX } from '../constant/metadataConstants'
-import { cleanUpPackageMember } from '../utils/packageHelper'
+import { PATH_SEP } from '../constant/fsConstants'
export default class BundleHandler extends InResourceHandler {
protected override _getElementName() {
@@ -10,11 +9,9 @@ export default class BundleHandler extends InResourceHandler {
.slice(this.splittedLine.indexOf(this.type) + 1)
.slice(0, 2)
- const packageMember: string = bundlePath
- .join(sep)
+ return bundlePath
+ .join(PATH_SEP)
.replace(META_REGEX, '')
.replace(this.suffixRegex, '')
-
- return cleanUpPackageMember(packageMember)
}
}
diff --git a/src/service/inFileHandler.ts b/src/service/inFileHandler.ts
index 38fc5bdb..fc104418 100644
--- a/src/service/inFileHandler.ts
+++ b/src/service/inFileHandler.ts
@@ -2,13 +2,11 @@
import { LABEL_EXTENSION, LABEL_XML_NAME } from '../constant/metadataConstants'
import StandardHandler from './standardHandler'
import { basename } from 'path'
-import { writeFile, DOT } from '../utils/fsHelper'
+import { writeFile } from '../utils/fsHelper'
+import { DOT } from '../constant/fsConstants'
import { getInFileAttributes, isPackable } from '../metadata/metadataManager'
import MetadataDiff from '../utils/metadataDiff'
-import {
- cleanUpPackageMember,
- fillPackageWithParameter,
-} from '../utils/packageHelper'
+import { fillPackageWithParameter } from '../utils/packageHelper'
import { Manifest, Work } from '../types/work'
import { MetadataRepository } from '../metadata/MetadataRepository'
@@ -77,9 +75,10 @@ export default class InFileHandler extends StandardHandler {
member: string
) {
if (isPackable(subType)) {
- const cleanedMember = cleanUpPackageMember(
- `${getNamePrefix({ subType, line: this.line })}${member}`
- )
+ const cleanedMember = `${getNamePrefix({
+ subType,
+ line: this.line,
+ })}${member}`
fillPackageWithParameter({
store,
diff --git a/src/service/inFolderHandler.ts b/src/service/inFolderHandler.ts
index 6b283652..d3ff33d0 100644
--- a/src/service/inFolderHandler.ts
+++ b/src/service/inFolderHandler.ts
@@ -5,9 +5,9 @@ import {
META_REGEX,
METAFILE_SUFFIX,
} from '../constant/metadataConstants'
-import { cleanUpPackageMember } from '../utils/packageHelper'
-import { join, parse, sep } from 'path'
+import { join, parse } from 'path'
import { readDir } from '../utils/fsHelper'
+import { PATH_SEP } from '../constant/fsConstants'
const INFOLDER_SUFFIX_REGEX = new RegExp(`${INFOLDER_SUFFIX}$`)
const EXTENSION_SUFFIX_REGEX = new RegExp(/\.[^/.]+$/)
@@ -22,9 +22,11 @@ export default class InFolderHandler extends StandardHandler {
protected async _copyFolderMetaFile() {
const [, folderPath, folderName] = this._parseLine()!
- const folderFileName = `${folderName}.${
- this.metadataDef.suffix!.toLowerCase() + METAFILE_SUFFIX
- }`
+ const suffix = folderName.endsWith(INFOLDER_SUFFIX)
+ ? ''
+ : `.${this.metadataDef.suffix!.toLowerCase()}`
+
+ const folderFileName = `${folderName}${suffix}${METAFILE_SUFFIX}`
await this._copyWithMetaFile(join(folderPath, folderFileName))
}
@@ -35,20 +37,18 @@ export default class InFolderHandler extends StandardHandler {
await Promise.all(
dirContent
- .filter(file => file.includes(parsedLine.name))
- .map(file => this._copyWithMetaFile(join(parsedLine.dir, file)))
+ .filter((file: string) => file.includes(parsedLine.name))
+ .map((file: string) => this._copyWithMetaFile(file))
)
}
protected override _getElementName() {
- const packageMember = this.splittedLine
+ return this.splittedLine
.slice(this.splittedLine.indexOf(this.type) + 1)
- .join(sep)
+ .join(PATH_SEP)
.replace(META_REGEX, '')
.replace(INFOLDER_SUFFIX_REGEX, '')
.replace(EXTENSION_SUFFIX_REGEX, '')
-
- return cleanUpPackageMember(packageMember)
}
protected override _isProcessable() {
diff --git a/src/service/inResourceHandler.ts b/src/service/inResourceHandler.ts
index 63fc8ca9..a0f90ec1 100644
--- a/src/service/inResourceHandler.ts
+++ b/src/service/inResourceHandler.ts
@@ -1,9 +1,9 @@
'use strict'
import StandardHandler from './standardHandler'
-import { join, parse, sep } from 'path'
-import { pathExists, DOT } from '../utils/fsHelper'
+import { join, parse } from 'path'
+import { pathExists } from '../utils/fsHelper'
import { META_REGEX, METAFILE_SUFFIX } from '../constant/metadataConstants'
-import { cleanUpPackageMember } from '../utils/packageHelper'
+import { DOT, PATH_SEP } from '../constant/fsConstants'
import { Work } from '../types/work'
import { MetadataRepository } from '../metadata/MetadataRepository'
@@ -41,7 +41,7 @@ export default class ResourceHandler extends StandardHandler {
protected override _getElementName() {
const parsedPath = this._getParsedPath()
- return cleanUpPackageMember(parsedPath.name)
+ return parsedPath.name
}
protected override _getParsedPath() {
@@ -64,13 +64,15 @@ export default class ResourceHandler extends StandardHandler {
}
resourcePath.push(pathElement)
}
- const lastPathElement = resourcePath[resourcePath.length - 1].split(DOT)
+ const lastPathElement = resourcePath[resourcePath.length - 1]
+ .replace(METAFILE_SUFFIX, '')
+ .split(DOT)
if (lastPathElement.length > 1) {
lastPathElement.pop()
}
resourcePath[resourcePath.length - 1] = lastPathElement.join(DOT)
- return `${resourcePath.join(sep)}`
+ return `${resourcePath.join(PATH_SEP)}`
}
protected override _getMetaTypeFilePath() {
diff --git a/src/service/lwcHandler.ts b/src/service/lwcHandler.ts
index fc28b64d..9d89073f 100644
--- a/src/service/lwcHandler.ts
+++ b/src/service/lwcHandler.ts
@@ -1,10 +1,11 @@
'use strict'
+import { PATH_SEP } from '../constant/fsConstants'
import InResourceHandler from './inResourceHandler'
-import { parse, sep } from 'path'
+import { parse } from 'path'
export default class LwcHandler extends InResourceHandler {
protected override _isProcessable() {
- const parentFolder = parse(this.line).dir.split(sep).pop()
+ const parentFolder = parse(this.line).dir.split(PATH_SEP).pop()
return parentFolder !== this.type
}
diff --git a/src/service/objectTranslationHandler.ts b/src/service/objectTranslationHandler.ts
index 97a6aadb..bbe575a3 100644
--- a/src/service/objectTranslationHandler.ts
+++ b/src/service/objectTranslationHandler.ts
@@ -4,8 +4,9 @@ import StandardHandler from './standardHandler'
import { getInFileAttributes } from '../metadata/metadataManager'
import { OBJECT_TRANSLATION_META_XML_SUFFIX } from '../constant/metadataConstants'
import { writeFile } from '../utils/fsHelper'
-import { parse, sep } from 'path'
+import { parse } from 'path'
import MetadataDiff from '../utils/metadataDiff'
+import { PATH_SEP } from '../constant/fsConstants'
export default class ObjectTranslationHandler extends ResourceHandler {
public override async handleAddition() {
@@ -31,7 +32,7 @@ export default class ObjectTranslationHandler extends ResourceHandler {
protected _getObjectTranslationPath() {
// Return Object Translation Path for both objectTranslation and fieldTranslation
// QUESTION: Why fieldTranslation element are not deployable when objectTranslation element is not in the deployed sources (even if objectTranslation file is empty) ?
- return `${parse(this.line).dir}${sep}${
+ return `${parse(this.line).dir}${PATH_SEP}${
this.splittedLine[this.splittedLine.length - 2]
}.${OBJECT_TRANSLATION_META_XML_SUFFIX}`
}
diff --git a/src/service/standardHandler.ts b/src/service/standardHandler.ts
index 3824f8c8..01f19598 100644
--- a/src/service/standardHandler.ts
+++ b/src/service/standardHandler.ts
@@ -1,5 +1,5 @@
'use strict'
-import { join, parse, sep, ParsedPath } from 'path'
+import { join, parse, ParsedPath } from 'path'
import {
ADDITION,
DELETION,
@@ -7,11 +7,9 @@ import {
GIT_DIFF_TYPE_REGEX,
} from '../constant/gitConstants'
import { META_REGEX, METAFILE_SUFFIX } from '../constant/metadataConstants'
-import {
- cleanUpPackageMember,
- fillPackageWithParameter,
-} from '../utils/packageHelper'
-import { copyFiles, DOT } from '../utils/fsHelper'
+import { DOT, PATH_SEP } from '../constant/fsConstants'
+import { fillPackageWithParameter } from '../utils/packageHelper'
+import { copyFiles } from '../utils/fsHelper'
import { Manifest, Manifests, Work } from '../types/work'
import { Metadata } from '../types/metadata'
import { Config } from '../types/config'
@@ -41,11 +39,10 @@ export default class StandardHandler {
) {
this.changeType = line.charAt(0) as string
this.line = line.replace(GIT_DIFF_TYPE_REGEX, '')
- // internal getters
this.diffs = work.diffs
this.config = work.config
this.warnings = work.warnings
- this.splittedLine = this.line.split(sep)
+ this.splittedLine = this.line.split(PATH_SEP)
if (this.metadata.get(this.type)?.metaFile === true) {
this.line = this.line.replace(METAFILE_SUFFIX, '')
@@ -59,7 +56,7 @@ export default class StandardHandler {
.split(DOT)
.pop() as string
- this.parentFolder = this.parsedLine.dir.split(sep).slice(-1)[0]
+ this.parentFolder = this.parsedLine.dir.split(PATH_SEP).slice(-1)[0]
this.metadataDef = this.metadata.get(this.type) as Metadata
}
@@ -107,7 +104,7 @@ export default class StandardHandler {
.slice(
this.splittedLine.findIndex(x => x.includes(METAFILE_SUFFIX)) - 1
)
- .join(sep)
+ .join(PATH_SEP)
.replace(META_REGEX, '')
.replace(this.suffixRegex, '')
@@ -116,7 +113,7 @@ export default class StandardHandler {
protected _getElementName() {
const parsedPath = this._getParsedPath()
- return cleanUpPackageMember(parsedPath.base)
+ return parsedPath.base
}
protected _fillPackage(store: Manifest) {
diff --git a/src/service/subCustomObjectHandler.ts b/src/service/subCustomObjectHandler.ts
index a73b1e05..751e9165 100644
--- a/src/service/subCustomObjectHandler.ts
+++ b/src/service/subCustomObjectHandler.ts
@@ -5,7 +5,8 @@ import {
OBJECT_META_XML_SUFFIX,
} from '../constant/metadataConstants'
import { readPathFromGit } from '../utils/fsHelper'
-import { join, sep } from 'path'
+import { join } from 'path'
+import { PATH_SEP } from '../constant/fsConstants'
export default class SubCustomObjectHandler extends StandardHandler {
public override async handleAddition() {
@@ -13,12 +14,15 @@ export default class SubCustomObjectHandler extends StandardHandler {
if (!this.config.generateDelta) return
// QUESTION: Why we need to add parent object for Master Detail field ? https://help.salesforce.com/s/articleView?id=000386883&type=1
- const data = await readPathFromGit(this.line, this.config)
+ const data = await readPathFromGit(
+ { path: this.line, oid: this.config.to },
+ this.config
+ )
if (!data.includes(MASTER_DETAIL_TAG)) return
const customObjectDirPath = this.splittedLine
.slice(0, this.splittedLine.indexOf(this.type))
- .join(sep)
+ .join(PATH_SEP)
const customObjectName =
this.splittedLine[this.splittedLine.indexOf(this.type) - 1]
diff --git a/src/types/git.ts b/src/types/git.ts
new file mode 100644
index 00000000..1e1f31e9
--- /dev/null
+++ b/src/types/git.ts
@@ -0,0 +1,4 @@
+export type FileGitRef = {
+ path: string
+ oid: string
+}
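
A hedged usage sketch for the new FileGitRef shape: callers pair a repo-relative path with the commit oid they want to read it at (here `config.to`), matching the call sites updated elsewhere in this diff. The helper and import paths below are illustrative, not part of the change.

```ts
import { readPathFromGit } from '../utils/fsHelper'
import { Config } from '../types/config'
import { FileGitRef } from './git'

// Illustrative helper: read a file's content at the "to" revision
export const readAtTargetRevision = async (path: string, config: Config) => {
  const ref: FileGitRef = { path, oid: config.to }
  // Resolves to '' when the path does not exist at that revision
  return await readPathFromGit(ref, config)
}
```
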
diff --git a/src/utils/childProcessUtils.ts b/src/utils/childProcessUtils.ts
deleted file mode 100644
index ad5ffb41..00000000
--- a/src/utils/childProcessUtils.ts
+++ /dev/null
@@ -1,105 +0,0 @@
-'use strict'
-import {
- ChildProcessWithoutNullStreams,
- SpawnOptionsWithoutStdio,
- spawn,
-} from 'child_process'
-import { normalize, sep } from 'path'
-
-export const EOLRegex: RegExp = /\r?\n/g
-
-export const treatPathSep = (data: string) => data?.replace(/[/\\]+/g, sep)
-
-export const sanitizePath = (data: string) => {
- if (data) {
- return normalize(treatPathSep(data))
- }
- return data
-}
-
-export const getSpawnContentByLine = async (
- command: string,
- args: string[],
- options: SpawnOptionsWithoutStdio = {}
-): Promise<string[]> => {
- const stream = spawn(command, [...args], options)
- const handler = new LineStreamHandler()
- return handleStream(stream, handler) as Promise<string[]>
-}
-
-export const getSpawnContent = async (
- command: string,
- args: string[],
- options: SpawnOptionsWithoutStdio = {}
-): Promise<Buffer> => {
- const stream = spawn(command, [...args], options)
- const handler = new BufferStreamHandler()
- return handleStream(stream, handler) as Promise<Buffer>
-}
-
-const handleStream = (
- stream: ChildProcessWithoutNullStreams,
- handler: StreamHandler
-): Promise<string[] | Buffer> => {
- return new Promise((resolve, reject) => {
- stream.stdout.on('data', (data: Buffer) => handler.onData(data))
-
- stream.stderr.setEncoding('utf8')
- stream.stderr.on('data', (data: string) => handler.onError(data))
-
- stream.on('close', (code: number) => {
- if (code !== 0) {
- reject(new Error(handler.getError()))
- return
- }
-
- const result = handler.getContent()
- resolve(result)
- })
- })
-}
-
-abstract class StreamHandler {
- protected readonly error: string[] = []
- // eslint-disable-next-line no-unused-vars
- public abstract onData(data: Buffer): void
- public onError(data: string) {
- this.error.push(data.toString())
- }
- public getError() {
- return this.error.join('')
- }
- public abstract getContent(): Buffer | string[]
-}
-
-class BufferStreamHandler extends StreamHandler {
- protected readonly content: Buffer[] = []
- public override onData(data: Buffer): void {
- this.content.push(data)
- }
- public override getContent(): Buffer {
- return Buffer.concat(this.content)
- }
-}
-
-class LineStreamHandler extends StreamHandler {
- protected readonly content: string[] = []
- protected chunk: string = ''
- public override onData(data: Buffer): void {
- this.chunk += data
- let eolIndex = this.chunk.search(EOLRegex)
- while (eolIndex >= 0) {
- this.content.push(this.chunk.slice(0, eolIndex))
- this.chunk = this.chunk.slice(eolIndex + 1)
- eolIndex = this.chunk.search(EOLRegex)
- }
- }
-
- public override getContent(): string[] {
- if (this.chunk.length > 0) {
- this.content.push(this.chunk)
- }
-
- return this.content
- }
-}
diff --git a/src/utils/cliConstants.ts b/src/utils/cliConstants.ts
new file mode 100644
index 00000000..6a0dfbe2
--- /dev/null
+++ b/src/utils/cliConstants.ts
@@ -0,0 +1,4 @@
+export const TO_DEFAULT_VALUE = 'HEAD'
+export const OUTPUT_DEFAULT_VALUE = './output'
+export const SOURCE_DEFAULT_VALUE = './'
+export const REPO_DEFAULT_VALUE = './'
diff --git a/src/utils/cliHelper.ts b/src/utils/cliHelper.ts
index 83da12a3..c069a686 100644
--- a/src/utils/cliHelper.ts
+++ b/src/utils/cliHelper.ts
@@ -1,16 +1,13 @@
'use strict'
import asyncFilter from './asyncFilter'
import messages from '../locales/en'
-import RepoSetup from './repoSetup'
-import { sanitizePath } from './childProcessUtils'
-import { POINTER_REF_TYPES } from '../constant/gitConstants'
+import GitAdapter from '../adapter/GitAdapter'
import {
getLatestSupportedVersion,
isVersionSupported,
} from '../metadata/metadataManager'
import { format } from 'util'
-import { isGit } from './fsHelper'
-import { readFile, dirExists, fileExists } from './fsUtils'
+import { readFile, dirExists, fileExists, sanitizePath } from './fsUtils'
import { join } from 'path'
import { Work } from '../types/work'
import { Config } from '../types/config'
@@ -23,11 +20,11 @@ const SFDX_PROJECT_FILE_NAME = 'sfdx-project.json'
export default class CLIHelper {
protected readonly config: Config
- protected readonly repoSetup: RepoSetup
+ protected readonly gitAdapter: GitAdapter
constructor(protected readonly work: Work) {
this.config = work.config
- this.repoSetup = new RepoSetup(work.config)
+ this.gitAdapter = GitAdapter.getInstance(work.config)
}
protected async _validateGitSha() {
@@ -44,8 +41,10 @@ export default class CLIHelper {
return true
}).map(async (shaParameter: keyof Config) => {
const shaValue: string = this.config[shaParameter] as string
- const refType = await this.repoSetup.getCommitRefType(shaValue)
- if (!POINTER_REF_TYPES.includes(refType?.replace(/\s/g, ''))) {
+ try {
+ const ref: string = await this.gitAdapter.parseRev(shaValue)
+ ;(this.config[shaParameter] as string) = ref
+ } catch (error) {
errors.push(
format(messages.errorParameterIsNotGitSHA, shaParameter, shaValue)
)
@@ -61,7 +60,6 @@ export default class CLIHelper {
await this._handleDefault()
const errors: string[] = []
- const isGitPromise = isGit(this.config.repo)
const directoriesPromise = this._filterDirectories()
const filesPromise = this._filterFiles()
@@ -75,8 +73,9 @@ export default class CLIHelper {
errors.push(format(messages.errorPathIsNotFile, file))
)
- const isGitRepo = await isGitPromise
- if (!isGitRepo) {
+ try {
+ await this.gitAdapter.setGitDir()
+ } catch {
errors.push(format(messages.errorPathIsNotGit, this.config.repo))
}
@@ -87,7 +86,7 @@ export default class CLIHelper {
throw new Error(errors.join(', '))
}
- await this.repoSetup.repoConfiguration()
+ await this.gitAdapter.configureRepository()
}
protected _filterDirectories() {
@@ -174,8 +173,3 @@ export default class CLIHelper {
)
}
}
-
-export const TO_DEFAULT_VALUE = 'HEAD'
-export const OUTPUT_DEFAULT_VALUE = './output'
-export const SOURCE_DEFAULT_VALUE = './'
-export const REPO_DEFAULT_VALUE = './'
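
As a usage sketch of the new validation flow (the error message text below is a stand-in, not the project's locale string): each user-supplied commit-ish is resolved through `GitAdapter.parseRev` and replaced by the resolved SHA, and a rejection marks it invalid.

```ts
import GitAdapter from '../adapter/GitAdapter'
import { Config } from '../types/config'

export const resolveShaParameters = async (config: Config, errors: string[]) => {
  const gitAdapter = GitAdapter.getInstance(config)
  for (const param of ['from', 'to'] as const) {
    try {
      // parseRev resolves branch names, tags and abbreviated SHAs to a full SHA
      config[param] = await gitAdapter.parseRev(config[param])
    } catch {
      errors.push(`--${param} is not a valid git expression: ${config[param]}`)
    }
  }
}
```
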
diff --git a/src/utils/fsHelper.ts b/src/utils/fsHelper.ts
index 598c6fd6..a87e0406 100644
--- a/src/utils/fsHelper.ts
+++ b/src/utils/fsHelper.ts
@@ -1,19 +1,12 @@
'use strict'
-import { join } from 'path'
-import { readFile as fsReadFile, outputFile } from 'fs-extra'
-import { GIT_COMMAND, GIT_FOLDER, GIT_PATH_SEP } from '../constant/gitConstants'
-import { UTF8_ENCODING } from '../constant/fsConstants'
-import { EOLRegex, getSpawnContent, treatPathSep } from './childProcessUtils'
-import { isLFS, getLFSObjectContentPath } from './gitLfsHelper'
+import { outputFile } from 'fs-extra'
import { buildIgnoreHelper } from './ignoreHelper'
-import { dirExists, fileExists } from './fsUtils'
+import { join } from 'path'
import { Config } from '../types/config'
+import GitAdapter from '../adapter/GitAdapter'
+import { FileGitRef } from '../types/git'
+import { treatPathSep } from './fsUtils'
-const FOLDER = 'tree'
-
-const showCmd = ['--no-pager', 'show']
-export const gitPathSeparatorNormalizer = (path: string) =>
- path.replace(/\\+/g, GIT_PATH_SEP)
const copiedFiles = new Set()
const writtenFiles = new Set()
@@ -28,88 +21,46 @@ export const copyFiles = async (config: Config, src: string) => {
return
}
try {
- const bufferData: Buffer = await readPathFromGitAsBuffer(src, config)
- const utf8Data = bufferData?.toString(UTF8_ENCODING) ?? ''
-
- if (utf8Data.startsWith(FOLDER)) {
- const [header, , ...files] = utf8Data.split(EOLRegex)
- const folder = header.split(':')[1]
- for (const file of files) {
- const fileSrc = join(folder, file)
-
- await copyFiles(config, fileSrc)
- }
- } else {
- const dst = join(config.output, treatPathSep(src))
+ const gitAdapter = GitAdapter.getInstance(config)
+ const files = await gitAdapter.getFilesFrom(treatPathSep(src))
+ for (const file of files) {
// Use Buffer to output the file content
// Let fs implementation detect the encoding ("utf8" or "binary")
- await outputFile(dst, bufferData)
+ const dst = join(config.output, file.path)
+ await outputFile(treatPathSep(dst), file.content)
+ copiedFiles.add(dst)
}
} catch {
/* empty */
}
}
-const readPathFromGitAsBuffer = async (
- path: string,
- { repo, to }: { repo: string; to: string }
-) => {
- const normalizedPath = gitPathSeparatorNormalizer(path)
- let bufferData: Buffer = await getSpawnContent(
- GIT_COMMAND,
- [...showCmd, `${to}:${normalizedPath}`],
- {
- cwd: repo,
- }
- )
- if (isLFS(bufferData)) {
- const lsfPath = getLFSObjectContentPath(bufferData)
- bufferData = await fsReadFile(join(repo, lsfPath))
- }
-
- return bufferData
-}
-
-export const readPathFromGit = async (path: string, config: Config) => {
+export const readPathFromGit = async (forRef: FileGitRef, config: Config) => {
let utf8Data = ''
try {
- const bufferData = await readPathFromGitAsBuffer(path, config)
- utf8Data = bufferData.toString(UTF8_ENCODING)
- } catch {
+ const gitAdapter = GitAdapter.getInstance(config)
+ utf8Data = await gitAdapter.getStringContent(forRef)
+ } catch (error) {
/* empty */
}
return utf8Data
}
export const pathExists = async (path: string, config: Config) => {
- const data = await readPathFromGit(path, config)
- return !!data
-}
-
-export const readDir = async (dir: string, config: Config) => {
- const data = await readPathFromGit(dir, config)
- const dirContent: string[] = []
- if (data.startsWith(FOLDER)) {
- const [, , ...files] = data.split(EOLRegex)
- dirContent.push(...files)
+ const gitAdapter = GitAdapter.getInstance(config)
+ try {
+ return await gitAdapter.pathExists(path)
+ } catch {
+ return false
}
- return dirContent
}
-export async function* scan(
- dir: string,
+export const readDir = async (
+ path: string,
config: Config
-): AsyncGenerator<string> {
- const entries = await readDir(dir, config)
- for (const file of entries) {
- const filePath = join(dir, file)
- if (file.endsWith(GIT_PATH_SEP)) {
- yield* scan(filePath, config)
- } else {
- yield filePath
- //yield new Promise(resolve => resolve(filePath))
- }
- }
+): Promise<string[]> => {
+ const gitAdapter = GitAdapter.getInstance(config)
+ return await gitAdapter.getFilesPath(path)
}
export const writeFile = async (
@@ -128,26 +79,3 @@ export const writeFile = async (
}
await outputFile(join(config.output, treatPathSep(path)), content)
}
-
-export const scanExtension = async (
- dir: string,
- ext: string,
- config: Config
-): Promise<string[]> => {
- const result = []
- for await (const file of scan(dir, config)) {
- if (file.endsWith(ext)) {
- result.push(file)
- }
- }
- return result
-}
-
-export const isGit = async (dir: string) => {
- const isGitDir = await dirExists(join(dir, GIT_FOLDER))
- const isGitFile = await fileExists(join(dir, GIT_FOLDER))
-
- return isGitDir || isGitFile
-}
-
-export const DOT = '.'
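
To make the new read path concrete, here is a hypothetical consumer of the refactored fsHelper surface; every call below now goes through the GitAdapter singleton rather than spawning `git show`/`git ls-tree`, and the master-detail check string is a simplified stand-in for the MASTER_DETAIL_TAG constant.

```ts
import { copyFiles, pathExists, readDir, readPathFromGit } from './fsHelper'
import { Config } from '../types/config'

// Hypothetical walk: copy every master-detail field file found under a folder
export const copyMasterDetailFields = async (config: Config, folder: string) => {
  if (!(await pathExists(folder, config))) return
  // readDir now returns repo-relative paths usable directly as FileGitRef paths
  for (const path of await readDir(folder, config)) {
    const content = await readPathFromGit({ path, oid: config.to }, config)
    if (content.includes('<type>MasterDetail</type>')) {
      await copyFiles(config, path) // writes the blob at config.to under config.output
    }
  }
}
```
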
diff --git a/src/utils/fsUtils.ts b/src/utils/fsUtils.ts
index d6b1aae7..534911a8 100644
--- a/src/utils/fsUtils.ts
+++ b/src/utils/fsUtils.ts
@@ -1,7 +1,16 @@
'use strict'
import { stat, readFile as fsReadFile } from 'fs-extra'
-import { isAbsolute, relative } from 'path'
-import { UTF8_ENCODING } from '../constant/fsConstants'
+import { isAbsolute, normalize, relative } from 'path'
+import {
+ PATH_SEPARATOR_REGEX,
+ UTF8_ENCODING,
+ PATH_SEP,
+} from '../constant/fsConstants'
+
+export const treatPathSep = (data: string) =>
+ data.split(PATH_SEPARATOR_REGEX).filter(Boolean).join(PATH_SEP)
+export const sanitizePath = (data: string) =>
+ data ? normalize(treatPathSep(data)) : data
export const isSubDir = (parent: string, dir: string) => {
const rel = relative(parent, dir)
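
A behavioral sketch of the relocated helpers, assuming PATH_SEPARATOR_REGEX matches runs of `/` or `\` and PATH_SEP is `'/'`: mixed or duplicated separators collapse to single forward slashes, and sanitizePath additionally applies `path.normalize`, which is platform-aware.

```ts
import { treatPathSep, sanitizePath } from './fsUtils'

const messy = 'force-app\\main\\default//objects/Account/Account.object-meta.xml'

// -> 'force-app/main/default/objects/Account/Account.object-meta.xml'
console.log(treatPathSep(messy))

// Same separator handling plus path.normalize, so '..' segments are resolved as well
console.log(sanitizePath('force-app/main/../main/default'))
```
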
diff --git a/src/utils/fxpHelper.ts b/src/utils/fxpHelper.ts
index 2358fea6..8ae69fdb 100644
--- a/src/utils/fxpHelper.ts
+++ b/src/utils/fxpHelper.ts
@@ -1,9 +1,9 @@
'use strict'
import { XMLBuilder, XMLParser } from 'fast-xml-parser'
-import { readPathFromGit } from './fsHelper'
-import { XML_HEADER_TAG_END } from '../constant/metadataConstants'
import { Config } from '../types/config'
+import { readPathFromGit } from './fsHelper'
+import { FileGitRef } from '../types/git'
const XML_PARSER_OPTION = {
commentPropName: '#comment',
@@ -37,17 +37,18 @@ export const xml2Json = (xmlContent: string) => {
return jsonContent
}
-export const parseXmlFileToJson = async (line: string, config: Config) => {
- const xmlContent = await readPathFromGit(line, config)
+export const parseXmlFileToJson = async (
+ forRef: FileGitRef,
+ config: Config
+) => {
+ const xmlContent = await readPathFromGit(forRef, config)
return xml2Json(xmlContent)
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const convertJsonToXml = (jsonContent: any) => {
const xmlBuilder = new XMLBuilder(JSON_PARSER_OPTION)
- return xmlBuilder
- .build(jsonContent)
- .replace(XML_HEADER_TAG_END, `${XML_HEADER_TAG_END}`)
+ return xmlBuilder.build(jsonContent)
}
export const ATTRIBUTE_PREFIX = '@_'
diff --git a/src/utils/gitLfsHelper.ts b/src/utils/gitLfsHelper.ts
index 0ea1241d..885eb29d 100644
--- a/src/utils/gitLfsHelper.ts
+++ b/src/utils/gitLfsHelper.ts
@@ -2,7 +2,6 @@
import { sep } from 'path'
import { GIT_FOLDER } from '../constant/gitConstants'
import { UTF8_ENCODING } from '../constant/fsConstants'
-import { EOLRegex } from './childProcessUtils'
const LFS_HEADER = Buffer.from('version https://git-lfs')
@@ -11,7 +10,7 @@ export const isLFS = (content: Buffer): boolean =>
export const getLFSObjectContentPath = (bufferContent: Buffer): string => {
const content = bufferContent.toString(UTF8_ENCODING)
- const oid = content.split(EOLRegex)[1].split(':')[1]
+ const oid = content.split(/\n/)[1].split(':')[1]
return [
GIT_FOLDER,
'lfs',
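
For reference, a standalone sketch of the Git LFS pointer layout this helper relies on: the second line of a pointer file reads `oid sha256:<digest>`, and the corresponding blob is stored under `.git/lfs/objects/<digest[0:2]>/<digest[2:4]>/<digest>`. The digest below is an arbitrary example value.

```ts
const pointer = [
  'version https://git-lfs.github.com/spec/v1',
  'oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393',
  'size 12345',
].join('\n')

// Mirrors the parsing above: second line, value after the ':'
const oid = pointer.split(/\n/)[1].split(':')[1]
const lfsObjectContentPath = ['.git', 'lfs', 'objects', oid.slice(0, 2), oid.slice(2, 4), oid].join('/')
// -> '.git/lfs/objects/4d/7a/<full digest>'
```
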
diff --git a/src/utils/ignoreHelper.ts b/src/utils/ignoreHelper.ts
index 2dc578e0..5670a8cd 100644
--- a/src/utils/ignoreHelper.ts
+++ b/src/utils/ignoreHelper.ts
@@ -7,6 +7,8 @@ import {
GIT_DIFF_TYPE_REGEX,
} from '../constant/gitConstants'
+// QUESTION: Why should we ignore recordTypes in the destructive changes manifest?
+// Because the operation is not enabled on the metadata API https://ideas.salesforce.com/s/idea/a0B8W00000GdeGKUAZ/allow-deletion-of-record-type-using-metadata-api
const BASE_DESTRUCTIVE_IGNORE = ['recordTypes/']
export class IgnoreHelper {
@@ -21,7 +23,7 @@ export class IgnoreHelper {
const changeType = line.charAt(0)
let ignInstance!: Ignore
- if (DELETION == changeType) {
+ if (DELETION === changeType) {
ignInstance = this.destructiveIgnore
} else if ([ADDITION, MODIFICATION].includes(changeType)) {
ignInstance = this.globalIgnore
@@ -43,9 +45,7 @@ export const buildIgnoreHelper = async ({
}) => {
if (!ignoreInstance) {
const globalIgnore = await _buildIgnore(ignore)
- const destructiveIgnore = ignoreDestructive
- ? await _buildIgnore(ignoreDestructive)
- : await _buildIgnore(ignore)
+ const destructiveIgnore = await _buildIgnore(ignoreDestructive || ignore)
destructiveIgnore.add(BASE_DESTRUCTIVE_IGNORE)
diff --git a/src/utils/metadataDiff.ts b/src/utils/metadataDiff.ts
index cef9b46c..b2df4be5 100644
--- a/src/utils/metadataDiff.ts
+++ b/src/utils/metadataDiff.ts
@@ -125,7 +125,7 @@ const getElementProcessor =
return metadataMember
}
-// Partial JSON generation functional are
+// Partial JSON generation functional area
// Side effect on jsonContent
const generatePartialJSON =
(attributes: Map) =>
@@ -146,31 +146,26 @@ const generatePartialJSON =
}
export default class MetadataDiff {
- protected readonly configTo: Config
- protected readonly configFrom: Config
protected toContent: any
protected add!: Manifest
constructor(
+ // eslint-disable-next-line no-unused-vars
protected readonly config: Config,
+ // eslint-disable-next-line no-unused-vars
protected readonly metadata: MetadataRepository,
+ // eslint-disable-next-line no-unused-vars
protected readonly attributes: Map
- ) {
- this.config = config
- this.metadata = metadata
- this.attributes = attributes
- this.configTo = {
- repo: this.config.repo,
- to: this.config.to,
- } as Config
- this.configFrom = {
- repo: this.config.repo,
- to: this.config.from,
- } as Config
- }
+ ) {}
public async compare(path: string) {
- this.toContent = await parseXmlFileToJson(path, this.configTo)
- const fromContent = await parseXmlFileToJson(path, this.configFrom)
+ this.toContent = await parseXmlFileToJson(
+ { path, oid: this.config.to },
+ this.config
+ )
+ const fromContent = await parseXmlFileToJson(
+ { path, oid: this.config.from },
+ this.config
+ )
const diff = compareContent(this.attributes)
diff --git a/src/utils/packageHelper.ts b/src/utils/packageHelper.ts
index 82b0311e..fbc3ec91 100644
--- a/src/utils/packageHelper.ts
+++ b/src/utils/packageHelper.ts
@@ -52,8 +52,3 @@ export const fillPackageWithParameter = ({
}
store.get(type)?.add(member)
}
-
-const PACKAGE_MEMBER_PATH_SEP = '/'
-export const cleanUpPackageMember = (packageMember: string) => {
- return `${packageMember}`.replace(/\\+/g, PACKAGE_MEMBER_PATH_SEP)
-}
diff --git a/src/utils/repoGitDiff.ts b/src/utils/repoGitDiff.ts
index eac64c89..8ee9331b 100644
--- a/src/utils/repoGitDiff.ts
+++ b/src/utils/repoGitDiff.ts
@@ -1,33 +1,12 @@
'use strict'
-import { getSpawnContentByLine, treatPathSep } from './childProcessUtils'
import { buildIgnoreHelper } from './ignoreHelper'
-import {
- ADDITION,
- DELETION,
- IGNORE_WHITESPACE_PARAMS,
- MODIFICATION,
- GIT_COMMAND,
-} from '../constant/gitConstants'
-import { SpawnOptionsWithoutStdio } from 'child_process'
-import { gitPathSeparatorNormalizer } from './fsHelper'
import { Config } from '../types/config'
+import GitAdapter from '../adapter/GitAdapter'
+import { ADDITION, DELETION } from '../constant/gitConstants'
import { MetadataRepository } from '../metadata/MetadataRepository'
-const DIFF_FILTER = '--diff-filter'
-
-const fullDiffParams = ['--no-pager', 'diff', '--numstat', '--no-renames']
-const filterDeleted = [`${DIFF_FILTER}=${DELETION}`]
-const filterAdded = [`${DIFF_FILTER}=${ADDITION}`]
-const filterModification = [`${DIFF_FILTER}=${MODIFICATION}`]
-const TAB = '\t'
-const NUM_STAT_REGEX = /^((-|\d+)\t){2}/
-const lcSensitivity: Intl.CollatorOptions = {
- sensitivity: 'accent',
-}
-
export default class RepoGitDiff {
- protected readonly spawnConfig: SpawnOptionsWithoutStdio
- protected readonly ignoreWhitespaceParams: string[]
+ protected readonly gitAdapter: GitAdapter
constructor(
// eslint-disable-next-line no-unused-vars
@@ -35,49 +14,13 @@ export default class RepoGitDiff {
// eslint-disable-next-line no-unused-vars
protected readonly metadata: MetadataRepository
) {
- this.spawnConfig = {
- cwd: this.config.repo,
- }
- this.ignoreWhitespaceParams = this.config.ignoreWhitespace
- ? IGNORE_WHITESPACE_PARAMS
- : []
+ this.gitAdapter = GitAdapter.getInstance(this.config)
}
public async getLines() {
- const lines = await this._getFilteredDiff()
- return Array.from(new Set([...lines.flat().filter(Boolean)]))
- }
-
- protected async _getFilteredDiff() {
- const lines = await Promise.all([
- this._spawnGitDiff(filterAdded, ADDITION),
- this._spawnGitDiff(filterDeleted, DELETION),
- this._spawnGitDiff(filterModification, MODIFICATION),
- ])
- const treatedLines = await this._treatResult(lines.flat())
- return treatedLines
- }
-
- protected async _spawnGitDiff(
- filter: string[],
- changeType: string
- ): Promise<string[]> {
- const diffContent = await getSpawnContentByLine(
- GIT_COMMAND,
- [
- ...fullDiffParams,
- ...filter,
- ...this.ignoreWhitespaceParams,
- this.config.from,
- this.config.to,
- gitPathSeparatorNormalizer(this.config.source),
- ],
- this.spawnConfig
- )
-
- return diffContent.map(line =>
- treatPathSep(line).replace(NUM_STAT_REGEX, `${changeType}${TAB}`)
- )
+ const lines = await this.gitAdapter.getDiffLines()
+ const treatedLines = await this._treatResult(lines)
+ return Array.from(new Set([...treatedLines]))
}
protected async _treatResult(lines: string[]): Promise {
@@ -94,17 +37,17 @@ export default class RepoGitDiff {
protected _getRenamedElements(lines: string[]) {
const linesPerDiffType: Map<string, string[]> =
this._spreadLinePerDiffType(lines)
- const AfileNames: string[] =
+ const AfileNames: Set<string> = new Set(
linesPerDiffType
.get(ADDITION)
?.map(line => this._extractComparisonName(line)) ?? []
- const deletedRenamed: string[] =
- linesPerDiffType.get(DELETION)?.filter((line: string) => {
- const dEl = this._extractComparisonName(line)
- return AfileNames.some(
- aEl => !aEl.localeCompare(dEl, undefined, lcSensitivity)
- )
- }) ?? []
+ )
+ const deletedRenamed: string[] = [
+ ...(linesPerDiffType.get(DELETION) ?? []),
+ ].filter((line: string) => {
+ const dEl = this._extractComparisonName(line)
+ return AfileNames.has(dEl)
+ })
return deletedRenamed
}
@@ -124,6 +67,6 @@ export default class RepoGitDiff {
}
protected _extractComparisonName(line: string) {
- return this.metadata.getFullyQualifiedName(line)
+ return this.metadata.getFullyQualifiedName(line).toLocaleLowerCase()
}
}
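
A simplified sketch of the new case-insensitive rename detection: additions are indexed in a Set by lower-cased fully qualified name, and a deletion whose name is found there is treated as the delete half of a rename. The name extractor below is a stand-in for `MetadataRepository.getFullyQualifiedName`.

```ts
const ADDITION = 'A'
const DELETION = 'D'
const TAB = '\t'

// Stand-in for this.metadata.getFullyQualifiedName(line).toLocaleLowerCase()
const comparisonName = (line: string) =>
  line.split(TAB)[1].split('/').pop()!.toLocaleLowerCase()

export const getRenamedElements = (lines: string[]) => {
  const addedNames = new Set(
    lines.filter(line => line.startsWith(ADDITION)).map(comparisonName)
  )
  return lines.filter(
    line => line.startsWith(DELETION) && addedNames.has(comparisonName(line))
  )
}

// getRenamedElements(['A\tclasses/Foo.cls', 'D\told/foo.cls']) -> ['D\told/foo.cls']
```
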
diff --git a/src/utils/repoSetup.ts b/src/utils/repoSetup.ts
deleted file mode 100644
index 35adcf50..00000000
--- a/src/utils/repoSetup.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-'use strict'
-import { Config } from '../types/config'
-import { getSpawnContent, getSpawnContentByLine } from './childProcessUtils'
-import { GIT_COMMAND } from '../constant/gitConstants'
-import { UTF8_ENCODING } from '../constant/fsConstants'
-import { SpawnOptionsWithoutStdio } from 'child_process'
-const commitCheckParams = ['cat-file', '-t']
-const firstCommitParams = ['rev-list', '--max-parents=0', 'HEAD']
-const allFilesParams = ['ls-tree', '--name-only', '-r']
-const gitConfig = ['config', 'core.quotepath', 'off']
-
-export default class RepoSetup {
- protected readonly spawnConfig: SpawnOptionsWithoutStdio
-
- constructor(protected readonly config: Config) {
- this.spawnConfig = {
- cwd: config.repo,
- }
- }
-
- public async repoConfiguration() {
- await getSpawnContent(GIT_COMMAND, gitConfig, this.spawnConfig)
- }
-
- public async getCommitRefType(commitRef: string) {
- const data: Buffer = await getSpawnContent(
- GIT_COMMAND,
- [...commitCheckParams, commitRef],
- {
- cwd: this.config.repo,
- }
- )
-
- return data?.toString(UTF8_ENCODING)
- }
-
- public async getFirstCommitRef() {
- const data: Buffer = await getSpawnContent(GIT_COMMAND, firstCommitParams, {
- cwd: this.config.repo,
- })
-
- return data?.toString(UTF8_ENCODING)
- }
-
- public async getAllFilesAsLineStream() {
- const result = await getSpawnContentByLine(
- GIT_COMMAND,
- [...allFilesParams, this.config.to],
- this.spawnConfig
- )
-
- return result
- }
-}
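
Since repoSetup.ts is deleted, here is a hedged sketch of how its responsibilities map onto the GitAdapter singleton, using only method names that appear at the updated call sites in this diff.

```ts
import GitAdapter from '../adapter/GitAdapter'
import { Config } from '../types/config'

export const describeRepository = async (config: Config) => {
  const gitAdapter = GitAdapter.getInstance(config)
  await gitAdapter.configureRepository() // was RepoSetup.repoConfiguration()
  const firstSha = await gitAdapter.getFirstCommitRef() // was RepoSetup.getFirstCommitRef()
  const paths = await gitAdapter.getFilesPath(config.source) // was RepoSetup.getAllFilesAsLineStream()
  return { firstSha, fileCount: paths.length }
}
```
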
diff --git a/yarn.lock b/yarn.lock
index 256a1196..64653c09 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -252,13 +252,13 @@ __metadata:
linkType: hard
"@babel/helpers@npm:^7.23.7":
- version: 7.23.7
- resolution: "@babel/helpers@npm:7.23.7"
+ version: 7.23.8
+ resolution: "@babel/helpers@npm:7.23.8"
dependencies:
"@babel/template": ^7.22.15
"@babel/traverse": ^7.23.7
"@babel/types": ^7.23.6
- checksum: 4f3bdf35fb54ff79107c6020ba1e36a38213a15b05ca0fa06c553b65f566e185fba6339fb3344be04593ebc244ed0bbb0c6087e73effe0d053a30bcd2db3a013
+ checksum: 8b522d527921f8df45a983dc7b8e790c021250addf81ba7900ba016e165442a527348f6f877aa55e1debb3eef9e860a334b4e8d834e6c9b438ed61a63d9a7ad4
languageName: node
linkType: hard
@@ -503,21 +503,21 @@ __metadata:
linkType: hard
"@babel/runtime-corejs3@npm:^7.12.5":
- version: 7.23.7
- resolution: "@babel/runtime-corejs3@npm:7.23.7"
+ version: 7.23.8
+ resolution: "@babel/runtime-corejs3@npm:7.23.8"
dependencies:
core-js-pure: ^3.30.2
regenerator-runtime: ^0.14.0
- checksum: 98792dc4558aba3f57f0a102e1be0a13b5dfa78ecbc1a57a9d7aeb898b2b182961a9e7c83984028e5eb4dbf0e42cdeb92cd33c36d346c3a701d2d9dc0c1fce7a
+ checksum: e786b79bcb3031bd7433fb4523e43f0acbd386cd7bb5b0a6df6e627c7965706b3d5612211ea3d729ce4459ba1d1b654ccdd8aefe791c6413f70882ee1be903b9
languageName: node
linkType: hard
"@babel/runtime@npm:^7.12.5":
- version: 7.23.7
- resolution: "@babel/runtime@npm:7.23.7"
+ version: 7.23.8
+ resolution: "@babel/runtime@npm:7.23.8"
dependencies:
regenerator-runtime: ^0.14.0
- checksum: eba85bd24d250abb5ae19b16cffc15a54d3894d8228ace40fa4c0e2f1938f28b38ad3e3430ebff9a1ef511eeb8c527e36044ac19076d6deafa52cef35d8624b9
+ checksum: 0bd5543c26811153822a9f382fd39886f66825ff2a397a19008011376533747cd05c33a91f6248c0b8b0edf0448d7c167ebfba34786088f1b7eb11c65be7dfc3
languageName: node
linkType: hard
@@ -813,13 +813,13 @@ __metadata:
linkType: hard
"@humanwhocodes/config-array@npm:^0.11.13":
- version: 0.11.13
- resolution: "@humanwhocodes/config-array@npm:0.11.13"
+ version: 0.11.14
+ resolution: "@humanwhocodes/config-array@npm:0.11.14"
dependencies:
- "@humanwhocodes/object-schema": ^2.0.1
- debug: ^4.1.1
+ "@humanwhocodes/object-schema": ^2.0.2
+ debug: ^4.3.1
minimatch: ^3.0.5
- checksum: f8ea57b0d7ed7f2d64cd3944654976829d9da91c04d9c860e18804729a33f7681f78166ef4c761850b8c324d362f7d53f14c5c44907a6b38b32c703ff85e4805
+ checksum: 861ccce9eaea5de19546653bccf75bf09fe878bc39c3aab00aeee2d2a0e654516adad38dd1098aab5e3af0145bbcbf3f309bdf4d964f8dab9dcd5834ae4c02f2
languageName: node
linkType: hard
@@ -830,10 +830,10 @@ __metadata:
languageName: node
linkType: hard
-"@humanwhocodes/object-schema@npm:^2.0.1":
- version: 2.0.1
- resolution: "@humanwhocodes/object-schema@npm:2.0.1"
- checksum: 24929487b1ed48795d2f08346a0116cc5ee4634848bce64161fb947109352c562310fd159fc64dda0e8b853307f5794605191a9547f7341158559ca3c8262a45
+"@humanwhocodes/object-schema@npm:^2.0.2":
+ version: 2.0.2
+ resolution: "@humanwhocodes/object-schema@npm:2.0.2"
+ checksum: 2fc11503361b5fb4f14714c700c02a3f4c7c93e9acd6b87a29f62c522d90470f364d6161b03d1cc618b979f2ae02aed1106fd29d302695d8927e2fc8165ba8ee
languageName: node
linkType: hard
@@ -1144,12 +1144,28 @@ __metadata:
linkType: hard
"@jridgewell/trace-mapping@npm:^0.3.12, @jridgewell/trace-mapping@npm:^0.3.17, @jridgewell/trace-mapping@npm:^0.3.18, @jridgewell/trace-mapping@npm:^0.3.9":
- version: 0.3.20
- resolution: "@jridgewell/trace-mapping@npm:0.3.20"
+ version: 0.3.21
+ resolution: "@jridgewell/trace-mapping@npm:0.3.21"
dependencies:
"@jridgewell/resolve-uri": ^3.1.0
"@jridgewell/sourcemap-codec": ^1.4.14
- checksum: cd1a7353135f385909468ff0cf20bdd37e59f2ee49a13a966dedf921943e222082c583ade2b579ff6cd0d8faafcb5461f253e1bf2a9f48fec439211fdbe788f5
+ checksum: e91d3943c6d84687503ba033600d42b2a81d9eaf32758fee06449cd1415c59b944af08841e99f030b71f83bb5f814969e96fc8aa29e469eb3ea1b46597d13cff
+ languageName: node
+ linkType: hard
+
+"@kwsites/file-exists@npm:^1.1.1":
+ version: 1.1.1
+ resolution: "@kwsites/file-exists@npm:1.1.1"
+ dependencies:
+ debug: ^4.1.1
+ checksum: 4ff945de7293285133aeae759caddc71e73c4a44a12fac710fdd4f574cce2671a3f89d8165fdb03d383cfc97f3f96f677d8de3c95133da3d0e12a123a23109fe
+ languageName: node
+ linkType: hard
+
+"@kwsites/promise-deferred@npm:^1.1.1":
+ version: 1.1.1
+ resolution: "@kwsites/promise-deferred@npm:1.1.1"
+ checksum: 07455477a0123d9a38afb503739eeff2c5424afa8d3dbdcc7f9502f13604488a4b1d9742fc7288832a52a6422cf1e1c0a1d51f69a39052f14d27c9a0420b6629
languageName: node
linkType: hard
@@ -1514,9 +1530,9 @@ __metadata:
linkType: hard
"@pkgr/core@npm:^0.1.0":
- version: 0.1.0
- resolution: "@pkgr/core@npm:0.1.0"
- checksum: eeff0e0e517b1ed10eb4c1a8971413a8349bbfdab727dbe7d4085fd94eab95f0c3beb51b9245fef30562849d2a7a119e07ca48c343c8c4ec4e64ee289f50fe5e
+ version: 0.1.1
+ resolution: "@pkgr/core@npm:0.1.1"
+ checksum: 6f25fd2e3008f259c77207ac9915b02f1628420403b2630c92a07ff963129238c9262afc9e84344c7a23b5cc1f3965e2cd17e3798219f5fd78a63d144d3cceba
languageName: node
linkType: hard
@@ -1542,11 +1558,11 @@ __metadata:
languageName: node
linkType: hard
-"@salesforce/cli-plugins-testkit@npm:^5.1.3":
- version: 5.1.3
- resolution: "@salesforce/cli-plugins-testkit@npm:5.1.3"
+"@salesforce/cli-plugins-testkit@npm:^5.1.7":
+ version: 5.1.7
+ resolution: "@salesforce/cli-plugins-testkit@npm:5.1.7"
dependencies:
- "@salesforce/core": ^6.4.2
+ "@salesforce/core": ^6.4.7
"@salesforce/kit": ^3.0.15
"@salesforce/ts-types": ^2.0.9
"@types/shelljs": ^0.8.15
@@ -1554,8 +1570,8 @@ __metadata:
jszip: ^3.10.1
shelljs: ^0.8.4
strip-ansi: 6.0.1
- ts-retry-promise: ^0.7.1
- checksum: 355ea0a4d4cdce72195a2506009f47d5a1662e342cbd5a5b7ab07c4467004078c3f57aed2a15178acd9b4e8bc7dc821467cbedb21e16d960bab502181e73d5c6
+ ts-retry-promise: ^0.8.0
+ checksum: 35f7214504dfab9ad4ac4bc0ee4c8601dc882d1a2592b4387977b2285ab6a3ceabf1b55b07db8daef81f3abe39c2a156e84e5793bea29959ffa346e452ec23e4
languageName: node
linkType: hard
@@ -1596,9 +1612,9 @@ __metadata:
languageName: node
linkType: hard
-"@salesforce/core@npm:^6.4.2":
- version: 6.4.4
- resolution: "@salesforce/core@npm:6.4.4"
+"@salesforce/core@npm:^6.4.7":
+ version: 6.4.7
+ resolution: "@salesforce/core@npm:6.4.7"
dependencies:
"@salesforce/kit": ^3.0.15
"@salesforce/schemas": ^1.6.1
@@ -1612,13 +1628,13 @@ __metadata:
jsforce: ^2.0.0-beta.29
jsonwebtoken: 9.0.2
jszip: 3.10.1
- pino: ^8.16.2
+ pino: ^8.17.2
pino-abstract-transport: ^1.1.0
pino-pretty: ^10.3.1
proper-lockfile: ^4.1.2
semver: ^7.5.4
ts-retry-promise: ^0.7.1
- checksum: 1d02c351063dea1b9b45e0a07674f09c6c24c7125c893137251d4608831d4838c3536b34e58cad442e90a74c5171e1c2f70bed63b1f6f7d178e24a3e0b562e07
+ checksum: 17d3aa73ef74aaf5fbe02122aeae655efb13254c597f007ad06ca73d487455a518e1c51cdcfc3b630a3b707d169153a603c7634d08739715daa7c841f30bf142
languageName: node
linkType: hard
@@ -1779,7 +1795,7 @@ __metadata:
languageName: node
linkType: hard
-"@sinonjs/text-encoding@npm:^0.7.1":
+"@sinonjs/text-encoding@npm:^0.7.1, @sinonjs/text-encoding@npm:^0.7.2":
version: 0.7.2
resolution: "@sinonjs/text-encoding@npm:0.7.2"
checksum: fe690002a32ba06906cf87e2e8fe84d1590294586f2a7fd180a65355b53660c155c3273d8011a5f2b77209b819aa7306678ae6e4aea0df014bd7ffd4bbbcf1ab
@@ -1875,90 +1891,90 @@ __metadata:
languageName: node
linkType: hard
-"@swc/core-darwin-arm64@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-darwin-arm64@npm:1.3.102"
+"@swc/core-darwin-arm64@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-darwin-arm64@npm:1.3.105"
conditions: os=darwin & cpu=arm64
languageName: node
linkType: hard
-"@swc/core-darwin-x64@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-darwin-x64@npm:1.3.102"
+"@swc/core-darwin-x64@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-darwin-x64@npm:1.3.105"
conditions: os=darwin & cpu=x64
languageName: node
linkType: hard
-"@swc/core-linux-arm-gnueabihf@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-linux-arm-gnueabihf@npm:1.3.102"
+"@swc/core-linux-arm-gnueabihf@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-linux-arm-gnueabihf@npm:1.3.105"
conditions: os=linux & cpu=arm
languageName: node
linkType: hard
-"@swc/core-linux-arm64-gnu@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-linux-arm64-gnu@npm:1.3.102"
+"@swc/core-linux-arm64-gnu@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-linux-arm64-gnu@npm:1.3.105"
conditions: os=linux & cpu=arm64 & libc=glibc
languageName: node
linkType: hard
-"@swc/core-linux-arm64-musl@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-linux-arm64-musl@npm:1.3.102"
+"@swc/core-linux-arm64-musl@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-linux-arm64-musl@npm:1.3.105"
conditions: os=linux & cpu=arm64 & libc=musl
languageName: node
linkType: hard
-"@swc/core-linux-x64-gnu@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-linux-x64-gnu@npm:1.3.102"
+"@swc/core-linux-x64-gnu@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-linux-x64-gnu@npm:1.3.105"
conditions: os=linux & cpu=x64 & libc=glibc
languageName: node
linkType: hard
-"@swc/core-linux-x64-musl@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-linux-x64-musl@npm:1.3.102"
+"@swc/core-linux-x64-musl@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-linux-x64-musl@npm:1.3.105"
conditions: os=linux & cpu=x64 & libc=musl
languageName: node
linkType: hard
-"@swc/core-win32-arm64-msvc@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-win32-arm64-msvc@npm:1.3.102"
+"@swc/core-win32-arm64-msvc@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-win32-arm64-msvc@npm:1.3.105"
conditions: os=win32 & cpu=arm64
languageName: node
linkType: hard
-"@swc/core-win32-ia32-msvc@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-win32-ia32-msvc@npm:1.3.102"
+"@swc/core-win32-ia32-msvc@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-win32-ia32-msvc@npm:1.3.105"
conditions: os=win32 & cpu=ia32
languageName: node
linkType: hard
-"@swc/core-win32-x64-msvc@npm:1.3.102":
- version: 1.3.102
- resolution: "@swc/core-win32-x64-msvc@npm:1.3.102"
+"@swc/core-win32-x64-msvc@npm:1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core-win32-x64-msvc@npm:1.3.105"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
-"@swc/core@npm:^1.3.102":
- version: 1.3.102
- resolution: "@swc/core@npm:1.3.102"
+"@swc/core@npm:^1.3.105":
+ version: 1.3.105
+ resolution: "@swc/core@npm:1.3.105"
dependencies:
- "@swc/core-darwin-arm64": 1.3.102
- "@swc/core-darwin-x64": 1.3.102
- "@swc/core-linux-arm-gnueabihf": 1.3.102
- "@swc/core-linux-arm64-gnu": 1.3.102
- "@swc/core-linux-arm64-musl": 1.3.102
- "@swc/core-linux-x64-gnu": 1.3.102
- "@swc/core-linux-x64-musl": 1.3.102
- "@swc/core-win32-arm64-msvc": 1.3.102
- "@swc/core-win32-ia32-msvc": 1.3.102
- "@swc/core-win32-x64-msvc": 1.3.102
+ "@swc/core-darwin-arm64": 1.3.105
+ "@swc/core-darwin-x64": 1.3.105
+ "@swc/core-linux-arm-gnueabihf": 1.3.105
+ "@swc/core-linux-arm64-gnu": 1.3.105
+ "@swc/core-linux-arm64-musl": 1.3.105
+ "@swc/core-linux-x64-gnu": 1.3.105
+ "@swc/core-linux-x64-musl": 1.3.105
+ "@swc/core-win32-arm64-msvc": 1.3.105
+ "@swc/core-win32-ia32-msvc": 1.3.105
+ "@swc/core-win32-x64-msvc": 1.3.105
"@swc/counter": ^0.1.1
"@swc/types": ^0.1.5
peerDependencies:
@@ -1987,7 +2003,7 @@ __metadata:
peerDependenciesMeta:
"@swc/helpers":
optional: true
- checksum: 45c0edb06f87a811e28fb3ed587fbe6b7ca67ff2440fe15666d43729788903a4af61e3b57842aecc0b2b70e3c9981b698d8233746ba94dfb5a19e1c62eea33ad
+ checksum: 5baa880bc92748ef4845d9c65eba5d6dd01adaa673854e20a5116f5e267c12180db50e563cf3c34a415772b9742d021176a9d9a91065c190ef6f54fefe85728c
languageName: node
linkType: hard
@@ -2033,6 +2049,13 @@ __metadata:
languageName: node
linkType: hard
+"@types/async@npm:^3.2.24":
+ version: 3.2.24
+ resolution: "@types/async@npm:3.2.24"
+ checksum: e52ee5e9b6e4354aba709551f1777080b16c07d91632ef2d3d6542ea838c84fae5a2811015327dd908ce7dc810daacc564ab99b9a5bc22e4f3f86819461e0970
+ languageName: node
+ linkType: hard
+
"@types/babel__core@npm:^7.1.14":
version: 7.20.5
resolution: "@types/babel__core@npm:7.20.5"
@@ -2221,12 +2244,12 @@ __metadata:
languageName: node
linkType: hard
-"@types/node@npm:*, @types/node@npm:^20.10.6":
- version: 20.10.6
- resolution: "@types/node@npm:20.10.6"
+"@types/node@npm:*":
+ version: 20.11.4
+ resolution: "@types/node@npm:20.11.4"
dependencies:
undici-types: ~5.26.4
- checksum: ada40e4ccbda3697dca88f8d13f4c996c493be6fbc15f5f5d3b91096d56bd700786a2c148a92a2b4c5d1f133379e63f754a786b3aebfc6a7d09fc7ea16dc017b
+ checksum: b9cf2c5397ea31f3355656edd204aee777a36db75b79b8b7aba2bed7ea5b29914fa808489da5c632c5eddbb33c3106188bef0bff3b7648bd39aa50dee466a73b
languageName: node
linkType: hard
@@ -2237,6 +2260,15 @@ __metadata:
languageName: node
linkType: hard
+"@types/node@npm:^20.11.5":
+ version: 20.11.5
+ resolution: "@types/node@npm:20.11.5"
+ dependencies:
+ undici-types: ~5.26.4
+ checksum: a542727de1334ae20a3ca034b0ecf4b464a57ca01efc4f9cf43bd9ab93896125ab3c2de060ecd8f6ae23b86c6bf3463f681b643e69c032c6a662d376c98a6092
+ languageName: node
+ linkType: hard
+
"@types/normalize-package-data@npm:^2.4.0":
version: 2.4.4
resolution: "@types/normalize-package-data@npm:2.4.4"
@@ -2269,11 +2301,11 @@ __metadata:
linkType: hard
"@types/sinon@npm:*":
- version: 17.0.2
- resolution: "@types/sinon@npm:17.0.2"
+ version: 17.0.3
+ resolution: "@types/sinon@npm:17.0.3"
dependencies:
"@types/sinonjs__fake-timers": "*"
- checksum: 3a56615f2dc7d0b67d3e4b8ae358df2ff2164d89fabb22e9b46e6afe7d4df844a354ea65d409af9baf29ac0103ea562ab44dd0176405a9cf82a4ff183939f22f
+ checksum: c8e9956d9c90fe1ec1cc43085ae48897f93f9ea86e909ab47f255ea71f5229651faa070393950fb6923aef426c84e92b375503f9f8886ef44668b82a8ee49e9a
languageName: node
linkType: hard
@@ -2314,15 +2346,15 @@ __metadata:
languageName: node
linkType: hard
-"@typescript-eslint/eslint-plugin@npm:^6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/eslint-plugin@npm:6.17.0"
+"@typescript-eslint/eslint-plugin@npm:^6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/eslint-plugin@npm:6.19.0"
dependencies:
"@eslint-community/regexpp": ^4.5.1
- "@typescript-eslint/scope-manager": 6.17.0
- "@typescript-eslint/type-utils": 6.17.0
- "@typescript-eslint/utils": 6.17.0
- "@typescript-eslint/visitor-keys": 6.17.0
+ "@typescript-eslint/scope-manager": 6.19.0
+ "@typescript-eslint/type-utils": 6.19.0
+ "@typescript-eslint/utils": 6.19.0
+ "@typescript-eslint/visitor-keys": 6.19.0
debug: ^4.3.4
graphemer: ^1.4.0
ignore: ^5.2.4
@@ -2335,44 +2367,44 @@ __metadata:
peerDependenciesMeta:
typescript:
optional: true
- checksum: 169646a705fdd1bc2a0d78678dbf3557ff3e534e9d4a11f7b5bba1d9f5cbec821f8c16b260413203efc8d6e0c0a3d7f9332bb1476e3dac80e60aa16eb9a0ad11
+ checksum: 9880567d52d4e6559e2343caeed68f856d593b42816b8f705cd98d5a5b46cc620e3bebaaf08bbc982061bba18e5be94d6c539c0c816e8772ddabba0ad4e9363e
languageName: node
linkType: hard
-"@typescript-eslint/parser@npm:^6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/parser@npm:6.17.0"
+"@typescript-eslint/parser@npm:^6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/parser@npm:6.19.0"
dependencies:
- "@typescript-eslint/scope-manager": 6.17.0
- "@typescript-eslint/types": 6.17.0
- "@typescript-eslint/typescript-estree": 6.17.0
- "@typescript-eslint/visitor-keys": 6.17.0
+ "@typescript-eslint/scope-manager": 6.19.0
+ "@typescript-eslint/types": 6.19.0
+ "@typescript-eslint/typescript-estree": 6.19.0
+ "@typescript-eslint/visitor-keys": 6.19.0
debug: ^4.3.4
peerDependencies:
eslint: ^7.0.0 || ^8.0.0
peerDependenciesMeta:
typescript:
optional: true
- checksum: c48864aebf364332540f520d84630a6bb3e2ddc84492d75c14a453964b669a37f1fd43b60469e3683e618e8e8d3d7747baffe92e408599d5df6869cae86ac9e1
+ checksum: 0ac91ff83fdf693de4494b45be79f25803ea6ca3ee717e4f96785b7ffc1da0180adb0426b61bc6eff5666c8ef9ea58c50efbd4351ef9018c0050116cbd74a62b
languageName: node
linkType: hard
-"@typescript-eslint/scope-manager@npm:6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/scope-manager@npm:6.17.0"
+"@typescript-eslint/scope-manager@npm:6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/scope-manager@npm:6.19.0"
dependencies:
- "@typescript-eslint/types": 6.17.0
- "@typescript-eslint/visitor-keys": 6.17.0
- checksum: 6eabac1e52cd25714ab176c7bbf9919d065febf4580620eb067ab1b41607f5e592857bd831a2ab41daa873af4011217dbcae55ed248855e381127f1cabcd2d2c
+ "@typescript-eslint/types": 6.19.0
+ "@typescript-eslint/visitor-keys": 6.19.0
+ checksum: 47d9d1b70cd64f9d1bb717090850e0ff1a34e453c28b43fd0cecaea4db05cacebd60f5da55b35c4b3cc01491f02e9de358f82a0822b27c00e80e3d1a27de32d1
languageName: node
linkType: hard
-"@typescript-eslint/type-utils@npm:6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/type-utils@npm:6.17.0"
+"@typescript-eslint/type-utils@npm:6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/type-utils@npm:6.19.0"
dependencies:
- "@typescript-eslint/typescript-estree": 6.17.0
- "@typescript-eslint/utils": 6.17.0
+ "@typescript-eslint/typescript-estree": 6.19.0
+ "@typescript-eslint/utils": 6.19.0
debug: ^4.3.4
ts-api-utils: ^1.0.1
peerDependencies:
@@ -2380,23 +2412,23 @@ __metadata:
peerDependenciesMeta:
typescript:
optional: true
- checksum: bb6f824c1c7f8d25a21b7218a5bcb74e58c38121f85418eb1639f2931c6149285c2ede96dd677a3e7dc64886cc7640d74be6001d970c3ac9c9a4d889315c5d15
+ checksum: a88f022617be636f43429a7c7c5cd2e0e29955e96d4a9fed7d03467dc4a432b1240a71009d62213604ddb3522be9694e6b78882ee805687cda107021d1ddb203
languageName: node
linkType: hard
-"@typescript-eslint/types@npm:6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/types@npm:6.17.0"
- checksum: a199516230b505f85de1b99cdf22c526cbae7604fa2dd0dd24e8bba5de45aeaee231263e7e59843af7b226cb91c4ba5447d06517a1a73b511a94c6b483af0d5b
+"@typescript-eslint/types@npm:6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/types@npm:6.19.0"
+ checksum: 1371b5ba41c1d2879b3c2823ab01a30cf034e476ef53ff2a7f9e9a4a0056dfbbfecd3143031b05430aa6c749233cacbd01b72cea38a9ece1c6cf95a5cd43da6a
languageName: node
linkType: hard
-"@typescript-eslint/typescript-estree@npm:6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/typescript-estree@npm:6.17.0"
+"@typescript-eslint/typescript-estree@npm:6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/typescript-estree@npm:6.19.0"
dependencies:
- "@typescript-eslint/types": 6.17.0
- "@typescript-eslint/visitor-keys": 6.17.0
+ "@typescript-eslint/types": 6.19.0
+ "@typescript-eslint/visitor-keys": 6.19.0
debug: ^4.3.4
globby: ^11.1.0
is-glob: ^4.0.3
@@ -2406,34 +2438,34 @@ __metadata:
peerDependenciesMeta:
typescript:
optional: true
- checksum: 4bf7811ddae66361cad55f7a6fcf9975eb77456ceb2eca5d7a6228387737845bdfe1b9eef4c76d5d6b7c7d6029a8f62bc67b509c0724cd37395ae16eb07dd7ab
+ checksum: 919f9588840cdab7e0ef6471f4c35d602523b142b2cffeabe9171d6ce65eb7f41614d0cb17e008e0d8e796374821ab053ced35b84642c3b1d491987362f2fdb5
languageName: node
linkType: hard
-"@typescript-eslint/utils@npm:6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/utils@npm:6.17.0"
+"@typescript-eslint/utils@npm:6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/utils@npm:6.19.0"
dependencies:
"@eslint-community/eslint-utils": ^4.4.0
"@types/json-schema": ^7.0.12
"@types/semver": ^7.5.0
- "@typescript-eslint/scope-manager": 6.17.0
- "@typescript-eslint/types": 6.17.0
- "@typescript-eslint/typescript-estree": 6.17.0
+ "@typescript-eslint/scope-manager": 6.19.0
+ "@typescript-eslint/types": 6.19.0
+ "@typescript-eslint/typescript-estree": 6.19.0
semver: ^7.5.4
peerDependencies:
eslint: ^7.0.0 || ^8.0.0
- checksum: 2eea8fd3763b2ab9d86503c68b4d61df81071fd38851b8ba920d53b055c352d13e192a3d15ca853f11aee818c61e8af65946e963aa0e9b18d19e3254881bded0
+ checksum: 05a26251a526232b08850b6c3327637213ef989453e353f3a8255433b74893a70d5c38369c528b762e853b7586d7830d728b372494e65f37770ecb05a88112d4
languageName: node
linkType: hard
-"@typescript-eslint/visitor-keys@npm:6.17.0":
- version: 6.17.0
- resolution: "@typescript-eslint/visitor-keys@npm:6.17.0"
+"@typescript-eslint/visitor-keys@npm:6.19.0":
+ version: 6.19.0
+ resolution: "@typescript-eslint/visitor-keys@npm:6.19.0"
dependencies:
- "@typescript-eslint/types": 6.17.0
+ "@typescript-eslint/types": 6.19.0
eslint-visitor-keys: ^3.4.1
- checksum: e98755087bd067388d9a9182375e53f590183ca656d02b3d05d9718bab2ac571832fd16691060c7c979fd941e9d4b7923d8975632923697de0691f50fc97c8ac
+ checksum: 35b11143e1b55ecf01e0f513085df2cc83d0781f4a8354dc10f6ec3356f66b91a1ed8abadb6fb66af1c1746f9c874eabc8b5636882466e229cda5d6a39aada08
languageName: node
linkType: hard
@@ -2444,60 +2476,60 @@ __metadata:
languageName: node
linkType: hard
-"@vue/compiler-core@npm:3.4.5":
- version: 3.4.5
- resolution: "@vue/compiler-core@npm:3.4.5"
+"@vue/compiler-core@npm:3.4.14":
+ version: 3.4.14
+ resolution: "@vue/compiler-core@npm:3.4.14"
dependencies:
"@babel/parser": ^7.23.6
- "@vue/shared": 3.4.5
+ "@vue/shared": 3.4.14
entities: ^4.5.0
estree-walker: ^2.0.2
source-map-js: ^1.0.2
- checksum: c8d917d09a9c911bb6451235d5d6db8450905cfbba855939c3f27dcdf29b2a67230372cfa5cd9d1f64d125eacd42b87997ff82319ee5dcbe3a725fa1e1e68f2a
+ checksum: df7767c63273555ec46719757d1b240ef4ad6643847fbf574b30c2e7049e640dfedb698cffb06ad399257f452eb8a08484ba0ff80ff5650e1ea4257d0c3374d6
languageName: node
linkType: hard
-"@vue/compiler-dom@npm:3.4.5":
- version: 3.4.5
- resolution: "@vue/compiler-dom@npm:3.4.5"
+"@vue/compiler-dom@npm:3.4.14":
+ version: 3.4.14
+ resolution: "@vue/compiler-dom@npm:3.4.14"
dependencies:
- "@vue/compiler-core": 3.4.5
- "@vue/shared": 3.4.5
- checksum: 4e7177ff2a3e12002c0360c4287bfae1235330921cf4730bf23cdd67bd0f5900334d5a59c091d279ce1c0ebff7a6b90f647094367eab29598312e75bf400893b
+ "@vue/compiler-core": 3.4.14
+ "@vue/shared": 3.4.14
+ checksum: 03b06006d780819ccb758baf0ac243d7c3c6502405149fe162f60389529ca02af9af47d2674047908952f63143e5e539a74eee27e51a8ad79b91bb9bdb357aa7
languageName: node
linkType: hard
"@vue/compiler-sfc@npm:^3.3.4":
- version: 3.4.5
- resolution: "@vue/compiler-sfc@npm:3.4.5"
+ version: 3.4.14
+ resolution: "@vue/compiler-sfc@npm:3.4.14"
dependencies:
"@babel/parser": ^7.23.6
- "@vue/compiler-core": 3.4.5
- "@vue/compiler-dom": 3.4.5
- "@vue/compiler-ssr": 3.4.5
- "@vue/shared": 3.4.5
+ "@vue/compiler-core": 3.4.14
+ "@vue/compiler-dom": 3.4.14
+ "@vue/compiler-ssr": 3.4.14
+ "@vue/shared": 3.4.14
estree-walker: ^2.0.2
magic-string: ^0.30.5
- postcss: ^8.4.32
+ postcss: ^8.4.33
source-map-js: ^1.0.2
- checksum: 2b37b51133f84709fcace36e83d411b4feeb320d69bb5a8c77d1e4f782e6d6600f36b199d2889e9e6f1434c8a159585145ea142536f708e5683093fe3c013dea
+ checksum: 2ce5c4e6323cf6eb304bc56a4b0079cb3c9c064493e02f29df3db5a08c99c1dc5eff88d7e97f1cb1177d66b397fbf40c9f3a2a19f71688e563fbf10d06c6a490
languageName: node
linkType: hard
-"@vue/compiler-ssr@npm:3.4.5":
- version: 3.4.5
- resolution: "@vue/compiler-ssr@npm:3.4.5"
+"@vue/compiler-ssr@npm:3.4.14":
+ version: 3.4.14
+ resolution: "@vue/compiler-ssr@npm:3.4.14"
dependencies:
- "@vue/compiler-dom": 3.4.5
- "@vue/shared": 3.4.5
- checksum: 091704226210d273d23669334f25a0bb06ebcb29b494acb0d7d2a09faa19f122abfcd066ea31022cd816c0103a43ac7111e1ead4e7ed910a31bc364a01112dc7
+ "@vue/compiler-dom": 3.4.14
+ "@vue/shared": 3.4.14
+ checksum: faff2e3815020ec232d50835c28e99108d50f88795adba0d08faed330405c48d16be10648ccef9a4d6fad9ac2e6d747b9d15b0cfaddb98ae305212483d40d1da
languageName: node
linkType: hard
-"@vue/shared@npm:3.4.5":
- version: 3.4.5
- resolution: "@vue/shared@npm:3.4.5"
- checksum: 32b9008e6222326d69d941a34119101f30eb2104952a720df75db972ac2e41f073a57ad9c325a47be5117b00f9e17a81417c0a54586205a6c0be227b07cd33ea
+"@vue/shared@npm:3.4.14":
+ version: 3.4.14
+ resolution: "@vue/shared@npm:3.4.14"
+ checksum: 6945de71cc74b4f26e6e3310f4e98f627ed950ffebd32ff5e864df9001708918432fec8fe2840797f7915163c4ffaee334d73fac812f96a3b1ea15bd08a5ceea
languageName: node
linkType: hard
@@ -2546,9 +2578,9 @@ __metadata:
linkType: hard
"acorn-walk@npm:^8.1.1":
- version: 8.3.1
- resolution: "acorn-walk@npm:8.3.1"
- checksum: 5c8926ddb5400bc825b6baca782931f9df4ace603ba1a517f5243290fd9cdb089d52877840687b5d5c939591ebc314e2e63721514feaa37c6829c828f2b940ce
+ version: 8.3.2
+ resolution: "acorn-walk@npm:8.3.2"
+ checksum: 3626b9d26a37b1b427796feaa5261faf712307a8920392c8dce9a5739fb31077667f4ad2ec71c7ac6aaf9f61f04a9d3d67ff56f459587206fc04aa31c27ef392
languageName: node
linkType: hard
@@ -2945,7 +2977,14 @@ __metadata:
languageName: node
linkType: hard
-"async@npm:^3.2.3, async@npm:^3.2.4":
+"async-lock@npm:^1.1.0":
+ version: 1.4.1
+ resolution: "async-lock@npm:1.4.1"
+ checksum: 29e70cd892932b7c202437786cedc39ff62123cb6941014739bd3cabd6106326416e9e7c21285a5d1dc042cad239a0f7ec9c44658491ee4a615fd36a21c1d10a
+ languageName: node
+ linkType: hard
+
+"async@npm:^3.2.3, async@npm:^3.2.4, async@npm:^3.2.5":
version: 3.2.5
resolution: "async@npm:3.2.5"
checksum: 5ec77f1312301dee02d62140a6b1f7ee0edd2a0f983b6fd2b0849b969f245225b990b47b8243e7b9ad16451a53e7f68e753700385b706198ced888beedba3af4
@@ -3331,9 +3370,9 @@ __metadata:
linkType: hard
"caniuse-lite@npm:^1.0.30001565":
- version: 1.0.30001574
- resolution: "caniuse-lite@npm:1.0.30001574"
- checksum: 4064719755371a9716446ee79714ff5cee347861492d6325c2e3db00c37cb27f184742f53f2b6e4c15cc2e1a47fae32cc44c9b15e957a9290982bf4108933245
+ version: 1.0.30001577
+ resolution: "caniuse-lite@npm:1.0.30001577"
+ checksum: 26d2b4a498a2a6ad5a33c44c18a32497b59a3bb1963b8b9221ddcbfe166ed7f7a1f75a3de040870cdc2467ce35199c643cfe8c45e7208d8bc033e7877214b0f9
languageName: node
linkType: hard
@@ -3361,8 +3400,8 @@ __metadata:
linkType: hard
"chai@npm:^4.3.10":
- version: 4.4.0
- resolution: "chai@npm:4.4.0"
+ version: 4.4.1
+ resolution: "chai@npm:4.4.1"
dependencies:
assertion-error: ^1.1.0
check-error: ^1.0.3
@@ -3371,7 +3410,7 @@ __metadata:
loupe: ^2.3.6
pathval: ^1.1.1
type-detect: ^4.0.8
- checksum: 2509a0acc2707f0664157cdc9d72b1466c71cedba19a22fec80ae550593fdcfc108fd86d4a7fec3be631b6c0589bf4f05652ee73fa55dbc748387ff6cc85c6b3
+ checksum: 9ab84f36eb8e0b280c56c6c21ca4da5933132cd8a0c89c384f1497f77953640db0bc151edd47f81748240a9fab57b78f7d925edfeedc8e8fc98016d71f40c36e
languageName: node
linkType: hard
@@ -3493,6 +3532,13 @@ __metadata:
languageName: node
linkType: hard
+"clean-git-ref@npm:^2.0.1":
+ version: 2.0.1
+ resolution: "clean-git-ref@npm:2.0.1"
+ checksum: b25f585ed47040ea5d699d40a2bb84d1f35afd651f3fcc05fb077224358ffd3d7509fc9edbfc4570f1fc732c987e03ac7d8ec31524ac503ac35c53cb1f5e3bf9
+ languageName: node
+ linkType: hard
+
"clean-stack@npm:^2.0.0":
version: 2.2.0
resolution: "clean-stack@npm:2.2.0"
@@ -4090,6 +4136,15 @@ __metadata:
languageName: node
linkType: hard
+"decompress-response@npm:^6.0.0":
+ version: 6.0.0
+ resolution: "decompress-response@npm:6.0.0"
+ dependencies:
+ mimic-response: ^3.1.0
+ checksum: d377cf47e02d805e283866c3f50d3d21578b779731e8c5072d6ce8c13cc31493db1c2f6784da9d1d5250822120cefa44f1deab112d5981015f2e17444b763812
+ languageName: node
+ linkType: hard
+
"dedent@npm:^1.0.0":
version: 1.5.1
resolution: "dedent@npm:1.5.1"
@@ -4247,6 +4302,13 @@ __metadata:
languageName: node
linkType: hard
+"diff3@npm:0.0.3":
+ version: 0.0.3
+ resolution: "diff3@npm:0.0.3"
+ checksum: 28d883f1057b9873dfcb38cd2750337e6b32bf184bb1c0fb3292efeb83c597f1ce9b8f508bdd0d623a58b9ca1c917b1f297b90cb7fce3a62b26b0dde496f70e6
+ languageName: node
+ linkType: hard
+
"diff@npm:5.0.0":
version: 5.0.0
resolution: "diff@npm:5.0.0"
@@ -4365,9 +4427,9 @@ __metadata:
linkType: hard
"electron-to-chromium@npm:^1.4.601":
- version: 1.4.622
- resolution: "electron-to-chromium@npm:1.4.622"
- checksum: 38da56a5f723626880c1790555a15c26b40b1470b68298dceb246f3fdbfc635753d924a331cd9b27d21f8f2b4ae63a2bbf9aae58f248d214bd6d2520a8cda7bc
+ version: 1.4.633
+ resolution: "electron-to-chromium@npm:1.4.633"
+ checksum: 401fb8240018c20d6de12e993622cfad78e0dbd9fc5002d9258bf6f5bcc3003aa2851d9bcf944b878acd9052aa7b8d67ad31415bf18a40d73e478342f2449064
languageName: node
linkType: hard
@@ -4635,9 +4697,9 @@ __metadata:
languageName: node
linkType: hard
-"eslint-plugin-prettier@npm:^5.1.2":
- version: 5.1.2
- resolution: "eslint-plugin-prettier@npm:5.1.2"
+"eslint-plugin-prettier@npm:^5.1.3":
+ version: 5.1.3
+ resolution: "eslint-plugin-prettier@npm:5.1.3"
dependencies:
prettier-linter-helpers: ^1.0.0
synckit: ^0.8.6
@@ -4651,7 +4713,7 @@ __metadata:
optional: true
eslint-config-prettier:
optional: true
- checksum: ee972ca16c1d05773abe370dcd43c71ffe729ad4eca86752e21be3d4afbc18f04184b4143e9d17869395d534eb8ad685b7589fcdc2706cb734fe17c3c3f4e6cd
+ checksum: eb2a7d46a1887e1b93788ee8f8eb81e0b6b2a6f5a66a62bc6f375b033fc4e7ca16448da99380be800042786e76cf5c0df9c87a51a2c9b960ed47acbd7c0b9381
languageName: node
linkType: hard
@@ -4991,14 +5053,14 @@ __metadata:
languageName: node
linkType: hard
-"fast-xml-parser@npm:^4.3.2":
- version: 4.3.2
- resolution: "fast-xml-parser@npm:4.3.2"
+"fast-xml-parser@npm:^4.3.3":
+ version: 4.3.3
+ resolution: "fast-xml-parser@npm:4.3.3"
dependencies:
strnum: ^1.0.5
bin:
fxparser: src/cli/cli.js
- checksum: d507ce2efa5fd13d0a5ba28bd76dd68f2fc30ad8748357c37b70f360d19417866d79e35a688af067d5bceaaa796033fa985206aef9692f7a421e1326b6e73309
+ checksum: 5e272a0dbb73c4341487935cd6f37df360999f680c0638efec0974dfc58071fb803919f7a030941a7f5bb894794a2f3356d4b863ba2fb9438191795004cdf36e
languageName: node
linkType: hard
@@ -5673,7 +5735,7 @@ __metadata:
languageName: node
linkType: hard
-"has-property-descriptors@npm:^1.0.0":
+"has-property-descriptors@npm:^1.0.0, has-property-descriptors@npm:^1.0.1":
version: 1.0.1
resolution: "has-property-descriptors@npm:1.0.1"
dependencies:
@@ -5895,7 +5957,7 @@ __metadata:
languageName: node
linkType: hard
-"ignore@npm:^5.1.1, ignore@npm:^5.2.0, ignore@npm:^5.2.4, ignore@npm:^5.3.0":
+"ignore@npm:^5.1.1, ignore@npm:^5.1.4, ignore@npm:^5.2.0, ignore@npm:^5.2.4, ignore@npm:^5.3.0":
version: 5.3.0
resolution: "ignore@npm:5.3.0"
checksum: 2736da6621f14ced652785cb05d86301a66d70248597537176612bd0c8630893564bd5f6421f8806b09e8472e75c591ef01672ab8059c07c6eb2c09cefe04bf9
@@ -6400,6 +6462,27 @@ __metadata:
languageName: node
linkType: hard
+"isomorphic-git@npm:^1.25.3":
+ version: 1.25.3
+ resolution: "isomorphic-git@npm:1.25.3"
+ dependencies:
+ async-lock: ^1.1.0
+ clean-git-ref: ^2.0.1
+ crc-32: ^1.2.0
+ diff3: 0.0.3
+ ignore: ^5.1.4
+ minimisted: ^2.0.0
+ pako: ^1.0.10
+ pify: ^4.0.1
+ readable-stream: ^3.4.0
+ sha.js: ^2.4.9
+ simple-get: ^4.0.1
+ bin:
+ isogit: cli.cjs
+ checksum: 747da1bd0435898a02f8c1ba07ce66c704ea311338b66a44830a5ce1fe9a256a3ea1ec9be5baa087f88134aa686b5d0c6dc2b9338b1fd523f2869ef2480e4d4e
+ languageName: node
+ linkType: hard
+
"istanbul-lib-coverage@npm:^3.0.0, istanbul-lib-coverage@npm:^3.2.0":
version: 3.2.2
resolution: "istanbul-lib-coverage@npm:3.2.2"
@@ -7211,6 +7294,13 @@ __metadata:
languageName: node
linkType: hard
+"just-extend@npm:^6.2.0":
+ version: 6.2.0
+ resolution: "just-extend@npm:6.2.0"
+ checksum: 022024d6f687c807963b97a24728a378799f7e4af7357d1c1f90dedb402943d5c12be99a5136654bed8362c37a358b1793feaad3366896f239a44e17c5032d86
+ languageName: node
+ linkType: hard
+
"jwa@npm:^1.4.1":
version: 1.4.1
resolution: "jwa@npm:1.4.1"
@@ -7811,6 +7901,13 @@ __metadata:
languageName: node
linkType: hard
+"mimic-response@npm:^3.1.0":
+ version: 3.1.0
+ resolution: "mimic-response@npm:3.1.0"
+ checksum: 25739fee32c17f433626bf19f016df9036b75b3d84a3046c7d156e72ec963dd29d7fc8a302f55a3d6c5a4ff24259676b15d915aad6480815a969ff2ec0836867
+ languageName: node
+ linkType: hard
+
"min-indent@npm:^1.0.0":
version: 1.0.1
resolution: "min-indent@npm:1.0.1"
@@ -7874,13 +7971,22 @@ __metadata:
languageName: node
linkType: hard
-"minimist@npm:^1.2.0, minimist@npm:^1.2.3, minimist@npm:^1.2.6":
+"minimist@npm:^1.2.0, minimist@npm:^1.2.3, minimist@npm:^1.2.5, minimist@npm:^1.2.6":
version: 1.2.8
resolution: "minimist@npm:1.2.8"
checksum: 75a6d645fb122dad29c06a7597bddea977258957ed88d7a6df59b5cd3fe4a527e253e9bbf2e783e4b73657f9098b96a5fe96ab8a113655d4109108577ecf85b0
languageName: node
linkType: hard
+"minimisted@npm:^2.0.0":
+ version: 2.0.1
+ resolution: "minimisted@npm:2.0.1"
+ dependencies:
+ minimist: ^1.2.5
+ checksum: 6bc3df14558481c96764cfd6bf77a59f5838dec715c38c1e338193c1e56f536ba792ccbae84ff6632d13a7dd37ac888141c091d23733229b8d100148eec930aa
+ languageName: node
+ linkType: hard
+
"minipass-collect@npm:^2.0.1":
version: 2.0.1
resolution: "minipass-collect@npm:2.0.1"
@@ -8194,15 +8300,15 @@ __metadata:
linkType: hard
"nise@npm:^5.1.5":
- version: 5.1.5
- resolution: "nise@npm:5.1.5"
+ version: 5.1.7
+ resolution: "nise@npm:5.1.7"
dependencies:
- "@sinonjs/commons": ^2.0.0
- "@sinonjs/fake-timers": ^10.0.2
- "@sinonjs/text-encoding": ^0.7.1
- just-extend: ^4.0.2
- path-to-regexp: ^1.7.0
- checksum: c763dc62c5796cafa5c9268e14a5b34db6e6fa2f1dbc57a891fe5d7ea632a87868e22b5bb34965006f984630793ea11368351e94971163228d9e20b2e88edce8
+ "@sinonjs/commons": ^3.0.0
+ "@sinonjs/fake-timers": ^11.2.2
+ "@sinonjs/text-encoding": ^0.7.2
+ just-extend: ^6.2.0
+ path-to-regexp: ^6.2.1
+ checksum: bc1f43825ffb9572a1abea37386c3d847b965d7aca40ca899a6f37400cf2d6405a4738efa03991c276cb738e5b80bdc24dffeba24c45085d1cc6251199f27312
languageName: node
linkType: hard
@@ -8228,13 +8334,13 @@ __metadata:
linkType: hard
"nock@npm:^13.3.3":
- version: 13.4.0
- resolution: "nock@npm:13.4.0"
+ version: 13.5.0
+ resolution: "nock@npm:13.5.0"
dependencies:
debug: ^4.1.0
json-stringify-safe: ^5.0.1
propagate: ^2.0.0
- checksum: 30c3751854f9c412df5f99e01eeaef25b2583d3cae80b8c46524acb39d8b7fa61043603472ad94a3adc4b7d1e0f3098e6bb06e787734cbfbde2751891115b311
+ checksum: b50d680da3287859f81626b426234ed81a43aae16455efd259a3044f4a6139677d97c7001f532def6b308e7c3ecf7517925f2a34b3aaf80ee2ced920cfce2799
languageName: node
linkType: hard
@@ -8639,7 +8745,7 @@ __metadata:
languageName: node
linkType: hard
-"pako@npm:~1.0.2":
+"pako@npm:^1.0.10, pako@npm:~1.0.2":
version: 1.0.11
resolution: "pako@npm:1.0.11"
checksum: 1be2bfa1f807608c7538afa15d6f25baa523c30ec870a3228a89579e474a4d992f4293859524e46d5d87fd30fa17c5edf34dbef0671251d9749820b488660b16
@@ -8785,6 +8891,13 @@ __metadata:
languageName: node
linkType: hard
+"path-to-regexp@npm:^6.2.1":
+ version: 6.2.1
+ resolution: "path-to-regexp@npm:6.2.1"
+ checksum: f0227af8284ea13300f4293ba111e3635142f976d4197f14d5ad1f124aebd9118783dd2e5f1fe16f7273743cc3dbeddfb7493f237bb27c10fdae07020cc9b698
+ languageName: node
+ linkType: hard
+
"path-type@npm:^4.0.0":
version: 4.0.0
resolution: "path-type@npm:4.0.0"
@@ -8822,6 +8935,13 @@ __metadata:
languageName: node
linkType: hard
+"pify@npm:^4.0.1":
+ version: 4.0.1
+ resolution: "pify@npm:4.0.1"
+ checksum: 9c4e34278cb09987685fa5ef81499c82546c033713518f6441778fbec623fc708777fe8ac633097c72d88470d5963094076c7305cafc7ad340aae27cfacd856b
+ languageName: node
+ linkType: hard
+
"pino-abstract-transport@npm:^1.0.0, pino-abstract-transport@npm:^1.1.0, pino-abstract-transport@npm:v1.1.0":
version: 1.1.0
resolution: "pino-abstract-transport@npm:1.1.0"
@@ -8863,7 +8983,7 @@ __metadata:
languageName: node
linkType: hard
-"pino@npm:^8.16.2":
+"pino@npm:^8.17.2":
version: 8.17.2
resolution: "pino@npm:8.17.2"
dependencies:
@@ -8916,7 +9036,7 @@ __metadata:
languageName: node
linkType: hard
-"postcss@npm:^8.4.32":
+"postcss@npm:^8.4.33":
version: 8.4.33
resolution: "postcss@npm:8.4.33"
dependencies:
@@ -8943,12 +9063,12 @@ __metadata:
languageName: node
linkType: hard
-"prettier@npm:^3.1.1":
- version: 3.1.1
- resolution: "prettier@npm:3.1.1"
+"prettier@npm:^3.2.4":
+ version: 3.2.4
+ resolution: "prettier@npm:3.2.4"
bin:
prettier: bin/prettier.cjs
- checksum: e386855e3a1af86a748e16953f168be555ce66d6233f4ba54eb6449b88eb0c6b2ca79441b11eae6d28a7f9a5c96440ce50864b9d5f6356d331d39d6bb66c648e
+ checksum: 6ec9385a836e0b9bac549e585101c086d1521c31d7b882d5c8bb7d7646da0693da5f31f4fff6dc080710e5e2d34c85e6fb2f8766876b3645c8be2f33b9c3d1a3
languageName: node
linkType: hard
@@ -9532,14 +9652,14 @@ __metadata:
linkType: hard
"safe-array-concat@npm:^1.0.1":
- version: 1.0.1
- resolution: "safe-array-concat@npm:1.0.1"
+ version: 1.1.0
+ resolution: "safe-array-concat@npm:1.1.0"
dependencies:
- call-bind: ^1.0.2
- get-intrinsic: ^1.2.1
+ call-bind: ^1.0.5
+ get-intrinsic: ^1.2.2
has-symbols: ^1.0.3
isarray: ^2.0.5
- checksum: 001ecf1d8af398251cbfabaf30ed66e3855127fbceee178179524b24160b49d15442f94ed6c0db0b2e796da76bb05b73bf3cc241490ec9c2b741b41d33058581
+ checksum: 5c71eaa999168ee7474929f1cd3aae80f486353a651a094d9968936692cf90aa065224929a6486dcda66334a27dce4250a83612f9e0fef6dced1a925d3ac7296
languageName: node
linkType: hard
@@ -9565,13 +9685,13 @@ __metadata:
linkType: hard
"safe-regex-test@npm:^1.0.0":
- version: 1.0.0
- resolution: "safe-regex-test@npm:1.0.0"
+ version: 1.0.2
+ resolution: "safe-regex-test@npm:1.0.2"
dependencies:
- call-bind: ^1.0.2
- get-intrinsic: ^1.1.3
+ call-bind: ^1.0.5
+ get-intrinsic: ^1.2.2
is-regex: ^1.1.4
- checksum: bc566d8beb8b43c01b94e67de3f070fd2781685e835959bbbaaec91cc53381145ca91f69bd837ce6ec244817afa0a5e974fc4e40a2957f0aca68ac3add1ddd34
+ checksum: 4af5ce05a2daa4f6d4bfd5a3c64fc33d6b886f6592122e93c0efad52f7147b9b605e5ffc03c269a1e3d1f8db2a23bc636628a961c9fd65bafdc09503330673fd
languageName: node
linkType: hard
@@ -9681,14 +9801,15 @@ __metadata:
linkType: hard
"set-function-length@npm:^1.1.1":
- version: 1.1.1
- resolution: "set-function-length@npm:1.1.1"
+ version: 1.2.0
+ resolution: "set-function-length@npm:1.2.0"
dependencies:
define-data-property: ^1.1.1
- get-intrinsic: ^1.2.1
+ function-bind: ^1.1.2
+ get-intrinsic: ^1.2.2
gopd: ^1.0.1
- has-property-descriptors: ^1.0.0
- checksum: c131d7569cd7e110cafdfbfbb0557249b538477624dfac4fc18c376d879672fa52563b74029ca01f8f4583a8acb35bb1e873d573a24edb80d978a7ee607c6e06
+ has-property-descriptors: ^1.0.1
+ checksum: 63e34b45a2ff9abb419f52583481bf8ba597d33c0c85e56999085eb6078a0f7fbb4222051981c287feceeb358aa7789e7803cea2c82ac94c0ab37059596aff79
languageName: node
linkType: hard
@@ -9718,36 +9839,40 @@ __metadata:
"@commitlint/config-conventional": ^18.4.4
"@jest/globals": ^29.7.0
"@oclif/dev-cli": ^1.26.10
- "@salesforce/cli-plugins-testkit": ^5.1.3
+ "@salesforce/cli-plugins-testkit": ^5.1.7
"@salesforce/command": ^5.3.9
"@salesforce/dev-config": ^4.1.0
"@salesforce/ts-sinon": ^1.4.19
"@stryker-mutator/core": ^8.0.0
"@stryker-mutator/jest-runner": ^8.0.0
- "@swc/core": ^1.3.102
+ "@swc/core": ^1.3.105
+ "@types/async": ^3.2.24
"@types/jest": ^29.5.11
"@types/mocha": ^10.0.6
- "@types/node": ^20.10.6
- "@typescript-eslint/eslint-plugin": ^6.17.0
- "@typescript-eslint/parser": ^6.17.0
+ "@types/node": ^20.11.5
+ "@typescript-eslint/eslint-plugin": ^6.19.0
+ "@typescript-eslint/parser": ^6.19.0
+ async: ^3.2.5
benchmark: ^2.1.4
chai: ^4.3.10
depcheck: ^1.4.7
eslint: ^8.56.0
eslint-config-prettier: ^9.1.0
eslint-plugin-import: ^2.29.1
- eslint-plugin-prettier: ^5.1.2
- fast-xml-parser: ^4.3.2
+ eslint-plugin-prettier: ^5.1.3
+ fast-xml-parser: ^4.3.3
fs-extra: ^11.2.0
husky: ^8.0.3
ignore: ^5.3.0
+ isomorphic-git: ^1.25.3
jest: ^29.7.0
lint-staged: ^15.2.0
lodash: ^4.17.21
mocha: ^10.2.0
nyc: ^15.1.0
- prettier: ^3.1.1
+ prettier: ^3.2.4
shx: ^0.3.4
+ simple-git: ^3.22.0
sinon: ^17.0.1
ts-jest: ^29.1.1
ts-node: ^10.9.2
@@ -9758,6 +9883,18 @@ __metadata:
languageName: unknown
linkType: soft
+"sha.js@npm:^2.4.9":
+ version: 2.4.11
+ resolution: "sha.js@npm:2.4.11"
+ dependencies:
+ inherits: ^2.0.1
+ safe-buffer: ^5.0.1
+ bin:
+ sha.js: ./bin.js
+ checksum: ebd3f59d4b799000699097dadb831c8e3da3eb579144fd7eb7a19484cbcbb7aca3c68ba2bb362242eb09e33217de3b4ea56e4678184c334323eca24a58e3ad07
+ languageName: node
+ linkType: hard
+
"shebang-command@npm:^1.2.0":
version: 1.2.0
resolution: "shebang-command@npm:1.2.0"
@@ -9840,6 +9977,35 @@ __metadata:
languageName: node
linkType: hard
+"simple-concat@npm:^1.0.0":
+ version: 1.0.1
+ resolution: "simple-concat@npm:1.0.1"
+ checksum: 4d211042cc3d73a718c21ac6c4e7d7a0363e184be6a5ad25c8a1502e49df6d0a0253979e3d50dbdd3f60ef6c6c58d756b5d66ac1e05cda9cacd2e9fc59e3876a
+ languageName: node
+ linkType: hard
+
+"simple-get@npm:^4.0.1":
+ version: 4.0.1
+ resolution: "simple-get@npm:4.0.1"
+ dependencies:
+ decompress-response: ^6.0.0
+ once: ^1.3.1
+ simple-concat: ^1.0.0
+ checksum: e4132fd27cf7af230d853fa45c1b8ce900cb430dd0a3c6d3829649fe4f2b26574c803698076c4006450efb0fad2ba8c5455fbb5755d4b0a5ec42d4f12b31d27e
+ languageName: node
+ linkType: hard
+
+"simple-git@npm:^3.22.0":
+ version: 3.22.0
+ resolution: "simple-git@npm:3.22.0"
+ dependencies:
+ "@kwsites/file-exists": ^1.1.1
+ "@kwsites/promise-deferred": ^1.1.1
+ debug: ^4.3.4
+ checksum: 118c43a3e1e27aecd8487205ed509acf925112de6edf1feb304d180c673f6e08279a13bcfae33c948de8b0809f2b929f9263fa7033ec7ef84908904eda0c3e2d
+ languageName: node
+ linkType: hard
+
"sinon@npm:^17.0.1":
version: 17.0.1
resolution: "sinon@npm:17.0.1"
@@ -9953,11 +10119,11 @@ __metadata:
linkType: hard
"sonic-boom@npm:^3.0.0, sonic-boom@npm:^3.7.0":
- version: 3.7.0
- resolution: "sonic-boom@npm:3.7.0"
+ version: 3.8.0
+ resolution: "sonic-boom@npm:3.8.0"
dependencies:
atomic-sleep: ^1.0.0
- checksum: 528f0f7f7e09dcdb02ad5985039f66554266cbd8813f9920781607c9248e01f468598c1334eab2cc740c016a63c8b2a20e15c3f618cddb08ea1cfb4a390a796e
+ checksum: c21ece61a0cabb78db96547aecb4e9086eba2db2d53030221ed07215bfda2d25bb02906366ea2584cbe73d236dd7dd109122d3d7287914b76a9630e0a36ad819
languageName: node
linkType: hard
@@ -10620,6 +10786,13 @@ __metadata:
languageName: node
linkType: hard
+"ts-retry-promise@npm:^0.8.0":
+ version: 0.8.0
+ resolution: "ts-retry-promise@npm:0.8.0"
+ checksum: 79a22633c751361cb61ae8cf9ced5d4faf5ab08a930962a34f251ac8af4ffd9f7f327a5595dc05f0dac5ca081e2cac048fa9002ade89e9280199be7f943133b8
+ languageName: node
+ linkType: hard
+
"tsconfig-paths@npm:^3.15.0":
version: 3.15.0
resolution: "tsconfig-paths@npm:3.15.0"