Commit

Update formatting (#1003)

ecraig12345 authored Nov 6, 2024
1 parent d429bab commit e388076
Showing 13 changed files with 429 additions and 142 deletions.
4 changes: 4 additions & 0 deletions .husky/pre-commit
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

yarn lint-staged
5 changes: 3 additions & 2 deletions .prettierignore
@@ -3,14 +3,15 @@
*.styl
*.svg
.*ignore
.husky/
.DS_Store
.nojekyll
.nvmrc
docs/.vuepress/dist
docs/.vuepress/dist/
/change/
/CHANGELOG.*
/lib/
LICENSE
node_modules
node_modules/
SECURITY.md
yarn.lock
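
Aside: the Prettier CLI reads .prettierignore automatically, so the effect of the updated entries can be previewed without rewriting any files. The commands below are an illustrative sketch, not part of this commit; the '**/*' glob mirrors the repo's existing format script.

# List files Prettier would reformat, honoring the ignore rules above:
npx prettier --list-different '**/*'
# Or fail with a non-zero exit code if anything is unformatted (useful in CI):
npx prettier --check '**/*'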
7 changes: 7 additions & 0 deletions change/beachball-ba6f3e9b-61fb-49ad-a371-b74b8f4ab642.json
@@ -0,0 +1,7 @@
{
"type": "none",
"comment": "Update formatting",
"packageName": "beachball",
"email": "[email protected]",
"dependentChangeType": "none"
}
10 changes: 9 additions & 1 deletion package.json
@@ -28,6 +28,7 @@
"docs": "vuepress dev docs --host localhost",
"docs:build": "vuepress build docs",
"format": "prettier --write '**/*'",
"prepare": "husky install",
"pub": "node ./lib/cli.js publish",
"release": "node ./lib/cli.js publish -y",
"release:docs": "yarn docs:build && yarn gh-pages -d docs/.vuepress/dist --dotfiles",
@@ -40,17 +41,22 @@
"test:watch": "jest --watch",
"update-snapshots": "yarn test:unit -u && yarn test:func -u && yarn test:e2e -u"
},
"lint-staged": {
"*": [
"prettier --write"
]
},
"dependencies": {
"cosmiconfig": "^8.3.6",
"execa": "^5.0.0",
"fs-extra": "^11.1.1",
"lodash": "^4.17.15",
"minimatch": "^3.0.4",
"p-graph": "^1.1.2",
"p-limit": "^3.0.2",
"prompts": "^2.4.2",
"semver": "^7.0.0",
"toposort": "^2.0.2",
"p-graph": "^1.1.2",
"workspace-tools": "^0.38.0",
"yargs-parser": "^21.0.0"
},
@@ -67,8 +73,10 @@
"@types/yargs-parser": "^21.0.0",
"find-free-port": "^2.0.0",
"gh-pages": "^5.0.0",
"husky": "^8.0.0",
"jest": "^29.0.0",
"jest-mock": "^29.0.0",
"lint-staged": "^12.0.0",
"normalized-tmpdir": "^1.0.1",
"prettier": "~2.8.4",
"strip-ansi": "^6.0.1",
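
Aside: taken together, these package.json changes wire up formatting on commit. The "prepare" script installs husky's Git hooks after install, the new .husky/pre-commit hook runs `yarn lint-staged`, and the "lint-staged" block runs `prettier --write` on every staged file. A rough sketch of the flow; the commands are illustrative, not part of the commit itself.

# One-time hook setup, normally run for you by `yarn install` via the "prepare" script:
yarn prepare          # runs "husky install"
# Then on every `git commit`, the .husky/pre-commit hook runs:
yarn lint-staged
# ...which, per the "lint-staged" config above, runs `prettier --write` on each staged
# file and adds the reformatted result back to the commit.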
212 changes: 112 additions & 100 deletions src/__e2e__/publishE2E.test.ts
@@ -22,14 +22,14 @@ import os from 'os';
jest.mock('../packageManager/npm');

describe('publish command (e2e)', () => {
const concurrencyValues = [[1],[os.cpus().length]];
const concurrencyValues = [[1], [os.cpus().length]];
const npmMock = initNpmMock();

let repositoryFactory: RepositoryFactory | undefined;
let repo: Repository | undefined;

// show error logs for these tests
initMockLogs({ alsoLog: ['error'] });
const logs = initMockLogs({ alsoLog: ['error'] });

function getOptions(overrides?: Partial<BeachballOptions>): BeachballOptions {
return {
@@ -101,94 +101,100 @@ describe('publish command (e2e)', () => {
});
});

it.each(concurrencyValues)('can perform a successful npm publish from a race condition, concurrency: %s', async (concurrency: number) => {
repositoryFactory = new RepositoryFactory('single');
repo = repositoryFactory.cloneRepository();

const options = getOptions({ concurrency: concurrency });
it.each(concurrencyValues)(
'can perform a successful npm publish from a race condition, concurrency: %s',
async (concurrency: number) => {
repositoryFactory = new RepositoryFactory('single');
repo = repositoryFactory.cloneRepository();

generateChangeFiles(['foo'], options);
repo.push();
const options = getOptions({ concurrency: concurrency });

// Adds a step that injects a race condition
let fetchCount = 0;

addGitObserver((args, output) => {
if (args[0] === 'fetch') {
if (fetchCount === 0) {
const anotherRepo = repositoryFactory!.cloneRepository();
// inject a checkin
anotherRepo.updateJsonFile('package.json', { version: '1.0.2' });
anotherRepo.push();
}
generateChangeFiles(['foo'], options);
repo.push();

fetchCount++;
}
});

await publish(options);

expect(await npmShow('foo')).toMatchObject({
name: 'foo',
versions: ['1.1.0'],
'dist-tags': { latest: '1.1.0' },
});
// Adds a step that injects a race condition
let fetchCount = 0;

repo.checkout(defaultBranchName);
repo.pull();
expect(repo.getCurrentTags()).toEqual(['foo_v1.1.0']);
addGitObserver((args, output) => {
if (args[0] === 'fetch') {
if (fetchCount === 0) {
const anotherRepo = repositoryFactory!.cloneRepository();
// inject a checkin
anotherRepo.updateJsonFile('package.json', { version: '1.0.2' });
anotherRepo.push();
}

// this indicates 2 tries
expect(fetchCount).toBe(2);
});
fetchCount++;
}
});

it.each(concurrencyValues)('can perform a successful npm publish from a race condition in the dependencies, concurrency: %s', async (concurrency: number) => {
repositoryFactory = new RepositoryFactory('single');
repo = repositoryFactory.cloneRepository();
await publish(options);

const options = getOptions({ concurrency: concurrency });
expect(await npmShow('foo')).toMatchObject({
name: 'foo',
versions: ['1.1.0'],
'dist-tags': { latest: '1.1.0' },
});

generateChangeFiles(['foo'], options);
repo.push();
repo.checkout(defaultBranchName);
repo.pull();
expect(repo.getCurrentTags()).toEqual(['foo_v1.1.0']);

// Adds a step that injects a race condition
let fetchCount = 0;
// this indicates 2 tries
expect(fetchCount).toBe(2);
}
);

it.each(concurrencyValues)(
'can perform a successful npm publish from a race condition in the dependencies, concurrency: %s',
async (concurrency: number) => {
repositoryFactory = new RepositoryFactory('single');
repo = repositoryFactory.cloneRepository();

const options = getOptions({ concurrency: concurrency });

generateChangeFiles(['foo'], options);
repo.push();

// Adds a step that injects a race condition
let fetchCount = 0;

addGitObserver((args, output) => {
if (args[0] === 'fetch') {
if (fetchCount === 0) {
const anotherRepo = repositoryFactory!.cloneRepository();
// inject a checkin
const packageJsonFile = anotherRepo.pathTo('package.json');
const contents = fs.readJSONSync(packageJsonFile, 'utf-8');
delete contents.dependencies.baz;
anotherRepo.commitChange('package.json', JSON.stringify(contents, null, 2));
anotherRepo.push();
}

addGitObserver((args, output) => {
if (args[0] === 'fetch') {
if (fetchCount === 0) {
const anotherRepo = repositoryFactory!.cloneRepository();
// inject a checkin
const packageJsonFile = anotherRepo.pathTo('package.json');
const contents = fs.readJSONSync(packageJsonFile, 'utf-8');
delete contents.dependencies.baz;
anotherRepo.commitChange('package.json', JSON.stringify(contents, null, 2));
anotherRepo.push();
fetchCount++;
}
});

fetchCount++;
}
});

await publish(options);
await publish(options);

expect(await npmShow('foo')).toMatchObject({
name: 'foo',
versions: ['1.1.0'],
'dist-tags': { latest: '1.1.0' },
});
expect(await npmShow('foo')).toMatchObject({
name: 'foo',
versions: ['1.1.0'],
'dist-tags': { latest: '1.1.0' },
});

repo.checkout(defaultBranchName);
repo.pull();
expect(repo.getCurrentTags()).toEqual(['foo_v1.1.0']);
repo.checkout(defaultBranchName);
repo.pull();
expect(repo.getCurrentTags()).toEqual(['foo_v1.1.0']);

// this indicates 2 tries
expect(fetchCount).toBe(2);
// this indicates 2 tries
expect(fetchCount).toBe(2);

const packageJsonFile = repo.pathTo('package.json');
const contents = JSON.parse(fs.readFileSync(packageJsonFile, 'utf-8'));
expect(contents.dependencies.baz).toBeUndefined();
});
const packageJsonFile = repo.pathTo('package.json');
const contents = JSON.parse(fs.readFileSync(packageJsonFile, 'utf-8'));
expect(contents.dependencies.baz).toBeUndefined();
}
);

it('can perform a successful npm publish without bump', async () => {
repositoryFactory = new RepositoryFactory('single');
@@ -302,35 +308,38 @@ describe('publish command (e2e)', () => {
expect(repo.getCurrentTags()).toEqual(['bar_v1.3.4', 'foo_v1.1.0']);
});

it.each(concurrencyValues)('should not perform npm publish on out-of-scope package, concurrency: %s', async (concurrency: number) => {
repositoryFactory = new RepositoryFactory('monorepo');
repo = repositoryFactory.cloneRepository();
it.each(concurrencyValues)(
'should not perform npm publish on out-of-scope package, concurrency: %s',
async (concurrency: number) => {
repositoryFactory = new RepositoryFactory('monorepo');
repo = repositoryFactory.cloneRepository();

const options = getOptions({
scope: ['!packages/foo'],
concurrency: concurrency,
});
const options = getOptions({
scope: ['!packages/foo'],
concurrency: concurrency,
});

generateChangeFiles(['foo'], options);
generateChangeFiles(['bar'], options);
repo.push();
generateChangeFiles(['foo'], options);
generateChangeFiles(['bar'], options);
repo.push();

await publish(options);
await publish(options);

await npmShow('foo', { shouldFail: true });
await npmShow('foo', { shouldFail: true });

expect(repo.getCurrentTags()).toEqual([]);
expect(repo.getCurrentTags()).toEqual([]);

expect(await npmShow('bar')).toMatchObject({
name: 'bar',
versions: ['1.4.0'],
'dist-tags': { latest: '1.4.0' },
});
expect(await npmShow('bar')).toMatchObject({
name: 'bar',
versions: ['1.4.0'],
'dist-tags': { latest: '1.4.0' },
});

repo.checkout(defaultBranchName);
repo.pull();
expect(repo.getCurrentTags()).toEqual(['bar_v1.4.0']);
});
repo.checkout(defaultBranchName);
repo.pull();
expect(repo.getCurrentTags()).toEqual(['bar_v1.4.0']);
}
);

it('should respect prepublish hooks', async () => {
repositoryFactory = new RepositoryFactory('monorepo');
@@ -435,7 +444,7 @@ describe('publish command (e2e)', () => {

const options = getOptions({
depth: 10,
});
});

generateChangeFiles(['foo'], options);

@@ -542,6 +551,7 @@ describe('publish command (e2e)', () => {
});

it('handles errors correctly when one of the packages fails during concurrent publishing', async () => {
logs.setOverrideOptions({ alsoLog: [] });
const packageNames = ['pkg1', 'pkg2', 'pkg3', 'pkg4', 'pkg5', 'pkg6', 'pkg7', 'pkg8'];
const packages: { [packageName: string]: PackageJsonFixture } = {};
const packageToFail = 'pkg4';
@@ -571,12 +581,14 @@ describe('publish command (e2e)', () => {
stdout: '',
success: false,
all: 'Failed to publish package',
}
};
}
return _mockNpmPublish(registryData, args, opts);
});

await expect(publish(options)).rejects.toThrow('Error publishing! Refer to the previous logs for recovery instructions.');
await expect(publish(options)).rejects.toThrow(
'Error publishing! Refer to the previous logs for recovery instructions.'
);

for (const name of packageNames) {
if (['pkg7', 'pkg8', packageToFail].includes(name)) {
@@ -621,7 +633,7 @@ describe('publish command (e2e)', () => {
let maxConcurrency = 0;
const options = getOptions({
hooks: {
postpublish: async (packagePath) => {
postpublish: async packagePath => {
currentConcurrency++;
await simulateWait(100);
const packageName = path.basename(packagePath);
8 changes: 6 additions & 2 deletions src/__fixtures__/mockNpm.test.ts
@@ -163,7 +163,9 @@ describe('_mockNpmShow', () => {

// support for this could be added later
it('currently throws if requested version is a range', async () => {
await expect(() => _mockNpmShow(data, ['foo@^1.0.0'], { cwd: undefined })).rejects.toThrow(/not currently supported/);
await expect(() => _mockNpmShow(data, ['foo@^1.0.0'], { cwd: undefined })).rejects.toThrow(
/not currently supported/
);
});
});

@@ -199,7 +201,9 @@ describe('_mockNpmPublish', () => {
});

it('throws if cwd is not specified', async () => {
await expect(() => _mockNpmPublish({}, [], { cwd: undefined })).rejects.toThrow('cwd is required for mock npm publish');
await expect(() => _mockNpmPublish({}, [], { cwd: undefined })).rejects.toThrow(
'cwd is required for mock npm publish'
);
});

it('errors if reading package.json fails', async () => {