Mirror of https://github.com/renovatebot/renovate.git (synced 2025-03-14 08:12:56 +00:00)
test: fix coverage (#3794)

Parent: b53c4c09cb
Commit: 5f213255d0

55 changed files with 941 additions and 109 deletions
.tslintrc.js
lib/
  datasource/docker
  manager
  platform/github
  workers/
    branch
    global
    pr/changelog
    repository
test/
  config
  datasource
  manager/
    bundler
    cargo
    composer
    gomod
    gradle
    npm/extract
    pip_setup
    pipenv
    poetry
    travis
  platform/github
  util
  versioning
  workers/
    branch
    global
    repository/
      onboarding/pr
      process/lookup
      updates
@@ -27,7 +27,7 @@ module.exports = {
  },
  overrides: [
    {
      files: ['*.spec.ts'],
      files: ['*.spec.js', '*.spec.ts'],
      rules: {
        'global-require': 0,
        'prefer-promise-reject-errors': 0,
@@ -39,7 +39,6 @@ function getRegistryRepository(lookupName, registryUrls) {
}

async function getAuthHeaders(registry, repository) {
  // istanbul ignore if
  try {
    const apiCheckUrl = `${registry}/v2/`;
    const apiCheckResponse = await got(apiCheckUrl, { throwHttpErrors: false });

@@ -51,8 +50,10 @@ async function getAuthHeaders(registry, repository) {
    );

    const { host } = URL.parse(registry);
    const opts = hostRules.find({ hostType: 'docker', host }) || {};
    opts.json = true;
    const opts = {
      ...hostRules.find({ hostType: 'docker', host }),
      json: true,
    };
    if (opts.username && opts.password) {
      const auth = Buffer.from(`${opts.username}:${opts.password}`).toString(
        'base64'
@@ -38,7 +38,7 @@ async function getArtifacts(
  if (!config.gitFs) {
    await fs.outputFile(localLockFileName, existingLockFileContent);
  }
  let authJson = {};
  const authJson = {};
  let credentials = hostRules.find({
    hostType: 'github',
    host: 'api.github.com',

@@ -74,7 +74,6 @@ async function getArtifacts(
    // istanbul ignore else
    if (hostRule.username && hostRule.password) {
      logger.debug('Setting packagist auth for host ' + host);
      authJson = authJson || {};
      authJson['http-basic'] = authJson['http-basic'] || {};
      authJson['http-basic'][host] = {
        username: hostRule.username,
@@ -95,11 +95,11 @@ async function getArtifacts(
      { seconds, type: 'go.sum', stdout, stderr },
      'Generated lockfile'
    );
    // istanbul ignore if
    if (
      config.postUpdateOptions &&
      config.postUpdateOptions.includes('gomodTidy')
    ) {
      // istanbul ignore else
      if (config.gitFs) {
        args = 'mod tidy';
        if (cmd.includes('.insteadOf')) {

@@ -124,7 +124,6 @@ async function getArtifacts(
      }
    }
    const res = [];
    // istanbul ignore if
    if (config.gitFs) {
      const status = await platform.getRepoStatus();
      if (!status.modified.includes(sumFileName)) {
@@ -54,7 +54,6 @@ function detectMonorepos(packageFiles) {
      subPackage.lernaClient = lernaClient;
      subPackage.yarnLock = subPackage.yarnLock || yarnLock;
      subPackage.npmLock = subPackage.npmLock || npmLock;
      // istanbul ignore if
      if (subPackage.yarnLock) {
        subPackage.hasYarnWorkspaces = !!yarnWorkspacesPackages;
      }
@@ -106,8 +106,12 @@ async function generateLockFile(cwd, env, config = {}, upgrades = []) {
        shell: true,
        env,
      });
      stdout += updateRes.stdout ? updateRes.stdout : '';
      stderr += updateRes.stderr ? updateRes.stderr : '';
      stdout += updateRes.stdout
        ? /* istanbul ignore next */ updateRes.stdout
        : '';
      stderr += updateRes.stderr
        ? /* istanbul ignore next */ updateRes.stderr
        : '';
    }
    if (
      config.postUpdateOptions &&

@@ -120,8 +124,12 @@ async function generateLockFile(cwd, env, config = {}, upgrades = []) {
        shell: true,
        env,
      });
      stdout += dedupeRes.stdout ? dedupeRes.stdout : '';
      stderr += dedupeRes.stderr ? dedupeRes.stderr : '';
      stdout += dedupeRes.stdout
        ? /* istanbul ignore next */ dedupeRes.stdout
        : '';
      stderr += dedupeRes.stderr
        ? /* istanbul ignore next */ dedupeRes.stderr
        : '';
    }
    if (
      config.postUpdateOptions &&

@@ -134,8 +142,12 @@ async function generateLockFile(cwd, env, config = {}, upgrades = []) {
        shell: true,
        env,
      });
      stdout += dedupeRes.stdout ? dedupeRes.stdout : '';
      stderr += dedupeRes.stderr ? dedupeRes.stderr : '';
      stdout += dedupeRes.stdout
        ? /* istanbul ignore next */ dedupeRes.stdout
        : '';
      stderr += dedupeRes.stderr
        ? /* istanbul ignore next */ dedupeRes.stderr
        : '';
    }
    const duration = process.hrtime(startTime);
    const seconds = Math.round(duration[0] + duration[1] / 1e9);
@@ -28,7 +28,6 @@ async function getPythonAlias() {
    try {
      const { stdout, stderr } = await exec(`${pythonVersion} --version`);
      const version = parsePythonVersion(stdout || stderr);
      // istanbul ignore if
      if (version[0] >= 3 && version[1] >= 7) {
        pythonAlias = pythonVersion;
      }

@@ -82,7 +81,6 @@ async function extractSetupFile(content, packageFile, config) {
      timeout: 5000,
    }));
  } catch (err) {
    // istanbul ignore if
    if (
      err.message &&
      err.message.includes('No such file or directory') &&

@@ -94,7 +92,6 @@ async function extractSetupFile(content, packageFile, config) {
    }
    throw err;
  }
  // istanbul ignore if
  if (stderr) {
    stderr = stderr.replace(/.*\n\s*import imp/, '').trim();
    if (stderr.length) {
@@ -171,7 +171,6 @@ async function initRepo({
      throw new Error('fork');
    }
  }
  // istanbul ignore if
  if (res.body.full_name && res.body.full_name !== repository) {
    logger.info(
      { repository, this_repository: res.body.full_name },

@@ -1512,7 +1511,7 @@ async function getVulnerabilityAlerts() {
            references { url }
            severity
          }
          securityVulnerability {
          securityVulnerability {
            package { name ecosystem }
            firstPatchedVersion { identifier }
            vulnerableVersionRange
@@ -144,7 +144,7 @@ class Storage {
    async function deleteBranch(branchName) {
      delete branchFiles[branchName];
      const options = config.forkToken
        ? { token: config.forkToken }
        ? /* istanbul ignore next */ { token: config.forkToken }
        : undefined;
      try {
        await get.delete(
@@ -40,9 +40,7 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
    logger.info('Branch has been checked in master issue: ' + masterIssueCheck);
  }
  try {
    logger.debug(
      `Branch has ${dependencies ? dependencies.length : 0} upgrade(s)`
    );
    logger.debug(`Branch has ${dependencies.length} upgrade(s)`);

    // Check if branch already existed
    const existingPr = branchPr ? undefined : await prAlreadyExisted(config);

@@ -74,7 +72,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
      content +=
        '\n\nIf this PR was closed by mistake or you changed your mind, you can simply rename this PR and you will soon get a fresh replacement PR opened.';
      if (!config.suppressNotifications.includes('prIgnoreNotification')) {
        // istanbul ignore if
        if (config.dryRun) {
          logger.info(
            'DRY-RUN: Would ensure closed PR comment in PR #' +

@@ -85,7 +82,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
        }
      }
      if (branchExists) {
        // istanbul ignore if
        if (config.dryRun) {
          logger.info('DRY-RUN: Would delete branch ' + config.branchName);
        } else {

@@ -138,7 +134,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
        branchPr.body &&
        branchPr.body.includes(`- [x] <!-- ${appSlug}-rebase -->`);
      if (prRebaseChecked || titleRebase || labelRebase) {
        // istanbul ignore if
        if (config.dryRun) {
          logger.info(
            'DRY-RUN: Would ensure PR edited comment removal in PR #' +

@@ -151,7 +146,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
        let content = `:construction_worker: This PR has received other commits, so ${appName} will stop updating it to avoid conflicts or other problems.`;
        content += ` If you wish to abandon your changes and have ${appName} start over you may click the "rebase" checkbox in the PR body/description.`;
        if (!config.suppressNotifications.includes('prEditNotification')) {
          // istanbul ignore if
          if (config.dryRun) {
            logger.info(
              'DRY-RUN: ensure comment in PR #' + branchPr.number

@@ -196,7 +190,7 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
      config.unpublishSafe &&
      config.canBeUnpublished &&
      (config.prCreation === 'not-pending' ||
        config.prCreation === 'status-success')
        /* istanbul ignore next */ config.prCreation === 'status-success')
    ) {
      logger.info(
        'Skipping branch creation due to unpublishSafe + status checks'

@@ -263,7 +257,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
    }

    config.committedFiles = await commitFilesToBranch(config);
    // istanbul ignore if
    if (
      config.updateType === 'lockFileMaintenance' &&
      !config.committedFiles &&

@@ -273,7 +266,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
      logger.info(
        'Deleting lock file maintenance branch as master lock file no longer needs updating'
      );
      // istanbul ignore if
      if (config.dryRun) {
        logger.info('DRY-RUN: Would delete lock file maintenance branch');
      } else {

@@ -421,7 +413,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
        config.suppressNotifications.includes('lockFileErrors')
      )
    ) {
      // istanbul ignore if
      if (config.dryRun) {
        logger.info(
          'DRY-RUN: Would ensure lock file error comment in PR #' +

@@ -446,7 +437,6 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
      // Check if state needs setting
      if (existingState !== state) {
        logger.debug(`Updating status check state to failed`);
        // istanbul ignore if
        if (config.dryRun) {
          logger.info(
            'DRY-RUN: Would set branch status in ' + config.branchName
@@ -32,7 +32,6 @@ async function start() {
        'No repositories found - did you want to run with flag --autodiscover?'
      );
    }
    // istanbul ignore if
    if (
      config.platform === 'github' &&
      config.endpoint &&

@@ -41,7 +40,6 @@ async function start() {
      config.prFooter =
        'Available now for Enterprise: [Renovate Pro](https://renovatebot.com/pro) with real-time webhook handling and priority job queue.';
    }
    // istanbul ignore if
    if (
      config.platform === 'gitlab' &&
      config.endpoint &&
@@ -11,7 +11,7 @@ module.exports = {
async function getTags(endpoint, versionScheme, repository) {
  let url = endpoint
    ? endpoint.replace(/\/?$/, '/')
    : 'https://api.github.com/';
    : /* istanbul ignore next: not possible to test, maybe never possible? */ 'https://api.github.com/';
  url += `repos/${repository}/tags?per_page=100`;
  try {
    const res = await ghGot(url, {
@@ -50,7 +50,8 @@ async function validatePrs(config) {
      if (parsed) {
        const toValidate =
          file === 'package.json'
            ? parsed.renovate || parsed['renovate-config']
            ? /* istanbul ignore next */ parsed.renovate ||
              parsed['renovate-config']
            : parsed;
        if (toValidate) {
          logger.debug({ config: toValidate }, 'Validating config');
@@ -47,7 +47,6 @@ async function lookupUpdates(config) {
      res.warnings.push(result);
      return res;
    }
    // istanbul ignore if
    if (dependency.deprecationMessage) {
      logger.info({ dependency: depName }, 'Found deprecationMessage');
      res.deprecationMessage = dependency.deprecationMessage;

@@ -56,7 +55,6 @@ async function lookupUpdates(config) {
      dependency.sourceUrl && dependency.sourceUrl.length
        ? dependency.sourceUrl
        : null;
    // istanbul ignore if
    if (dependency.sourceDirectory) {
      res.sourceDirectory = dependency.sourceDirectory;
    }
@@ -187,7 +187,6 @@ function generateBranchConfig(branchUpgrades) {
      config.hasTypes = true;
    } else {
      config.upgrades.sort((a, b) => {
        // istanbul ignore if
        if (a.fileReplacePosition && b.fileReplacePosition) {
          // This is because we need to replace from the bottom of the file up
          return a.fileReplacePosition > b.fileReplacePosition ? -1 : 1;
@@ -698,10 +698,8 @@ exports[`config/presets resolvePreset throws if valid and invalid 3`] = `undefin

exports[`config/presets resolvePreset works with valid 1`] = `
Object {
  "description": Array [
    "Use version pinning (maintain a single version only and not semver ranges)",
  ],
  "foo": 1,
  "ignoreDeps": Array [],
  "rangeStrategy": "pin",
}
`;
@@ -90,6 +90,7 @@ describe('config/presets', () => {
    });
    it('works with valid', async () => {
      config.foo = 1;
      config.ignoreDeps = [];
      config.extends = [':pinVersions'];
      const res = await presets.resolveConfigPresets(config);
      expect(res).toMatchSnapshot();
@@ -342,6 +342,14 @@ Array [
      "timeout": 10000,
    },
  ],
  Array [
    "https://api.github.com/user/9287/repos?page=3&per_page=100",
    Object {
      "headers": Object {},
      "json": true,
      "timeout": 10000,
    },
  ],
  Array [
    "https://registry.company.com/v2/",
    Object {

@@ -349,7 +357,7 @@ Array [
    },
  ],
  Array [
    "https://registry.company.com/v2/node/manifests/1.0.0",
    "https://registry.company.com/v2/node/manifests/latest",
    Object {
      "headers": Object {
        "accept": "application/vnd.docker.distribution.manifest.v2+json",
@@ -44,10 +44,7 @@ describe('api/docker', () => {
      got.mockReturnValueOnce({
        headers: { 'docker-content-digest': 'some-digest' },
      });
      const res = await docker.getDigest(
        { lookupName: 'some-dep' },
        'some-new-value'
      );
      const res = await docker.getDigest({ lookupName: 'some-dep' });
      expect(res).toBe('some-digest');
    });
    it('falls back to body for digest', async () => {

@@ -189,7 +186,14 @@ describe('api/docker', () => {
      got.mockReturnValueOnce({
        headers: {},
      });
      got.mockReturnValueOnce({ headers: {}, body: { tags } });
      got.mockReturnValueOnce({
        headers: {
          link:
            '<https://api.github.com/user/9287/repos?page=3&per_page=100>; rel="next", ',
        },
        body: { tags },
      });
      got.mockReturnValueOnce({ headers: {}, body: { tags: ['latest'] } });
      got.mockReturnValueOnce({
        headers: {},
      });
@@ -87,8 +87,13 @@ describe('datasource/github', () => {
          content: Buffer.from('{"foo":"bar"}').toString('base64'),
        },
      }));
      const content = await github.getPreset('some/repo', 'custom');
      expect(content).toEqual({ foo: 'bar' });
      try {
        global.appMode = true;
        const content = await github.getPreset('some/repo', 'custom');
        expect(content).toEqual({ foo: 'bar' });
      } finally {
        delete global.appMode;
      }
    });
  });
  describe('getPkgReleases', () => {
@@ -45,6 +45,9 @@ describe('api/npm', () => {
    };
    return global.renovateCache.rmAll();
  });
  afterEach(() => {
    delete process.env.RENOVATE_CACHE_NPM_MINUTES;
  });
  it('should return null for no versions', async () => {
    const missingVersions = { ...npmResponse };
    missingVersions.versions = {};

@@ -387,6 +390,7 @@ describe('api/npm', () => {
      .get('/foobar')
      .reply(200, npmResponse);
    process.env.REGISTRY = 'https://registry.from-env.com';
    process.env.RENOVATE_CACHE_NPM_MINUTES = '15';
    global.trustLevel = 'high';
    // eslint-disable-next-line no-template-curly-in-string
    const npmrc = 'registry=${REGISTRY}';
@@ -39,6 +39,16 @@ describe('manager/docker-compose/update', () => {
    const res = updateDependency(railsGemfile, upgrade);
    expect(res).toBeNull();
  });
  it('uses single quotes', () => {
    const upgrade = {
      lineNumber: 0,
      depName: 'rack-cache',
      newValue: '~> 1.3',
    };
    const gemFile = `gem 'rack-cache', '~> 1.2'`;
    const res = updateDependency(gemFile, upgrade);
    expect(res).toEqual(`gem 'rack-cache', '~> 1.3'`);
  });
  it('returns null if error', () => {
    const res = updateDependency(null, null);
    expect(res).toBeNull();
@@ -14,6 +14,9 @@ describe('.getArtifacts()', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });
  afterEach(() => {
    delete global.trustLevel;
  });
  it('returns null if no Cargo.lock found', async () => {
    const updatedDeps = [
      {

@@ -79,6 +82,7 @@ describe('.getArtifacts()', () => {
        currentValue: '1.2.3',
      },
    ];
    global.trustLevel = 'high';
    expect(
      await cargo.getArtifacts('Cargo.toml', updatedDeps, '{}', config)
    ).not.toBeNull();
@@ -16,6 +16,9 @@ describe('.getArtifacts()', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });
  afterEach(() => {
    delete global.trustLevel;
  });
  it('returns if no composer.lock found', async () => {
    expect(
      await composer.getArtifacts('composer.json', [], '{}', config)

@@ -62,6 +65,7 @@ describe('.getArtifacts()', () => {
      stderror: '',
    });
    fs.readFile = jest.fn(() => 'New composer.lock');
    global.trustLevel = 'high';
    expect(
      await composer.getArtifacts('composer.json', [], '{}', config)
    ).not.toBeNull();
@@ -27,6 +27,10 @@ const config = {
describe('.getArtifacts()', () => {
  beforeEach(() => {
    jest.resetAllMocks();
    exec.mockResolvedValue({
      stdout: '',
      stderror: '',
    });
  });
  it('returns if no go.sum found', async () => {
    expect(await gomod.getArtifacts('go.mod', [], gomod1, config)).toBeNull();

@@ -82,6 +86,29 @@ describe('.getArtifacts()', () => {
      })
    ).not.toBeNull();
  });
  it('supports docker mode with credentials, appMode and trustLevel=high', async () => {
    hostRules.find.mockReturnValue({
      token: 'some-token',
    });
    platform.getFile.mockResolvedValueOnce('Current go.sum');
    platform.getRepoStatus.mockResolvedValue({ modified: '' });
    fs.readFile.mockResolvedValueOnce('New go.sum');
    try {
      global.appMode = true;
      global.trustLevel = 'high';
      expect(
        await gomod.getArtifacts('go.mod', [], gomod1, {
          ...config,
          binarySource: 'docker',
          postUpdateOptions: ['gomodTidy'],
          gitFs: 'https',
        })
      ).toBeNull();
    } finally {
      delete global.appMode;
      delete global.trustLevel;
    }
  });
  it('catches errors', async () => {
    platform.getFile.mockReturnValueOnce('Current go.sum');
    fs.outputFile = jest.fn(() => {
@@ -186,6 +186,7 @@ describe('manager/gradle', () => {
      binarySource: 'docker',
      gitFs: true,
      ...config,
      gradle: {},
    };
    await manager.extractAllPackageFiles(configWithDocker, ['build.gradle']);

@@ -119,7 +119,7 @@ Object {
  "npmLock": undefined,
  "npmrc": undefined,
  "packageJsonName": undefined,
  "packageJsonType": "app",
  "packageJsonType": "library",
  "packageJsonVersion": undefined,
  "pnpmShrinkwrap": undefined,
  "skipInstalls": false,
@@ -79,3 +79,39 @@ Array [
  },
]
`;

exports[`manager/npm/extract .extractPackageFile() uses yarn workspaces package settings 2`] = `
Array [
  Object {
    "internalPackages": Array [
      "@org/a",
      "@org/b",
    ],
    "packageFile": "package.json",
    "yarnWorkspacesPackages": "packages/*",
  },
  Object {
    "hasYarnWorkspaces": true,
    "internalPackages": Array [
      "@org/b",
    ],
    "lernaClient": undefined,
    "lernaDir": undefined,
    "npmLock": undefined,
    "packageFile": "packages/a/package.json",
    "packageJsonName": "@org/a",
    "yarnLock": true,
  },
  Object {
    "internalPackages": Array [
      "@org/a",
    ],
    "lernaClient": undefined,
    "lernaDir": undefined,
    "npmLock": undefined,
    "packageFile": "packages/b/package.json",
    "packageJsonName": "@org/b",
    "yarnLock": undefined,
  },
]
`;
@@ -166,6 +166,7 @@ describe('manager/npm/extract', () => {
          npm: '^8.0.0',
          yarn: 'disabled',
        },
        main: 'index.js',
      };
      const pJsonStr = JSON.stringify(pJson);
      const res = await npmExtract.extractPackageFile(
@@ -48,5 +48,25 @@ describe('manager/npm/extract', () => {
      expect(packageFiles[1].lernaDir).toEqual('.');
      expect(packageFiles[1].internalPackages).toEqual(['@org/b']);
    });
    it('uses yarn workspaces package settings', async () => {
      const packageFiles = [
        {
          packageFile: 'package.json',
          yarnWorkspacesPackages: 'packages/*',
        },
        {
          packageFile: 'packages/a/package.json',
          packageJsonName: '@org/a',
          yarnLock: true,
        },
        {
          packageFile: 'packages/b/package.json',
          packageJsonName: '@org/b',
        },
      ];
      await detectMonorepos(packageFiles);
      expect(packageFiles).toMatchSnapshot();
      expect(packageFiles[1].internalPackages).toEqual(['@org/b']);
    });
  });
});
@@ -67,32 +67,62 @@ Object {
      "currentValue": ">=3.2.1,<4.0",
      "datasource": "pypi",
      "depName": "statsd",
      "lineNumber": 63,
      "lineNumber": 62,
    },
    Object {
      "currentValue": ">=2.10.0,<3.0",
      "datasource": "pypi",
      "depName": "requests",
      "lineNumber": 64,
      "lineNumber": 63,
      "skipReason": "ignored",
    },
    Object {
      "currentValue": ">=5.27.1,<7.0",
      "datasource": "pypi",
      "depName": "raven",
      "lineNumber": 65,
      "lineNumber": 64,
    },
    Object {
      "currentValue": ">=0.15.2,<0.17",
      "datasource": "pypi",
      "depName": "future",
      "lineNumber": 66,
      "lineNumber": 65,
    },
    Object {
      "currentValue": ">=1.0.16,<2.0",
      "datasource": "pypi",
      "depName": "ipaddress",
      "lineNumber": 67,
      "lineNumber": 66,
    },
  ],
}
`;

exports[`lib/manager/pip_setup/extract getPythonAlias finds python 1`] = `
[MockFunction] {
  "calls": Array [
    Array [
      "python --version",
    ],
    Array [
      "python3 --version",
    ],
    Array [
      "python3.7 --version",
    ],
  ],
  "results": Array [
    Object {
      "type": "return",
      "value": Promise {},
    },
    Object {
      "type": "return",
      "value": Promise {},
    },
    Object {
      "type": "return",
      "value": Promise {},
    },
  ],
}
@@ -60,8 +60,7 @@ setup(
    include_package_data=True,
    install_requires=[
        'gunicorn>=19.7.0,<20.0',
        'Werkzeug>=0.11.5,<0.15',
        'statsd>=3.2.1,<4.0',
        'Werkzeug>=0.11.5,<0.15', 'statsd>=3.2.1,<4.0',
        'requests>=2.10.0,<3.0', # renovate: ignore
        'raven>=5.27.1,<7.0', # pyup: nothing
        'future>=0.15.2,<0.17',
@@ -22,6 +22,9 @@ async function tmpFile() {
}

describe('lib/manager/pip_setup/extract', () => {
  beforeEach(() => {
    jest.resetModules();
  });
  describe('extractPackageFile()', () => {
    it('returns found deps', async () => {
      expect(

@@ -42,6 +45,19 @@ describe('lib/manager/pip_setup/extract', () => {
        )
      ).toBeNull();
    });
    it('catches error', async () => {
      const fExec = jest.fn(() => {
        throw new Error('No such file or directory');
      });
      jest.doMock('child-process-promise', () => {
        return {
          exec: fExec,
        };
      });
      const m = require('../../../lib/manager/pip_setup/extract');
      await m.extractPackageFile(content, packageFile, config);
      expect(fExec).toHaveBeenCalledTimes(4);
    });
  });

  describe('parsePythonVersion', () => {
@@ -53,13 +69,27 @@ describe('lib/manager/pip_setup/extract', () => {
    it('returns the python alias to use', async () => {
      expect(pythonVersions.includes(await getPythonAlias())).toBe(true);
    });
    it('finds python', async () => {
      const fExec = jest.fn(() =>
        Promise.resolve({ stderr: 'Python 3.7.15rc1' })
      );
      jest.doMock('child-process-promise', () => {
        return {
          exec: fExec,
        };
      });
      const m = require('../../../lib/manager/pip_setup/extract');
      expect(pythonVersions.includes(await m.getPythonAlias())).toBe(true);
      expect(fExec).toMatchSnapshot();
    });
  });
  describe('Test for presence of mock lib', () => {
    it('should test if python mock lib is installed', async () => {
      const cp = jest.requireActual('child-process-promise');
      let isMockInstalled = true;
      // when binarysource === docker
      try {
        await exec(`python -c "import mock"`);
        await cp.exec(`python -c "import mock"`);
      } catch (err) {
        isMockInstalled = false;
      }
@@ -15,13 +15,16 @@ describe('.getArtifacts()', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });
  afterEach(() => {
    delete global.trustLevel;
  });
  it('returns if no Pipfile.lock found', async () => {
    expect(await pipenv.getArtifacts('Pipfile', [], '', config)).toBeNull();
  });
  it('returns null if unchanged', async () => {
    platform.getFile.mockReturnValueOnce('Current Pipfile.lock');
    exec.mockReturnValueOnce({
      stdout: '',
      stdout: 'Locking',
      stderror: '',
    });
    fs.readFile = jest.fn(() => 'Current Pipfile.lock');

@@ -34,6 +37,7 @@ describe('.getArtifacts()', () => {
      stderror: '',
    });
    fs.readFile = jest.fn(() => 'New Pipfile.lock');
    global.trustLevel = 'high';
    expect(
      await pipenv.getArtifacts('Pipfile', [], '{}', config)
    ).not.toBeNull();
@@ -13,6 +13,9 @@ describe('.getArtifacts()', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });
  afterEach(() => {
    delete global.trustLevel;
  });
  it('returns null if no poetry.lock found', async () => {
    const updatedDeps = [
      {

@@ -59,6 +62,7 @@ describe('.getArtifacts()', () => {
        currentValue: '1.2.3',
      },
    ];
    global.trustLevel = 'high';
    expect(
      await poetry.getArtifacts('pyproject.toml', updatedDeps, '{}', config)
    ).not.toBeNull();
@@ -2,6 +2,13 @@

exports[`manager/travis/update updateDependency falls back to 2 spaces 1`] = `"hello: world"`;

exports[`manager/travis/update updateDependency it uses double quotes 1`] = `
"node_js:
  - \\"6\\"
  - \\"8\\"
"
`;

exports[`manager/travis/update updateDependency updates values 1`] = `
"dist: trusty
language: node_js
@@ -25,6 +25,14 @@ describe('manager/travis/update', () => {
    const res = nodefile.updateDependency('hello: world', upgrade);
    expect(res).toMatchSnapshot();
  });
  it('it uses double quotes', () => {
    const upgrade = {
      currentValue: ['6'],
      newValue: [6, 8],
    };
    const res = nodefile.updateDependency('node_js:\n - "6"\n', upgrade);
    expect(res).toMatchSnapshot();
  });
  it('returns null if error', () => {
    const upgrade = {
      currentValue: [8, 6, 4],
@ -191,10 +191,35 @@ Array [
|
|||
"paginate": true,
|
||||
},
|
||||
],
|
||||
Array [
|
||||
"repos/some/repo/pulls/91",
|
||||
],
|
||||
Array [
|
||||
"repos/some/repo/git/refs/heads/master",
|
||||
],
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`platform/github getBranchPr(branchName) should return the PR object 2`] = `null`;
|
||||
exports[`platform/github getBranchPr(branchName) should return the PR object 2`] = `
|
||||
Object {
|
||||
"additions": 1,
|
||||
"base": Object {
|
||||
"sha": "1234",
|
||||
},
|
||||
"branchName": "somebranch",
|
||||
"canRebase": true,
|
||||
"commits": 1,
|
||||
"deletions": 1,
|
||||
"displayNumber": "Pull Request #91",
|
||||
"head": Object {
|
||||
"ref": "somebranch",
|
||||
},
|
||||
"isStale": true,
|
||||
"number": 91,
|
||||
"sha": undefined,
|
||||
"state": "open",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`platform/github getCommitMessages() returns commits messages 1`] = `
|
||||
Array [
|
||||
|
@ -453,6 +478,35 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`platform/github getPrList() should return PRs 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"branchName": "somebranch",
|
||||
"closed_at": undefined,
|
||||
"createdAt": undefined,
|
||||
"number": 91,
|
||||
"sha": undefined,
|
||||
"sourceRepo": undefined,
|
||||
"state": "merged",
|
||||
"title": undefined,
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`platform/github getPrList() should return PRs 2`] = `
|
||||
Array [
|
||||
Array [
|
||||
"repos/some/repo",
|
||||
],
|
||||
Array [
|
||||
"repos/some/repo/pulls?per_page=100&state=all",
|
||||
Object {
|
||||
"paginate": true,
|
||||
},
|
||||
],
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`platform/github getRepos should return an array of repos 1`] = `
|
||||
Array [
|
||||
Array [
|
||||
|
@ -607,6 +661,17 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`platform/github setBaseBranch(branchName) sets the default base branch 1`] = `
|
||||
Array [
|
||||
Array [
|
||||
"repos/some/repo",
|
||||
],
|
||||
Array [
|
||||
"repos/some/repo/git/trees/master?recursive=true",
|
||||
],
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`platform/github updatePr(prNo, title, body) should update the PR 1`] = `
|
||||
Array [
|
||||
Array [
|
||||
|
|
|
@ -111,6 +111,7 @@
|
|||
"title": "feat(azure): abandon pr after delete branch",
|
||||
"mergeable": "MERGEABLE",
|
||||
"mergeStateStatus": "BEHIND",
|
||||
"reviews": { "nodes": [ ] },
|
||||
"commits": {
|
||||
"nodes": [
|
||||
{
|
||||
|
|
|
@ -285,7 +285,23 @@ describe('platform/github', () => {
|
|||
github.initRepo({
|
||||
repository: 'some/repo',
|
||||
})
|
||||
).rejects.toThrow();
|
||||
).rejects.toThrow('not-found');
|
||||
});
|
||||
it('should throw error if renamed', async () => {
|
||||
get.mockReturnValueOnce({
|
||||
body: {
|
||||
fork: true,
|
||||
full_name: 'some/other',
|
||||
owner: {},
|
||||
},
|
||||
});
|
||||
await expect(
|
||||
github.initRepo({
|
||||
gitFs: 'https',
|
||||
includeForks: true,
|
||||
repository: 'some/repo',
|
||||
})
|
||||
).rejects.toThrow('renamed');
|
||||
});
|
||||
});
|
||||
describe('getRepoForceRebase', () => {
|
||||
|
@ -367,6 +383,28 @@ describe('platform/github', () => {
|
|||
await github.setBaseBranch('some-branch');
|
||||
expect(get.mock.calls).toMatchSnapshot();
|
||||
});
|
||||
it('sets the default base branch', async () => {
|
||||
await initRepo({
|
||||
repository: 'some/repo',
|
||||
defaultBranch: 'some-branch',
|
||||
});
|
||||
get.mockImplementationOnce(() => ({
|
||||
body: {
|
||||
truncated: true,
|
||||
tree: [],
|
||||
},
|
||||
}));
|
||||
// getBranchCommit
|
||||
get.mockImplementationOnce(() => ({
|
||||
body: {
|
||||
object: {
|
||||
sha: '1238',
|
||||
},
|
||||
},
|
||||
}));
|
||||
await github.setBaseBranch();
|
||||
expect(get.mock.calls).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('getFileList', () => {
|
||||
beforeEach(async () => {
|
||||
|
@ -420,6 +458,15 @@ describe('platform/github', () => {
|
|||
const files = await github.getFileList('npm-branch');
|
||||
expect(files).toMatchSnapshot();
|
||||
});
|
||||
it('uses default branch', async () => {
|
||||
get.mockImplementationOnce(() => ({
|
||||
body: {
|
||||
truncated: true,
|
||||
tree: [],
|
||||
},
|
||||
}));
|
||||
expect(await github.getFileList()).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
describe('branchExists(branchName)', () => {
|
||||
it('should return true if the branch exists (one result)', async () => {
|
||||
|
@ -523,6 +570,27 @@ describe('platform/github', () => {
|
|||
expect(await github.isBranchStale('thebranchname')).toBe(true);
|
||||
});
|
||||
});
|
||||
describe('getPrList()', () => {
|
||||
beforeEach(async () => {
|
||||
await initRepo({
|
||||
repository: 'some/repo',
|
||||
});
|
||||
});
|
||||
it('should return PRs', async () => {
|
||||
get.mockImplementationOnce(() => ({
|
||||
body: [
|
||||
{
|
||||
number: 91,
|
||||
head: { ref: 'somebranch', repo: {} },
|
||||
state: 'closed',
|
||||
merged_at: '12345',
|
||||
},
|
||||
],
|
||||
}));
|
||||
expect(await github.getPrList()).toMatchSnapshot();
|
||||
expect(get.mock.calls).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('getBranchPr(branchName)', () => {
|
||||
it('should return null if no PR exists', async () => {
|
||||
await initRepo({
|
||||
|
@ -539,7 +607,7 @@ describe('platform/github', () => {
|
|||
repository: 'some/repo',
|
||||
});
|
||||
get.mockImplementationOnce(() => ({
|
||||
body: [{ number: 91, head: {} }],
|
||||
body: [{ number: 91, head: { ref: 'somebranch' }, state: 'open' }],
|
||||
}));
|
||||
get.mockImplementationOnce(() => ({
|
||||
body: {
|
||||
|
@ -550,8 +618,11 @@ describe('platform/github', () => {
|
|||
base: {
|
||||
sha: '1234',
|
||||
},
|
||||
head: { ref: 'somebranch' },
|
||||
state: 'open',
|
||||
},
|
||||
}));
|
||||
get.mockResolvedValue({ body: { object: { sha: '12345' } } });
|
||||
const pr = await github.getBranchPr('somebranch');
|
||||
expect(get.mock.calls).toMatchSnapshot();
|
||||
expect(pr).toMatchSnapshot();
|
||||
|
@ -1320,6 +1391,7 @@ describe('platform/github', () => {
|
|||
'some-branch',
|
||||
'The Title',
|
||||
'Hello world',
|
||||
null,
|
||||
true
|
||||
);
|
||||
expect(pr).toMatchSnapshot();
|
||||
|
|
test/util/__snapshots__/package-rules.spec.js.snap (new file, 57 lines)
|
@ -0,0 +1,57 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`applyPackageRules() applies 1`] = `
|
||||
Object {
|
||||
"currentValue": "1.0.0",
|
||||
"depName": "a",
|
||||
"isBump": true,
|
||||
"packageRules": Array [
|
||||
Object {
|
||||
"matchCurrentVersion": "<= 2.0.0",
|
||||
"packagePatterns": Array [
|
||||
"*",
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"matchCurrentVersion": "<= 2.0.0",
|
||||
"packageNames": Array [
|
||||
"b",
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"excludePackagePatterns": Array [
|
||||
"*",
|
||||
],
|
||||
"packageNames": Array [
|
||||
"b",
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"updateTypes": Array [
|
||||
"bump",
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"excludePackageNames": Array [
|
||||
"a",
|
||||
],
|
||||
"packageNames": Array [
|
||||
"b",
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"matchCurrentVersion": "<= 2.0.0",
|
||||
},
|
||||
],
|
||||
"updateTypes": Array [
|
||||
"bump",
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`applyPackageRules() empty rules 1`] = `
|
||||
Object {
|
||||
"foo": "bar",
|
||||
"packageRules": null,
|
||||
}
|
||||
`;
|
|
@ -17,6 +17,38 @@ describe('applyPackageRules()', () => {
|
|||
},
|
||||
],
|
||||
};
|
||||
it('applies', () => {
|
||||
const config = {
|
||||
depName: 'a',
|
||||
isBump: true,
|
||||
currentValue: '1.0.0',
|
||||
packageRules: [
|
||||
{
|
||||
packagePatterns: ['*'],
|
||||
matchCurrentVersion: '<= 2.0.0',
|
||||
},
|
||||
{
|
||||
packageNames: ['b'],
|
||||
matchCurrentVersion: '<= 2.0.0',
|
||||
},
|
||||
{
|
||||
excludePackagePatterns: ['*'],
|
||||
packageNames: ['b'],
|
||||
},
|
||||
{
|
||||
updateTypes: ['bump'],
|
||||
},
|
||||
{
|
||||
excludePackageNames: ['a'],
|
||||
packageNames: ['b'],
|
||||
},
|
||||
{
|
||||
matchCurrentVersion: '<= 2.0.0',
|
||||
},
|
||||
],
|
||||
};
|
||||
expect(applyPackageRules(config)).toMatchSnapshot();
|
||||
});
|
||||
it('applies both rules for a', () => {
|
||||
const dep = {
|
||||
depName: 'a',
|
||||
|
@ -487,4 +519,9 @@ describe('applyPackageRules()', () => {
|
|||
});
|
||||
expect(res3.x).toBeDefined();
|
||||
});
|
||||
it('empty rules', () => {
|
||||
expect(
|
||||
applyPackageRules({ ...config1, packageRules: null })
|
||||
).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -95,6 +95,14 @@ describe('semver.getNewValue()', () => {
|
|||
expect(semver.getNewValue('5.0', 'bump', '5.0.0', '5.1.7')).toEqual('5.1');
|
||||
expect(semver.getNewValue('5.0', 'bump', '5.0.0', '6.1.7')).toEqual('6.1');
|
||||
});
|
||||
it('bumps greater or equals', () => {
|
||||
expect(semver.getNewValue('>=1.0.0', 'bump', '1.0.0', '1.1.0')).toEqual(
|
||||
'>=1.1.0'
|
||||
);
|
||||
expect(semver.getNewValue('>= 1.0.0', 'bump', '1.0.0', '1.1.0')).toEqual(
|
||||
'>= 1.1.0'
|
||||
);
|
||||
});
|
||||
it('replaces equals', () => {
|
||||
expect(semver.getNewValue('=1.0.0', 'replace', '1.0.0', '1.1.0')).toEqual(
|
||||
'=1.1.0'
|
||||
|
|
|
@ -10,6 +10,7 @@ const statusChecks = require('../../../lib/workers/branch/status-checks');
|
|||
const automerge = require('../../../lib/workers/branch/automerge');
|
||||
const prWorker = require('../../../lib/workers/pr');
|
||||
const getUpdated = require('../../../lib/workers/branch/get-updated');
|
||||
const { appSlug } = require('../../../lib/config/app-strings');
|
||||
|
||||
jest.mock('../../../lib/workers/branch/get-updated');
|
||||
jest.mock('../../../lib/workers/branch/schedule');
|
||||
|
@ -18,6 +19,7 @@ jest.mock('../../../lib/workers/branch/parent');
|
|||
jest.mock('../../../lib/manager/npm/post-update');
|
||||
jest.mock('../../../lib/workers/branch/status-checks');
|
||||
jest.mock('../../../lib/workers/branch/automerge');
|
||||
jest.mock('../../../lib/workers/branch/commit');
|
||||
jest.mock('../../../lib/workers/pr');
|
||||
|
||||
describe('workers/branch', () => {
|
||||
|
@ -33,7 +35,7 @@ describe('workers/branch', () => {
|
|||
upgrades: [{ depName: 'some-dep-name' }],
|
||||
};
|
||||
schedule.isScheduledNow.mockReturnValue(true);
|
||||
commit.commitFilesToBranch = jest.fn(() => true);
|
||||
commit.commitFilesToBranch.mockReturnValue(true);
|
||||
});
|
||||
afterEach(() => {
|
||||
platform.ensureComment.mockClear();
|
||||
|
@ -168,6 +170,7 @@ describe('workers/branch', () => {
|
|||
updatedArtifacts: [],
|
||||
});
|
||||
platform.branchExists.mockReturnValueOnce(false);
|
||||
commit.commitFilesToBranch.mockReturnValueOnce(false);
|
||||
expect(await branchWorker.processBranch(config)).toEqual('no-work');
|
||||
});
|
||||
it('returns if branch automerged', async () => {
|
||||
|
@ -325,5 +328,112 @@ describe('workers/branch', () => {
|
|||
});
|
||||
await branchWorker.processBranch(config);
|
||||
});
|
||||
|
||||
it('closed pr (dry run)', async () => {
|
||||
platform.branchExists.mockReturnValueOnce(true);
|
||||
checkExisting.prAlreadyExisted.mockResolvedValueOnce({ state: 'closed' });
|
||||
expect(
|
||||
await branchWorker.processBranch({ ...config, dryRun: true })
|
||||
).toEqual('already-existed');
|
||||
});
|
||||
|
||||
it('branch pr no rebase (dry run)', async () => {
|
||||
platform.branchExists.mockReturnValueOnce(true);
|
||||
platform.getBranchPr.mockResolvedValueOnce({
|
||||
state: 'open',
|
||||
canRebase: false,
|
||||
});
|
||||
expect(
|
||||
await branchWorker.processBranch({ ...config, dryRun: true })
|
||||
).toEqual('pr-edited');
|
||||
});
|
||||
|
||||
it('branch pr no schedule lockfile (dry run)', async () => {
|
||||
getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
|
||||
updatedPackageFiles: [{}],
|
||||
artifactErrors: [{}],
|
||||
});
|
||||
npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
|
||||
artifactErrors: [],
|
||||
updatedArtifacts: [{}],
|
||||
});
|
||||
platform.branchExists.mockReturnValueOnce(true);
|
||||
platform.getBranchPr.mockResolvedValueOnce({
|
||||
title: 'rebase!',
|
||||
state: 'open',
|
||||
body: `- [x] <!-- ${appSlug}-rebase -->`,
|
||||
canRebase: false,
|
||||
});
|
||||
|
||||
schedule.isScheduledNow.mockReturnValueOnce(false);
|
||||
commit.commitFilesToBranch.mockReturnValueOnce(false);
|
||||
|
||||
expect(
|
||||
await branchWorker.processBranch({
|
||||
...config,
|
||||
dryRun: true,
|
||||
updateType: 'lockFileMaintenance',
|
||||
parentBranch: undefined,
|
||||
updatedArtifacts: [{ name: '|delete|', contents: 'dummy' }],
|
||||
})
|
||||
).toEqual('done');
|
||||
});
|
||||
|
||||
it('branch pr no schedule (dry run)', async () => {
|
||||
getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
|
||||
updatedPackageFiles: [{}],
|
||||
artifactErrors: [{}],
|
||||
});
|
||||
npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
|
||||
artifactErrors: [],
|
||||
updatedArtifacts: [{}],
|
||||
});
|
||||
platform.branchExists.mockReturnValueOnce(true);
|
||||
platform.getBranchPr.mockResolvedValueOnce({
|
||||
title: 'rebase!',
|
||||
state: 'open',
|
||||
body: `- [x] <!-- ${appSlug}-rebase -->`,
|
||||
canRebase: false,
|
||||
});
|
||||
|
||||
schedule.isScheduledNow.mockReturnValueOnce(false);
|
||||
prWorker.ensurePr.mockResolvedValueOnce({});
|
||||
expect(
|
||||
await branchWorker.processBranch({
|
||||
...config,
|
||||
dryRun: true,
|
||||
artifactErrors: [{}],
|
||||
})
|
||||
).toEqual('done');
|
||||
});
|
||||
|
||||
it('branch pr no schedule', async () => {
|
||||
getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
|
||||
updatedPackageFiles: [{}],
|
||||
artifactErrors: [],
|
||||
});
|
||||
npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
|
||||
artifactErrors: [],
|
||||
updatedArtifacts: [{}],
|
||||
});
|
||||
platform.branchExists.mockReturnValueOnce(true);
|
||||
platform.getBranchPr.mockResolvedValueOnce({
|
||||
title: 'rebase!',
|
||||
state: 'open',
|
||||
body: `- [x] <!-- ${appSlug}-rebase -->`,
|
||||
canRebase: false,
|
||||
});
|
||||
|
||||
schedule.isScheduledNow.mockReturnValueOnce(false);
|
||||
commit.commitFilesToBranch.mockReturnValueOnce(false);
|
||||
expect(
|
||||
await branchWorker.processBranch({
|
||||
...config,
|
||||
updateType: 'lockFileMaintenance',
|
||||
parentBranch: undefined,
|
||||
updatedArtifacts: [{ name: '|delete|', contents: 'dummy' }],
|
||||
})
|
||||
).toEqual('done');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -11,20 +11,16 @@ const { exec } = require('child-process-promise');
|
|||
const yarnHelper = require('../../../../lib/manager/npm/post-update/yarn');
|
||||
|
||||
describe('generateLockFile', () => {
|
||||
beforeEach(() => {
|
||||
delete process.env.YARN_MUTEX_FILE;
|
||||
jest.resetAllMocks();
|
||||
exec.mockResolvedValue({
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
});
|
||||
});
|
||||
it('generates lock files', async () => {
|
||||
getInstalledPath.mockReturnValueOnce('node_modules/yarn');
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
fs.readFile = jest.fn(() => 'package-lock-contents');
|
||||
const env = {};
|
||||
const config = {
|
||||
|
@ -36,15 +32,9 @@ describe('generateLockFile', () => {
|
|||
});
|
||||
it('performs lock file updates', async () => {
|
||||
getInstalledPath.mockReturnValueOnce('node_modules/yarn');
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
|
||||
fs.readFile = jest.fn(() => 'package-lock-contents');
|
||||
process.env.YARN_MUTEX_FILE = '/tmp/yarn.mutext';
|
||||
const res = await yarnHelper.generateLockFile('some-dir', {}, {}, [
|
||||
{ depName: 'some-dep', isLockfileUpdate: true },
|
||||
]);
|
||||
|
@ -52,14 +42,6 @@ describe('generateLockFile', () => {
|
|||
});
|
||||
it('detects yarnIntegrity', async () => {
|
||||
getInstalledPath.mockReturnValueOnce('node_modules/yarn');
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
fs.readFile = jest.fn(() => 'package-lock-contents');
|
||||
const config = {
|
||||
upgrades: [{ yarnIntegrity: true }],
|
||||
|
@ -73,7 +55,7 @@ describe('generateLockFile', () => {
|
|||
getInstalledPath.mockReturnValueOnce('node_modules/yarn');
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: 'some-error',
|
||||
stderr: 'some-error',
|
||||
});
|
||||
fs.readFile = jest.fn(() => {
|
||||
throw new Error('not found');
|
||||
|
@ -91,10 +73,6 @@ describe('generateLockFile', () => {
|
|||
getInstalledPath.mockImplementationOnce(
|
||||
() => '/node_modules/renovate/node_modules/yarn'
|
||||
);
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
fs.readFile = jest.fn(() => 'package-lock-contents');
|
||||
const res = await yarnHelper.generateLockFile('some-dir');
|
||||
expect(fs.readFile).toHaveBeenCalledTimes(1);
|
||||
|
@ -109,10 +87,6 @@ describe('generateLockFile', () => {
|
|||
throw new Error('not found');
|
||||
});
|
||||
getInstalledPath.mockImplementationOnce(() => '/node_modules/yarn');
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
fs.readFile = jest.fn(() => 'package-lock-contents');
|
||||
const res = await yarnHelper.generateLockFile('some-dir');
|
||||
expect(fs.readFile).toHaveBeenCalledTimes(1);
|
||||
|
@ -129,10 +103,6 @@ describe('generateLockFile', () => {
|
|||
getInstalledPath.mockImplementationOnce(() => {
|
||||
throw new Error('not found');
|
||||
});
|
||||
exec.mockReturnValueOnce({
|
||||
stdout: '',
|
||||
stderror: '',
|
||||
});
|
||||
fs.readFile = jest.fn(() => 'package-lock-contents');
|
||||
const res = await yarnHelper.generateLockFile('some-dir', undefined, {
|
||||
binarySource: 'global',
|
||||
|
|
|
@ -47,4 +47,27 @@ describe('lib/workers/global', () => {
|
|||
expect(configParser.parseConfigs).toHaveBeenCalledTimes(1);
|
||||
expect(repositoryWorker.renovateRepository).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
describe('processes platforms', () => {
|
||||
it('github', async () => {
|
||||
configParser.parseConfigs.mockReturnValueOnce({
|
||||
repositories: ['a'],
|
||||
platform: 'github',
|
||||
endpoint: 'https://github.com/',
|
||||
});
|
||||
await globalWorker.start();
|
||||
expect(configParser.parseConfigs).toHaveBeenCalledTimes(1);
|
||||
expect(repositoryWorker.renovateRepository).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
it('gitlab', async () => {
|
||||
configParser.parseConfigs.mockReturnValueOnce({
|
||||
repositories: [{ repository: 'a' }],
|
||||
platform: 'gitlab',
|
||||
endpoint: 'https://my.gitlab.com/',
|
||||
});
|
||||
await globalWorker.start();
|
||||
expect(configParser.parseConfigs).toHaveBeenCalledTimes(1);
|
||||
expect(repositoryWorker.renovateRepository).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -18,6 +18,7 @@ With your current configuration, Renovate will create 2 Pull Requests:
|
|||
<summary>Pin dependencies</summary>
|
||||
|
||||
- Branch name: \`renovate/pin-dependencies\`
|
||||
- Merge into: \`some-other\`
|
||||
- Pin [a](https://a) to \`1.1.0\`
|
||||
- Pin b to \`1.5.3\`
|
||||
|
||||
|
@ -28,7 +29,7 @@ With your current configuration, Renovate will create 2 Pull Requests:
|
|||
<summary>Update a to v2</summary>
|
||||
|
||||
- Branch name: \`renovate/a-2.x\`
|
||||
- Upgrade [a](https://a) to \`2.0.1\`
|
||||
- Upgrade [a](https://a) to \`undefined\`
|
||||
|
||||
|
||||
</details>
|
||||
|
|
|
@ -63,5 +63,10 @@ describe('workers/repository/onboarding/pr', () => {
|
|||
expect(platform.createPr).toHaveBeenCalledTimes(0);
|
||||
expect(platform.updatePr).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
it('creates PR (no require config)', async () => {
|
||||
config.requireConfig = false;
|
||||
await ensureOnboardingPr(config, packageFiles, branches);
|
||||
expect(platform.createPr).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -38,6 +38,7 @@ describe('workers/repository/onboarding/pr/pr-list', () => {
|
|||
const branches = [
|
||||
{
|
||||
prTitle: 'Pin dependencies',
|
||||
baseBranch: 'some-other',
|
||||
branchName: 'renovate/pin-dependencies',
|
||||
upgrades: [
|
||||
{
|
||||
|
@ -64,6 +65,7 @@ describe('workers/repository/onboarding/pr/pr-list', () => {
|
|||
currentValue: '^1.0.0',
|
||||
depType: 'devDependencies',
|
||||
newValue: '2.0.1',
|
||||
isLockfileUpdate: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -267,6 +267,49 @@ Array [
|
|||
]
|
||||
`;
|
||||
|
||||
exports[`workers/repository/process/lookup .lookupUpdates() is deprecated 1`] = `
|
||||
Object {
|
||||
"changelogUrl": undefined,
|
||||
"homepage": undefined,
|
||||
"releases": Array [
|
||||
Object {
|
||||
"canBeUnpublished": false,
|
||||
"gitRef": "b26cace16f6070e756b6a546cf2693bece03f8f8",
|
||||
"releaseTimestamp": "2015-04-26T16:42:11.311Z",
|
||||
"version": "1.3.0",
|
||||
},
|
||||
Object {
|
||||
"canBeUnpublished": false,
|
||||
"gitRef": "05e20dc704421ca820553721c7178168a8461506",
|
||||
"releaseTimestamp": "2015-05-09T16:52:40.699Z",
|
||||
"version": "1.4.0",
|
||||
},
|
||||
Object {
|
||||
"canBeUnpublished": false,
|
||||
"gitRef": "d373079d3620152e3d60e82f27265a09ee0e81bd",
|
||||
"releaseTimestamp": "2015-05-17T04:25:07.299Z",
|
||||
"version": "1.4.1",
|
||||
},
|
||||
],
|
||||
"sourceDirectory": "test",
|
||||
"sourceUrl": null,
|
||||
"updates": Array [
|
||||
Object {
|
||||
"canBeUnpublished": false,
|
||||
"fromVersion": "1.3.0",
|
||||
"isSingleVersion": true,
|
||||
"newMajor": 1,
|
||||
"newMinor": 4,
|
||||
"newValue": "1.4.1",
|
||||
"releaseTimestamp": "2015-05-17T04:25:07.299Z",
|
||||
"toVersion": "1.4.1",
|
||||
"updateType": "minor",
|
||||
},
|
||||
],
|
||||
"warnings": Array [],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`workers/repository/process/lookup .lookupUpdates() pins minor ranged versions 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
|
|
|
@ -8,6 +8,7 @@ const nextJson = require('../../../../config/npm/_fixtures/next.json');
|
|||
const vueJson = require('../../../../config/npm/_fixtures/vue.json');
|
||||
const typescriptJson = require('../../../../config/npm/_fixtures/typescript.json');
|
||||
const docker = require('../../../../../lib/datasource/docker');
|
||||
const defaults = require('../../../../../lib/config/defaults');
|
||||
|
||||
jest.mock('../../../../../lib/datasource/docker');
|
||||
|
||||
|
@ -17,7 +18,7 @@ let config;
|
|||
|
||||
describe('workers/repository/process/lookup', () => {
|
||||
beforeEach(() => {
|
||||
config = { ...require('../../../../../lib/config/defaults').getConfig() };
|
||||
config = { ...defaults.getConfig() };
|
||||
config.manager = 'npm';
|
||||
config.versionScheme = 'npm';
|
||||
config.rangeStrategy = 'replace';
|
||||
|
@ -1032,6 +1033,25 @@ describe('workers/repository/process/lookup', () => {
|
|||
expect(res.releases).toHaveLength(2);
|
||||
expect(res.updates[0].toVersion).toEqual('1.4.0');
|
||||
});
|
||||
it('is deprecated', async () => {
|
||||
config.currentValue = '1.3.0';
|
||||
config.depName = 'q3';
|
||||
config.datasource = 'npm';
|
||||
const returnJson = {
|
||||
...JSON.parse(JSON.stringify(qJson)),
|
||||
name: 'q3',
|
||||
deprecated: true,
|
||||
repository: { url: null, directory: 'test' },
|
||||
};
|
||||
|
||||
nock('https://registry.npmjs.org')
|
||||
.get('/q3')
|
||||
.reply(200, returnJson);
|
||||
const res = await lookup.lookupUpdates(config);
|
||||
expect(res).toMatchSnapshot();
|
||||
expect(res.releases).toHaveLength(3);
|
||||
expect(res.updates[0].toVersion).toEqual('1.4.1');
|
||||
});
|
||||
it('skips unsupported values', async () => {
|
||||
config.currentValue = 'alpine';
|
||||
config.depName = 'node';
|
||||
|
|
|
@ -224,6 +224,116 @@ Array [
|
|||
"warnings": Array [],
|
||||
"yarnrc": null,
|
||||
},
|
||||
Object {
|
||||
"assignees": Array [],
|
||||
"automerge": false,
|
||||
"automergeComment": "automergeComment",
|
||||
"automergeType": "pr",
|
||||
"azureAutoComplete": false,
|
||||
"azureWorkItemId": 0,
|
||||
"baseDir": null,
|
||||
"bbUseDefaultReviewers": true,
|
||||
"binarySource": "bundled",
|
||||
"branchName": "{{{branchPrefix}}}{{{managerBranchPrefix}}}{{{branchTopic}}}",
|
||||
"branchPrefix": "renovate/",
|
||||
"branchTopic": "{{{depNameSanitized}}}-{{{newMajor}}}{{#if isPatch}}.{{{newMinor}}}{{/if}}.x{{#if isLockfileUpdate}}-lockfile{{/if}}",
|
||||
"bumpVersion": null,
|
||||
"cacheDir": null,
|
||||
"commitBody": null,
|
||||
"commitMessage": "{{{commitMessagePrefix}}} {{{commitMessageAction}}} {{{commitMessageTopic}}} {{{commitMessageExtra}}} {{{commitMessageSuffix}}}",
|
||||
"commitMessageAction": "Update",
|
||||
"commitMessageExtra": "to {{#if isMajor}}v{{{newMajor}}}{{else}}{{#if isSingleVersion}}v{{{toVersion}}}{{else}}{{{newValue}}}{{/if}}{{/if}}",
|
||||
"commitMessagePrefix": null,
|
||||
"commitMessageSuffix": null,
|
||||
"commitMessageTopic": "dependency {{depName}}",
|
||||
"compatibility": Object {},
|
||||
"depNameSanitized": undefined,
|
||||
"dryRun": false,
|
||||
"errors": Array [],
|
||||
"excludeCommitPaths": Array [],
|
||||
"gitAuthor": null,
|
||||
"gitFs": null,
|
||||
"gitPrivateKey": null,
|
||||
"group": Object {
|
||||
"branchTopic": "{{{groupSlug}}}",
|
||||
"commitMessageTopic": "{{{groupName}}}",
|
||||
},
|
||||
"groupName": null,
|
||||
"groupSlug": null,
|
||||
"ignoreNpmrcFile": false,
|
||||
"labels": Array [],
|
||||
"language": "js",
|
||||
"lazyGrouping": true,
|
||||
"manager": "npm",
|
||||
"managerBranchPrefix": "",
|
||||
"masterIssue": false,
|
||||
"masterIssueApproval": false,
|
||||
"masterIssueAutoclose": false,
|
||||
"masterIssueTitle": "Update Dependencies (Renovate Bot)",
|
||||
"newValue": "2.0.0",
|
||||
"npmToken": null,
|
||||
"npmrc": null,
|
||||
"packageFile": "package.json",
|
||||
"persistRepoData": false,
|
||||
"platform": "github",
|
||||
"postUpdateOptions": Array [],
|
||||
"prBodyColumns": Array [
|
||||
"Package",
|
||||
"Type",
|
||||
"Update",
|
||||
"Change",
|
||||
"References",
|
||||
],
|
||||
"prBodyDefinitions": Object {
|
||||
"Change": "[{{#if displayFrom}}\`{{{displayFrom}}}\` -> {{else}}{{#if currentValue}}\`{{{currentValue}}}\` -> {{/if}}{{/if}}{{#if displayTo}}\`{{{displayTo}}}\`{{else}}\`{{{newValue}}}\`{{/if}}](https://diff.intrinsic.com/{{{depName}}}/{{{fromVersion}}}/{{{toVersion}}})",
|
||||
"Current value": "{{{currentValue}}}",
|
||||
"New value": "{{{newValue}}}",
|
||||
"Package": "{{{depName}}}",
|
||||
"Package file": "{{{packageFile}}}",
|
||||
"References": "{{{references}}}",
|
||||
"Type": "{{{depType}}}",
|
||||
"Update": "{{{updateType}}}",
|
||||
},
|
||||
"prBodyNotes": Array [],
|
||||
"prConcurrentLimit": 0,
|
||||
"prCreation": "immediate",
|
||||
"prHourlyLimit": 0,
|
||||
"prNotPendingHours": 25,
|
||||
"prTitle": null,
|
||||
"printConfig": false,
|
||||
"rangeStrategy": "replace",
|
||||
"rebaseLabel": "rebase",
|
||||
"rebaseStalePrs": null,
|
||||
"recreateClosed": false,
|
||||
"registryUrls": null,
|
||||
"requiredStatusChecks": Array [],
|
||||
"reviewers": Array [],
|
||||
"rollbackPrs": true,
|
||||
"schedule": "at any time",
|
||||
"semanticCommitScope": "deps",
|
||||
"semanticCommitType": "chore",
|
||||
"semanticCommits": null,
|
||||
"separateMajorMinor": true,
|
||||
"separateMinorPatch": false,
|
||||
"skipInstalls": null,
|
||||
"statusCheckVerify": false,
|
||||
"suppressNotifications": Array [],
|
||||
"timezone": null,
|
||||
"unpublishSafe": false,
|
||||
"updateLockFiles": true,
|
||||
"updateNotScheduled": true,
|
||||
"versionScheme": "npm",
|
||||
"vulnerabilityAlerts": Object {
|
||||
"commitMessageSuffix": "[SECURITY]",
|
||||
"enabled": true,
|
||||
"groupName": null,
|
||||
"masterIssueApproval": false,
|
||||
"rangeStrategy": "update-lockfile",
|
||||
"schedule": Array [],
|
||||
},
|
||||
"warnings": Array [],
|
||||
"yarnrc": null,
|
||||
},
|
||||
Object {
|
||||
"assignees": Array [],
|
||||
"automerge": false,
|
||||
|
|
|
@ -6,4 +6,41 @@ exports[`workers/repository/updates/generate generateBranchConfig() adds commit
|
|||
[skip-ci]"
|
||||
`;
|
||||
|
||||
exports[`workers/repository/updates/generate generateBranchConfig() handles @types specially (reversed) 1`] = `
|
||||
Object {
|
||||
"automerge": false,
|
||||
"blockedByPin": false,
|
||||
"branchName": "some-branch",
|
||||
"canBeUnpublished": false,
|
||||
"commitMessage": "",
|
||||
"depName": "@types/some-dep",
|
||||
"masterIssueApproval": false,
|
||||
"newValue": "0.5.7",
|
||||
"prTitle": "some-title",
|
||||
"prettyDepType": "dependency",
|
||||
"releaseTimestamp": undefined,
|
||||
"reuseLockFiles": true,
|
||||
"upgrades": Array [
|
||||
Object {
|
||||
"branchName": "some-branch",
|
||||
"commitMessage": "",
|
||||
"depName": "@types/some-dep",
|
||||
"newValue": "0.5.7",
|
||||
"prTitle": "some-title",
|
||||
"prettyDepType": "dependency",
|
||||
},
|
||||
Object {
|
||||
"branchName": "some-branch",
|
||||
"commitMessage": "",
|
||||
"depName": "some-dep",
|
||||
"newValue": "0.6.0",
|
||||
"prTitle": "some-title",
|
||||
"prettyDepType": "dependency",
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`workers/repository/updates/generate generateBranchConfig() handles upgrades 1`] = `"some-title ()"`;
|
||||
|
||||
exports[`workers/repository/updates/generate generateBranchConfig() supports manual prTitle 1`] = `"upgrade some-dep"`;
|
||||
|
|
|
@ -33,6 +33,10 @@ describe('workers/repository/updates/flatten', () => {
|
|||
deps: [
|
||||
{ depName: '@org/a', updates: [{ newValue: '1.0.0' }] },
|
||||
{ depName: 'foo', updates: [{ newValue: '2.0.0' }] },
|
||||
{
|
||||
updateTypes: ['pin'],
|
||||
updates: [{ newValue: '2.0.0' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
|
@ -69,7 +73,7 @@ describe('workers/repository/updates/flatten', () => {
|
|||
};
|
||||
const res = await flattenUpdates(config, packageFiles);
|
||||
expect(res).toMatchSnapshot();
|
||||
expect(res).toHaveLength(8);
|
||||
expect(res).toHaveLength(9);
|
||||
expect(
|
||||
res.filter(r => r.updateType === 'lockFileMaintenance')
|
||||
).toHaveLength(2);
|
||||
|
|
|
@ -461,6 +461,29 @@ describe('workers/repository/updates/generate', () => {
|
|||
expect(res.recreateClosed).toBe(false);
|
||||
expect(res.groupName).toBeUndefined();
|
||||
});
|
||||
it('handles @types specially (reversed)', () => {
|
||||
const branch = [
|
||||
{
|
||||
depName: 'some-dep',
|
||||
groupName: null,
|
||||
branchName: 'some-branch',
|
||||
prTitle: 'some-title',
|
||||
lazyGrouping: true,
|
||||
newValue: '0.6.0',
|
||||
group: {},
|
||||
},
|
||||
{
|
||||
depName: '@types/some-dep',
|
||||
groupName: null,
|
||||
branchName: 'some-branch',
|
||||
prTitle: 'some-title',
|
||||
lazyGrouping: true,
|
||||
newValue: '0.5.7',
|
||||
group: {},
|
||||
},
|
||||
];
|
||||
expect(generateBranchConfig(branch)).toMatchSnapshot();
|
||||
});
|
||||
it('overrides schedule for pin PRs', () => {
|
||||
const branch = [
|
||||
{
|
||||
|
@ -473,5 +496,52 @@ describe('workers/repository/updates/generate', () => {
|
|||
const res = generateBranchConfig(branch);
|
||||
expect(res.schedule).toEqual([]);
|
||||
});
|
||||
it('handles upgrades', () => {
|
||||
const branch = [
|
||||
{
|
||||
depName: 'some-dep',
|
||||
branchName: 'some-branch',
|
||||
prTitle: 'some-title',
|
||||
newValue: '0.6.0',
|
||||
hasBaseBranches: true,
|
||||
fileReplacePosition: 5,
|
||||
},
|
||||
{
|
||||
...defaultConfig,
|
||||
depName: 'some-dep',
|
||||
branchName: 'some-branch',
|
||||
prTitle: 'some-title',
|
||||
newValue: '0.6.0',
|
||||
isGroup: true,
|
||||
separateMinorPatch: true,
|
||||
updateType: 'minor',
|
||||
fileReplacePosition: 1,
|
||||
},
|
||||
{
|
||||
...defaultConfig,
|
||||
depName: 'some-dep',
|
||||
branchName: 'some-branch',
|
||||
prTitle: 'some-title',
|
||||
newValue: '0.6.0',
|
||||
isGroup: true,
|
||||
separateMajorMinor: true,
|
||||
updateType: 'major',
|
||||
fileReplacePosition: 2,
|
||||
},
|
||||
{
|
||||
...defaultConfig,
|
||||
depName: 'some-dep',
|
||||
branchName: 'some-branch',
|
||||
prTitle: 'some-title',
|
||||
newValue: '0.6.0',
|
||||
isGroup: true,
|
||||
separateMajorMinor: true,
|
||||
updateType: 'patch',
|
||||
fileReplacePosition: 0,
|
||||
},
|
||||
];
|
||||
const res = generateBranchConfig(branch);
|
||||
expect(res.prTitle).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|