mirror of https://github.com/renovatebot/renovate.git
synced 2025-03-13 07:43:27 +00:00

refactor: fix lint issues (#16779)

* refactor: fix lint issues
* Update lib/renovate.ts
* chore: revert toplevel await for ts

This commit is contained in:
parent f1bcafc584
commit 8270d5512d

65 changed files with 239 additions and 138 deletions
lib
  modules
    datasource/crate
    manager
      npm/update/package-version
      pip_requirements
      pipenv
      poetry
      terraform
    platform
    versioning
  util
  workers
    global
    repository
      config-migration
      dependency-dashboard.ts
      extract
      finalise
      init
      onboarding
      package-files.ts
      process
      result.ts
      update
        branch
        pr
      updates
tools
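Most of the hunks below follow a few recurring patterns: possibly-undefined values inside template literals receive a non-null assertion plus a "TODO: types (#7154)" marker to satisfy the @typescript-eslint/restrict-template-expressions rule, and the deprecated String.prototype.substr is replaced with substring. A minimal TypeScript sketch of both patterns follows; the function and variable names are illustrative, not taken from the diff.

// Hypothetical example; names are illustrative, not from this commit.
function describeDep(depName?: string, currentValue?: string): string {
  // restrict-template-expressions rejects possibly-undefined values in
  // template literals; the commit appeases it with non-null assertions and a
  // "TODO: types (#7154)" marker instead of widening the underlying types.
  // TODO: types (#7154)
  const label = `Checking ${depName!} at ${currentValue!}`;

  // Deprecated substr(0, 7) becomes substring(0, 7).
  const shortName = depName!.substring(0, 7);

  return `${label} (${shortName})`;
}

// Example usage:
console.log(describeDep('renovate', '32.0.0'));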
@@ -208,7 +208,7 @@ export class CrateDatasource extends Datasource {
const host = url.hostname;
const hash = hasha(url.pathname, {
algorithm: 'sha256',
- }).substr(0, 7);
+ }).substring(0, 7);

return `crate-registry-${proto}-${host}-${hash}`;
}

@@ -12,6 +12,7 @@ export function bumpPackageVersion(
{ bumpVersion, currentValue },
'Checking if we should bump package.json version'
);
+ // TODO: types (#7154)
let newPjVersion: string | null;
let bumpedContent = content;
try {
@@ -33,7 +34,7 @@ export function bumpPackageVersion(
logger.debug({ newPjVersion });
bumpedContent = content.replace(
regEx(`(?<version>"version":\\s*")[^"]*`),
- `$<version>${newPjVersion}`
+ `$<version>${newPjVersion!}`
);
if (bumpedContent === content) {
logger.debug('Version was already bumped');
@@ -27,7 +27,8 @@ export async function updateArtifacts({
for (const dep of updatedDeps) {
const hashLine = lines.find(
(line) =>
- line.startsWith(`${dep.depName}==`) && line.includes('--hash=')
+ // TODO: types (#7154)
+ line.startsWith(`${dep.depName!}==`) && line.includes('--hash=')
);
if (hashLine) {
const depConstraint = hashLine.split(' ')[0];

@@ -47,7 +47,7 @@ function getPythonConstraint(
function getPipenvConstraint(
existingLockFileContent: string,
config: UpdateArtifactsConfig
- ): string | null {
+ ): string {
const { constraints = {} } = config;
const { pipenv } = constraints;
@@ -63,7 +63,7 @@ function extractFromSection(
const specifierMatches = specifierRegex.exec(currentValue!);
if (!specifierMatches) {
logger.debug(
- `Skipping dependency with malformed version specifier "${currentValue}".`
+ `Skipping dependency with malformed version specifier "${currentValue!}".`
);
skipReason = 'invalid-version';
}

@@ -8,7 +8,8 @@ export function updateLockedDependency(
const { depName, currentVersion, newVersion, lockFile, lockFileContent } =
config;
logger.debug(
- `poetry.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+ // TODO: types (#7154)
+ `poetry.updateLockedDependency: ${depName}@${currentVersion!} -> ${newVersion} [${lockFile}]`
);
const locked = extractLockFileEntries(lockFileContent ?? '');
if (depName && locked[depName] === newVersion) {
@@ -56,7 +56,7 @@ export function massageProviderLookupName(dep: PackageDependency): void {

// TODO #7154
if (!dep.packageName!.includes('/')) {
- dep.packageName = `hashicorp/${dep.packageName}`;
+ dep.packageName = `hashicorp/${dep.packageName!}`;
}

// handle cases like `Telmate/proxmox`

@@ -18,7 +18,7 @@ export async function getRefs(
repoId: string,
branchName?: string
): Promise<GitRef[]> {
- logger.debug(`getRefs(${repoId}, ${branchName})`);
+ logger.debug(`getRefs(${repoId}, ${branchName!})`);
const azureApiGit = await azureApi.gitApi();
const refs = await azureApiGit.getRefs(
repoId,
@@ -123,7 +123,8 @@ export async function getMergeMethod(
const isRelevantScope = (scope: Scope): boolean => {
if (
scope.matchKind === 'DefaultBranch' &&
- (!branchRef || branchRef === `refs/heads/${defaultBranch}`)
+ // TODO: types (#7154)
+ (!branchRef || branchRef === `refs/heads/${defaultBranch!}`)
) {
return true;
}
@@ -149,7 +150,8 @@ export async function getMergeMethod(
.map((p) => p.settings)[0];

logger.trace(
- `getMergeMethod(${repoId}, ${project}, ${branchRef}) determining mergeMethod from matched policy:\n${JSON.stringify(
+ // TODO: types (#7154)
+ `getMergeMethod(${repoId}, ${project}, ${branchRef!}) determining mergeMethod from matched policy:\n${JSON.stringify(
policyConfigurations,
null,
4

@@ -113,6 +113,8 @@ export async function getRepos(): Promise<string[]> {
logger.debug('Autodiscovering Azure DevOps repositories');
const azureApiGit = await azureApi.gitApi();
const repos = await azureApiGit.getRepositories();
+ // TODO: types (#7154)
+ // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
return repos.map((repo) => `${repo.project?.name}/${repo.name}`);
}

@@ -211,7 +213,8 @@ export async function initRepo({
hostType: defaults.hostType,
url: defaults.endpoint,
});
- const manualUrl = `${defaults.endpoint}${encodeURIComponent(
+ // TODO: types (#7154)
+ const manualUrl = `${defaults.endpoint!}${encodeURIComponent(
projectName
)}/_git/${encodeURIComponent(repoName)}`;
const url = repo.remoteUrl ?? manualUrl;
@@ -514,7 +517,7 @@ export async function ensureComment({
topic,
content,
}: EnsureCommentConfig): Promise<boolean> {
- logger.debug(`ensureComment(${number}, ${topic}, content)`);
+ logger.debug(`ensureComment(${number}, ${topic!}, content)`);
const header = topic ? `### ${topic}\n\n` : '';
const body = `${header}${sanitize(content)}`;
const azureApiGit = await azureApi.gitApi();
@@ -630,7 +633,7 @@ export async function setBranchStatus({
url: targetUrl,
}: BranchStatusConfig): Promise<void> {
logger.debug(
- `setBranchStatus(${branchName}, ${context}, ${description}, ${state}, ${targetUrl})`
+ `setBranchStatus(${branchName}, ${context}, ${description}, ${state}, ${targetUrl!})`
);
const azureApiGit = await azureApi.gitApi();
const branch = await azureApiGit.getBranch(
@@ -656,7 +659,7 @@ export async function mergePr({
branchName,
id: pullRequestId,
}: MergePRConfig): Promise<boolean> {
- logger.debug(`mergePr(${pullRequestId}, ${branchName})`);
+ logger.debug(`mergePr(${pullRequestId}, ${branchName!})`);
const azureApiGit = await azureApi.gitApi();

let pr = await azureApiGit.getPullRequestById(pullRequestId, config.project);
@@ -685,6 +688,7 @@ export async function mergePr({
`Updating PR ${pullRequestId} to status ${PullRequestStatus.Completed} (${
PullRequestStatus[PullRequestStatus.Completed]
}) with lastMergeSourceCommit ${
+ // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
pr.lastMergeSourceCommit?.commitId
} using mergeStrategy ${mergeMethod} (${
GitPullRequestMergeStrategy[mergeMethod]
@@ -26,7 +26,8 @@ export function getGitStatusContextCombinedName(
return undefined;
}
const combinedName = `${context.genre ? `${context.genre}/` : ''}${
- context.name
+ // TODO: types (#7154)
+ context.name!
}`;
logger.trace(`Got combined context name of ${combinedName}`);
return combinedName;
@@ -55,7 +56,7 @@ export function getBranchNameWithoutRefsheadsPrefix(
branchPath: string | undefined
): string | undefined {
if (!branchPath) {
- logger.error(`getBranchNameWithoutRefsheadsPrefix(${branchPath})`);
+ logger.error(`getBranchNameWithoutRefsheadsPrefix(undefined)`);
return undefined;
}
if (!branchPath.startsWith('refs/heads/')) {
@@ -71,7+72,7 @@ export function getBranchNameWithoutRefsPrefix(
branchPath?: string
): string | undefined {
if (!branchPath) {
- logger.error(`getBranchNameWithoutRefsPrefix(${branchPath})`);
+ logger.error(`getBranchNameWithoutRefsPrefix(undefined)`);
return undefined;
}
if (!branchPath.startsWith('refs/')) {
@@ -90,7 +91,8 @@ const stateMap = {

export function getRenovatePRFormat(azurePr: GitPullRequest): AzurePr {
const number = azurePr.pullRequestId;
- const displayNumber = `Pull Request #${number}`;
+ // TODO: types (#7154)
+ const displayNumber = `Pull Request #${number!}`;

const sourceBranch = getBranchNameWithoutRefsheadsPrefix(
azurePr.sourceRefName
@@ -325,7 +325,7 @@ export async function findPr({
state = PrState.All,
refreshCache,
}: FindPRConfig): Promise<Pr | null> {
- logger.debug(`findPr(${branchName}, "${prTitle}", "${state}")`);
+ logger.debug(`findPr(${branchName}, "${prTitle!}", "${state}")`);
const prList = await getPrList(refreshCache);
const pr = prList.find(isRelevantPr(branchName, prTitle, state));
if (pr) {
@@ -362,7 +362,8 @@ async function getStatus(

return (
await bitbucketServerHttp.getJson<utils.BitbucketCommitStatus>(
- `./rest/build-status/1.0/commits/stats/${branchCommit}`,
+ // TODO: types (#7154)
+ `./rest/build-status/1.0/commits/stats/${branchCommit!}`,
{
useCache,
}
@@ -410,7 +411,8 @@ function getStatusCheck(
const branchCommit = git.getBranchCommit(branchName);

return utils.accumulateValues(
- `./rest/build-status/1.0/commits/${branchCommit}`,
+ // TODO: types (#7154)
+ `./rest/build-status/1.0/commits/${branchCommit!}`,
'get',
{ useCache }
);
@@ -483,7 +485,8 @@ export async function setBranchStatus({
}

await bitbucketServerHttp.postJson(
- `./rest/build-status/1.0/commits/${branchCommit}`,
+ // TODO: types (#7154)
+ `./rest/build-status/1.0/commits/${branchCommit!}`,
{ body }
);

@@ -936,7 +939,7 @@ export async function mergePr({
branchName,
id: prNo,
}: MergePRConfig): Promise<boolean> {
- logger.debug(`mergePr(${prNo}, ${branchName})`);
+ logger.debug(`mergePr(${prNo}, ${branchName!})`);
// Used for "automerge" feature
try {
const pr = await getPr(prNo);
@@ -944,7 +947,10 @@ export async function mergePr({
throw Object.assign(new Error(REPOSITORY_NOT_FOUND), { statusCode: 404 });
}
const { body } = await bitbucketServerHttp.postJson<{ version: number }>(
- `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/merge?version=${pr.version}`
+ // TODO: types (#7154)
+ `./rest/api/1.0/projects/${config.projectKey}/repos/${
+ config.repositorySlug
+ }/pull-requests/${prNo}/merge?version=${pr.version!}`
);
updatePrVersion(prNo, body.version);
} catch (err) {
@@ -450,7 +450,7 @@ const platform: Platform = {
prTitle: title,
state = PrState.All,
}: FindPRConfig): Promise<Pr | null> {
- logger.debug(`findPr(${branchName}, ${title}, ${state})`);
+ logger.debug(`findPr(${branchName}, ${title!}, ${state})`);
const prList = await platform.getPrList();
const pr = prList.find(
(p) =>
@@ -606,7 +606,8 @@ const platform: Platform = {
if (!issue) {
return null;
}
- logger.debug(`Found Issue #${issue.number}`);
+ // TODO: types (#7154)
+ logger.debug(`Found Issue #${issue.number!}`);
// TODO #7154
return getIssue!(issue.number!);
},
@@ -656,7 +657,8 @@ const platform: Platform = {
// Close any duplicate issues
for (const issue of issues) {
if (issue.state === 'open' && issue.number !== activeIssue.number) {
- logger.warn(`Closing duplicate Issue #${issue.number}`);
+ // TODO: types (#7154)
+ logger.warn(`Closing duplicate Issue #${issue.number!}`);
// TODO #7154
await helper.closeIssue(config.repository, issue.number!);
}
@@ -669,13 +671,15 @@ const platform: Platform = {
activeIssue.state === 'open'
) {
logger.debug(
- `Issue #${activeIssue.number} is open and up to date - nothing to do`
+ // TODO: types (#7154)
+ `Issue #${activeIssue.number!} is open and up to date - nothing to do`
);
return null;
}

// Update issue body and re-open if enabled
- logger.debug(`Updating Issue #${activeIssue.number}`);
+ // TODO: types (#7154)
+ logger.debug(`Updating Issue #${activeIssue.number!}`);
const existingIssue = await helper.updateIssue(
config.repository,
// TODO #7154
@@ -223,7 +223,8 @@ function getRepoUrl(
const newPathname = pathname.slice(0, pathname.indexOf('/api'));
const url = URL.format({
protocol: protocol.slice(0, -1) || 'https',
- auth: `oauth2:${opts.token}`,
+ // TODO: types (#7154)
+ auth: `oauth2:${opts.token!}`,
host,
pathname: newPathname + '/' + repository + '.git',
});
@@ -233,7 +234,8 @@ function getRepoUrl(

logger.debug({ url: res.body.http_url_to_repo }, `using http URL`);
const repoUrl = URL.parse(`${res.body.http_url_to_repo}`);
- repoUrl.auth = `oauth2:${opts.token}`;
+ // TODO: types (#7154)
+ repoUrl.auth = `oauth2:${opts.token!}`;
return URL.format(repoUrl);
}

@@ -356,7 +358,10 @@ async function getStatus(
): Promise<GitlabBranchStatus[]> {
const branchSha = git.getBranchCommit(branchName);
try {
- const url = `projects/${config.repository}/repository/commits/${branchSha}/statuses`;
+ // TODO: types (#7154)
+ const url = `projects/${
+ config.repository
+ }/repository/commits/${branchSha!}/statuses`;

return (
await gitlabApi.getJson<GitlabBranchStatus[]>(url, {
@@ -713,7 +718,7 @@ export async function findPr({
prTitle,
state = PrState.All,
}: FindPRConfig): Promise<Pr | null> {
- logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`);
+ logger.debug(`findPr(${branchName}, ${prTitle!}, ${state})`);
const prList = await getPrList();
return (
prList.find(
@@ -760,7 +765,8 @@ export async function setBranchStatus({
// First, get the branch commit SHA
const branchSha = git.getBranchCommit(branchName);
// Now, check the statuses for that commit
- const url = `projects/${config.repository}/statuses/${branchSha}`;
+ // TODO: types (#7154)
+ const url = `projects/${config.repository}/statuses/${branchSha!}`;
let state = 'success';
if (renovateState === BranchStatus.yellow) {
state = 'pending';
@@ -1092,14 +1098,15 @@ export async function ensureComment({
let body: string;
let commentId: number | undefined;
let commentNeedsUpdating: boolean | undefined;
+ // TODO: types (#7154)
if (topic) {
- logger.debug(`Ensuring comment "${massagedTopic}" in #${number}`);
+ logger.debug(`Ensuring comment "${massagedTopic!}" in #${number}`);
body = `### ${topic}\n\n${sanitizedContent}`;
body = body
.replace(regEx(/Pull Request/g), 'Merge Request')
.replace(regEx(/PR/g), 'MR');
comments.forEach((comment: { body: string; id: number }) => {
- if (comment.body.startsWith(`### ${massagedTopic}\n\n`)) {
+ if (comment.body.startsWith(`### ${massagedTopic!}\n\n`)) {
commentId = comment.id;
commentNeedsUpdating = comment.body !== body;
}
@@ -185,22 +185,25 @@ function getNewValue({
if (isVersion(currentValue)) {
newValue = newVersion;
} else if (regEx(/^[~^](0\.[1-9][0-9]*)$/).test(currentValue)) {
- const operator = currentValue.substr(0, 1);
+ const operator = currentValue.substring(0, 1);
// handle ~0.4 case first
if (toMajor === 0) {
- newValue = `${operator}0.${toMinor}`;
+ // TODO: types (#7154)
+ newValue = `${operator}0.${toMinor!}`;
} else {
- newValue = `${operator}${toMajor}.0`;
+ // TODO: types (#7154)
+ newValue = `${operator}${toMajor!}.0`;
}
} else if (regEx(/^[~^]([0-9]*)$/).test(currentValue)) {
// handle ~4 case
- const operator = currentValue.substr(0, 1);
- newValue = `${operator}${toMajor}`;
+ const operator = currentValue.substring(0, 1);
+ // TODO: types (#7154)
+ newValue = `${operator}${toMajor!}`;
} else if (
toMajor &&
regEx(/^[~^]([0-9]*(?:\.[0-9]*)?)$/).test(currentValue)
) {
- const operator = currentValue.substr(0, 1);
+ const operator = currentValue.substring(0, 1);
// handle ~4.1 case
if ((currentMajor && toMajor > currentMajor) || !toMinor) {
newValue = `${operator}${toMajor}.0`;
@@ -1,3 +1,5 @@
+ // TODO: types (#7154)
+ /* eslint-disable @typescript-eslint/restrict-template-expressions */
import * as semver from 'semver';
import { SemVer, parseRange } from 'semver-utils';
import { logger } from '../../../logger';

@@ -36,6 +36,7 @@ export function findScheduleForCodename(

export function findScheduleForVersion(version: string): NodeJsSchedule | null {
const major = semver.getMajor(version);
- const schedule = nodeSchedule[`v${major}`];
+ // TODO: types (#7154)
+ const schedule = nodeSchedule[`v${major!}`];
return schedule;
}
@@ -93,21 +93,24 @@ export function getNewValue({
// TODO fix this
const splitCurrent = currentValue.split(element.operator);
splitCurrent.pop();
- return `${splitCurrent.join(element.operator)}${newValue}`;
+ // TODO: types (#7154)
+ return `${splitCurrent.join(element.operator)}${newValue!}`;
}
if (parsedRange.length > 1) {
const previousElement = parsedRange[parsedRange.length - 2];
if (previousElement.operator === '-') {
const splitCurrent = currentValue.split('-');
splitCurrent.pop();
- return `${splitCurrent.join('-')}- ${newValue}`;
+ // TODO: types (#7154)
+ return `${splitCurrent.join('-')}- ${newValue!}`;
}
if (element.operator?.startsWith('>')) {
logger.warn(`Complex ranges ending in greater than are not supported`);
return null;
}
}
- return `${currentValue} || ${newValue}`;
+ // TODO: types (#7154)
+ return `${currentValue} || ${newValue!}`;
}
const toVersionMajor = major(newVersion);
const toVersionMinor = minor(newVersion);
@@ -235,7 +238,8 @@ export function getNewValue({
const newMajor = toVersionMajor + 1;
res = `<${newMajor}.0.0`;
} else if (element.patch) {
- res = `<${increment(newVersion, 'patch')}`;
+ // TODO: types (#7154)
+ res = `<${increment(newVersion, 'patch')!}`;
} else if (element.minor) {
res = `<${toVersionMajor}.${toVersionMinor + 1}`;
} else {
@@ -18,7 +18,7 @@ export async function prefetchDockerImage(taggedImage: string): Promise<void> {
logger.debug(
`Docker image is already prefetched: ${taggedImage}@${prefetchedImages.get(
taggedImage
- )}`
+ )!}`
);
} else {
logger.debug(`Fetching Docker image: ${taggedImage}`);

@@ -16,7 +16,8 @@ export function getGitAuthenticatedEnvironmentVariables(
): NodeJS.ProcessEnv {
if (!token) {
logger.warn(
- `Could not create environment variable for ${matchHost} as token was empty`
+ // TODO: types (#7154)
+ `Could not create environment variable for ${matchHost!} as token was empty`
);
return { ...environmentVariables };
}
@@ -2,6 +2,8 @@ import URL from 'url';
import is from '@sindresorhus/is';
import delay from 'delay';
import fs from 'fs-extra';
+ // TODO: check if bug is fixed (#7154)
+ // eslint-disable-next-line import/no-named-as-default
import simpleGit, {
Options,
ResetMode,
@@ -197,7 +199,8 @@ async function fetchBranchCommits(): Promise<void> {
const opts = ['ls-remote', '--heads', config.url];
if (config.extraCloneOpts) {
Object.entries(config.extraCloneOpts).forEach((e) =>
- opts.unshift(e[0], `${e[1]}`)
+ // TODO: types (#7154)
+ opts.unshift(e[0], `${e[1]!}`)
);
}
try {
@@ -270,7 +273,7 @@ export function setGitAuthor(gitAuthor: string | undefined): void {
const error = new Error(CONFIG_VALIDATION);
error.validationSource = 'None';
error.validationError = 'Invalid gitAuthor';
- error.validationMessage = `gitAuthor is not parsed as valid RFC5322 format: ${gitAuthor}`;
+ error.validationMessage = `gitAuthor is not parsed as valid RFC5322 format: ${gitAuthor!}`;
throw error;
}
config.gitAuthorName = gitAuthorParsed.name;
@@ -378,7 +381,8 @@ export async function syncGit(): Promise<void> {
}
if (config.extraCloneOpts) {
Object.entries(config.extraCloneOpts).forEach((e) =>
- opts.push(e[0], `${e[1]}`)
+ // TODO: types (#7154)
+ opts.push(e[0], `${e[1]!}`)
);
}
const emptyDirAndClone = async (): Promise<void> => {
@@ -475,7 +479,8 @@ export async function getBranchParentSha(
}

try {
- parentSha = await git.revparse([`${branchSha}^`]);
+ // TODO: branchSha can be null (#7154)
+ parentSha = await git.revparse([`${branchSha!}^`]);
return parentSha;
} catch (err) {
logger.debug({ err }, 'Error getting branch parent sha');
@@ -1082,7 +1087,8 @@ export function getUrl({
repository: string;
}): string {
if (protocol === 'ssh') {
- return `git@${hostname}:${repository}.git`;
+ // TODO: types (#7154)
+ return `git@${hostname!}:${repository}.git`;
}
return URL.format({
protocol: protocol ?? 'https',

@@ -48,6 +48,7 @@ export async function configSigningKey(cwd: string): Promise<void> {
return;
}
logger.debug('Configuring commits signing');
- await exec(`git config user.signingkey ${keyId}`, { cwd });
+ // TODO: types (#7154)
+ await exec(`git config user.signingkey ${keyId!}`, { cwd });
await exec(`git config commit.gpgsign true`, { cwd });
}

@@ -47,7 +47,8 @@ export function add(params: HostRule): void {
confidentialFields.forEach((field) => {
if (rule[field]) {
logger.debug(
- `Adding ${field} authentication for ${rule.matchHost} to hostRules`
+ // TODO: types (#7154)
+ `Adding ${field} authentication for ${rule.matchHost!} to hostRules`
);
}
});
@@ -18,9 +18,9 @@ export function replaceAt(
): string {
logger.trace(`Replacing ${oldString} with ${newString} at index ${index}`);
return (
- content.substr(0, index) +
+ content.substring(0, index) +
newString +
- content.substr(index + oldString.length)
+ content.substring(index + oldString.length)
);
}

@@ -53,7 +53,7 @@ export async function getConfig(env: NodeJS.ProcessEnv): Promise<AllConfig> {
} catch (err) {
// istanbul ignore if
if (err instanceof SyntaxError || err instanceof TypeError) {
- logger.fatal(`Could not parse config file \n ${err.stack}`);
+ logger.fatal(`Could not parse config file \n ${err.stack!}`);
process.exit(1);
} else if (err instanceof ReferenceError) {
logger.fatal(
@@ -59,7 +59,8 @@ export async function parseConfigs(
// istanbul ignore if
if (config.logFile) {
logger.debug(
- `Enabling ${config.logFileLevel} logging to ${config.logFile}`
+ // TODO: types (#7154)
+ `Enabling ${config.logFileLevel!} logging to ${config.logFile}`
);
await ensureDir(getParentDir(config.logFile));
addStream({

@@ -31,7 +31,8 @@ export async function getRepositoryConfig(
globalConfig,
is.string(repository) ? { repository } : repository
);
- const platform = GlobalConfig.get('platform');
+ // TODO: types (#7154)
+ const platform = GlobalConfig.get('platform')!;
repoConfig.localDir = upath.join(
repoConfig.baseDir,
`./repos/${platform}/${repoConfig.repository}`

@@ -21,7 +21,7 @@ export function resetAllLimits(): void {
export function setMaxLimit(key: Limit, val: unknown): void {
const max = typeof val === 'number' ? Math.max(0, val) : null;
limits.set(key, { current: 0, max });
- logger.debug(`${key} limit = ${max}`);
+ logger.debug(`${key} limit = ${max!}`);
}

export function incLimitedValue(key: Limit, incBy = 1): void {
@@ -51,7 +51,8 @@ describe('workers/repository/config-migration/branch/index', () => {
platform.refreshPr = jest.fn().mockResolvedValueOnce(null);
mockedFunction(rebaseMigrationBranch).mockResolvedValueOnce('committed');
const res = await checkConfigMigrationBranch(config, migratedData);
- expect(res).toBe(`${config.branchPrefix}migrate-config`);
+ // TODO: types (#7154)
+ expect(res).toBe(`${config.branchPrefix!}migrate-config`);
expect(git.checkoutBranch).toHaveBeenCalledTimes(1);
expect(git.commitFiles).toHaveBeenCalledTimes(0);
expect(logger.debug).toHaveBeenCalledWith(
@@ -66,7 +67,8 @@ describe('workers/repository/config-migration/branch/index', () => {
platform.getBranchPr.mockResolvedValueOnce(mock<Pr>());
mockedFunction(rebaseMigrationBranch).mockResolvedValueOnce('committed');
const res = await checkConfigMigrationBranch(config, migratedData);
- expect(res).toBe(`${config.branchPrefix}migrate-config`);
+ // TODO: types (#7154)
+ expect(res).toBe(`${config.branchPrefix!}migrate-config`);
expect(git.checkoutBranch).toHaveBeenCalledTimes(0);
expect(git.commitFiles).toHaveBeenCalledTimes(0);
});
@@ -76,7 +78,8 @@ describe('workers/repository/config-migration/branch/index', () => {
'committed'
);
const res = await checkConfigMigrationBranch(config, migratedData);
- expect(res).toBe(`${config.branchPrefix}migrate-config`);
+ // TODO: types (#7154)
+ expect(res).toBe(`${config.branchPrefix!}migrate-config`);
expect(git.checkoutBranch).toHaveBeenCalledTimes(1);
expect(git.commitFiles).toHaveBeenCalledTimes(0);
expect(logger.debug).toHaveBeenCalledWith('Need to create migration PR');
@@ -90,7 +93,8 @@ describe('workers/repository/config-migration/branch/index', () => {
'committed'
);
const res = await checkConfigMigrationBranch(config, migratedData);
- expect(res).toBe(`${config.branchPrefix}migrate-config`);
+ // TODO: types (#7154)
+ expect(res).toBe(`${config.branchPrefix!}migrate-config`);
expect(git.checkoutBranch).toHaveBeenCalledTimes(0);
expect(git.commitFiles).toHaveBeenCalledTimes(0);
});
@@ -48,6 +48,8 @@ ${
}

:question: Got questions? Does something look wrong to you? Please don't hesitate to [request help here](${
+ // TODO: types (#7154)
+ // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
config.productLinks?.help
}).\n\n`
);

@@ -60,12 +60,14 @@ function getListItem(branch: BranchConfig, type: string): string {
let item = ' - [ ] ';
item += `<!-- ${type}-branch=${branch.branchName} -->`;
if (branch.prNo) {
- item += `[${branch.prTitle}](../pull/${branch.prNo})`;
+ // TODO: types (#7154)
+ item += `[${branch.prTitle!}](../pull/${branch.prNo})`;
} else {
item += branch.prTitle;
}
const uniquePackages = [
- ...new Set(branch.upgrades.map((upgrade) => `\`${upgrade.depName}\``)),
+ // TODO: types (#7154)
+ ...new Set(branch.upgrades.map((upgrade) => `\`${upgrade.depName!}\``)),
];
if (uniquePackages.length < 2) {
return item + '\n';

@@ -54,9 +54,9 @@ export function getMatchingFiles(
const fileList = getFilteredFileList(config, allFiles);
const { fileMatch, manager } = config;
let matchedFiles: string[] = [];
- // TODO #7154
+ // TODO: types (#7154)
for (const match of fileMatch!) {
- logger.debug(`Using file match: ${match} for manager ${manager}`);
+ logger.debug(`Using file match: ${match} for manager ${manager!}`);
const re = regEx(match);
matchedFiles = matchedFiles.concat(
fileList.filter((file) => re.test(file))
@@ -94,12 +94,13 @@ export async function pruneStaleBranches(
): Promise<void> {
logger.debug('Removing any stale branches');
logger.trace({ config }, `pruneStaleBranches`);
- logger.debug(`config.repoIsOnboarded=${config.repoIsOnboarded}`);
+ // TODO: types (#7154)
+ logger.debug(`config.repoIsOnboarded=${config.repoIsOnboarded!}`);
if (!branchList) {
logger.debug('No branchList');
return;
}
- // TODO #7154
+ // TODO: types (#7154)
let renovateBranches = getBranchList().filter((branchName) =>
branchName.startsWith(config.branchPrefix!)
);
@@ -114,7 +115,8 @@ export async function pruneStaleBranches(
},
'Branch lists'
);
- const lockFileBranch = `${config.branchPrefix}lock-file-maintenance`;
+ // TODO: types (#7154)
+ const lockFileBranch = `${config.branchPrefix!}lock-file-maintenance`;
renovateBranches = renovateBranches.filter(
(branch) => branch !== lockFileBranch
);

@@ -19,7 +19,8 @@ function initializeConfig(config: RenovateConfig): RenovateConfig {

function warnOnUnsupportedOptions(config: RenovateConfig): void {
if (config.filterUnavailableUsers && !platform.filterUnavailableUsers) {
- const platform = GlobalConfig.get('platform');
+ // TODO: types (#7154)
+ const platform = GlobalConfig.get('platform')!;
logger.warn(
`Configuration option 'filterUnavailableUsers' is not supported on the current platform '${platform}'.`
);
@@ -190,9 +190,10 @@ export async function detectVulnerabilityAlerts(
} catch (err) /* istanbul ignore next */ {
logger.warn({ err }, 'Error generating vulnerability PR notes');
}
+ // TODO: types (#7154)
const allowedVersions =
datasource === PypiDatasource.id
- ? `==${val.firstPatchedVersion}`
+ ? `==${val.firstPatchedVersion!}`
: val.firstPatchedVersion;
let matchRule: PackageRule = {
matchDatasources: [datasource],

@@ -40,7 +40,8 @@ async function getOnboardingConfig(

if (!orgPreset) {
// Check for org/.{{platform}}
- const platform = GlobalConfig.get('platform');
+ // TODO: types (#7154)
+ const platform = GlobalConfig.get('platform')!;
try {
const repo = `${orgName}/.${platform}`;
const presetName = 'renovate-config';

@@ -33,9 +33,9 @@ export function getConfigDesc(
config: RenovateConfig,
packageFiles?: Record<string, PackageFile[]>
): string {
- // TODO #7154
+ // TODO: type (#7154)
const configFile = configFileNames.includes(config.onboardingConfigFileName!)
- ? config.onboardingConfigFileName
+ ? config.onboardingConfigFileName!
: defaultConfigFile;
logger.debug('getConfigDesc()');
logger.trace({ config });
@@ -76,7 +76,8 @@ If you need any further assistance then you can also [request help here](${
let files: string[] = [];
for (const [manager, managerFiles] of Object.entries(packageFiles)) {
files = files.concat(
- managerFiles.map((file) => ` * \`${file.packageFile}\` (${manager})`)
+ // TODO: types (#7154)
+ managerFiles.map((file) => ` * \`${file.packageFile!}\` (${manager})`)
);
}
prBody =
@@ -89,7 +90,8 @@ If you need any further assistance then you can also [request help here](${
}
let configDesc = '';
if (GlobalConfig.get('dryRun')) {
- logger.info(`DRY-RUN: Would check branch ${config.onboardingBranch}`);
+ // TODO: types (#7154)
+ logger.info(`DRY-RUN: Would check branch ${config.onboardingBranch!}`);
} else if (await isBranchModified(config.onboardingBranch!)) {
configDesc = emojify(
`### Configuration\n\n:abcd: Renovate has detected a custom config for this PR. Feel free to ask for [help](${
@@ -133,7 +135,8 @@ If you need any further assistance then you can also [request help here](${
// Check if existing PR needs updating
const prBodyHash = hashBody(prBody);
if (existingPr.bodyStruct?.hash === prBodyHash) {
- logger.debug(`${existingPr.displayNumber} does not need updating`);
+ // TODO: types (#7154)
+ logger.debug(`${existingPr.displayNumber!} does not need updating`);
return;
}
// PR must need updating
@@ -43,13 +43,15 @@ export function getPrList(
text += ' - Upgrade ';
}
if (upgrade.sourceUrl) {
- text += `[${upgrade.depName}](${upgrade.sourceUrl})`;
+ // TODO: types (#7154)
+ text += `[${upgrade.depName!}](${upgrade.sourceUrl})`;
} else {
text += upgrade.depName!.replace(prTitleRe, '@$1');
}
+ // TODO: types (#7154)
text += upgrade.isLockfileUpdate
- ? ` to \`${upgrade.newVersion}\``
- : ` to \`${upgrade.newDigest ?? upgrade.newValue}\``;
+ ? ` to \`${upgrade.newVersion!}\``
+ : ` to \`${upgrade.newDigest ?? upgrade.newValue!}\``;
text += '\n';
}
if (!seen.includes(text)) {
@@ -60,7 +62,7 @@ export function getPrList(
prDesc += '\n\n';
prDesc += '</details>\n\n';
}
- // TODO #7154
+ // TODO: type (#7154)
const prHourlyLimit = config.prHourlyLimit!;
if (
prHourlyLimit > 0 &&

@@ -52,7 +52,8 @@ export class PackageFiles {
for (const manager of managers) {
deps += `<details><summary>${manager}</summary>\n<blockquote>\n\n`;
for (const packageFile of packageFiles[manager]) {
- deps += `<details><summary>${packageFile.packageFile}</summary>\n\n`;
+ // TODO: types (#7154)
+ deps += `<details><summary>${packageFile.packageFile!}</summary>\n\n`;
for (const dep of packageFile.deps) {
const ver = dep.currentValue;
const digest = dep.currentDigest;
@@ -60,7 +61,8 @@ export class PackageFiles {
ver && digest
? `${ver}@${digest}`
: `${digest ?? ver ?? placeHolder}`;
- deps += ` - \`${dep.depName} ${version}\`\n`;
+ // TODO: types (#7154)
+ deps += ` - \`${dep.depName!} ${version}\`\n`;
}
deps += '\n</details>\n\n';
}

@@ -69,7 +69,8 @@ async function fetchDepUpdates(
dep.warnings ??= [];
dep.warnings.push({
topic: 'Lookup Error',
- message: `${depName}: ${cause.message}`,
+ // TODO: types (#7154)
+ message: `${depName!}: ${cause.message}`,
});
}
}

@@ -34,11 +34,12 @@ async function getBaseBranchConfig(

// Retrieve config file name autodetected for this repo
const cache = getCache();
- const configFileName = cache.configFileName;
+ // TODO: types (#7154)
+ const configFileName = cache.configFileName!;

try {
baseBranchConfig = await platform.getJsonFile(
- configFileName!,
+ configFileName,
config.repository,
baseBranch
);
@@ -50,10 +50,10 @@ export function filterVersions(
const versionRelease = releases.find(
(release) => release.version === v.version
);
- // TODO #7154
+ // TODO: types (#7154)
if (versionRelease!.isDeprecated) {
logger.trace(
- `Skipping ${config.depName}@${v.version} because it is deprecated`
+ `Skipping ${config.depName!}@${v.version} because it is deprecated`
);
return false;
}

@@ -146,7 +146,8 @@ export async function lookupUpdates(
if (!rollback) {
res.warnings.push({
topic: depName,
- message: `Can't find version matching ${currentValue} for ${depName}`,
+ // TODO: types (#7154)
+ message: `Can't find version matching ${currentValue!} for ${depName}`,
});
return res;
}

@@ -79,7 +79,8 @@ export function processResult(
status = 'unknown';
}
logger.debug(
- `Repository result: ${res}, status: ${status}, enabled: ${enabled}, onboarded: ${onboarded}`
+ // TODO: types (#7154)
+ `Repository result: ${res}, status: ${status}, enabled: ${enabled!}, onboarded: ${onboarded!}`
);
- return { res, status, enabled: enabled, onboarded };
+ return { res, status, enabled, onboarded };
}
@@ -82,7 +82,8 @@ export async function confirmIfDepUpdated(
}

function getDepsSignature(deps: PackageDependency[]): string {
- return deps.map((dep) => `${dep.depName}${dep.packageName}`).join(',');
+ // TODO: types (#7154)
+ return deps.map((dep) => `${dep.depName!}${dep.packageName!}`).join(',');
}

export async function checkBranchDepsMatchBaseDeps(

@@ -40,7 +40,8 @@ export async function tryBranchAutomerge(
logger.debug(`Automerging branch`);
try {
if (GlobalConfig.get('dryRun')) {
- logger.info(`DRY-RUN: Would automerge branch ${config.branchName}`);
+ // TODO: types (#7154)
+ logger.info(`DRY-RUN: Would automerge branch ${config.branchName!}`);
} else {
await mergeBranch(config.branchName!);
}

@@ -369,7 +369,8 @@ export async function processBranch(
} else {
config = { ...config, ...(await shouldReuseExistingBranch(config)) };
}
- logger.debug(`Using reuseExistingBranch: ${config.reuseExistingBranch}`);
+ // TODO: types (#7154)
+ logger.debug(`Using reuseExistingBranch: ${config.reuseExistingBranch!}`);
const res = await getUpdatedPackageFiles(config);
// istanbul ignore if
if (res.artifactErrors && config.artifactErrors) {
@@ -696,9 +697,10 @@ export async function processBranch(
content +=
' - you rename this PR\'s title to start with "rebase!" to trigger it manually';
content += '\n\nThe artifact failure details are included below:\n\n';
+ // TODO: types (#7154)
config.artifactErrors.forEach((error) => {
- content += `##### File name: ${error.lockFile}\n\n`;
- content += `\`\`\`\n${error.stderr}\n\`\`\`\n\n`;
+ content += `##### File name: ${error.lockFile!}\n\n`;
+ content += `\`\`\`\n${error.stderr!}\n\`\`\`\n\n`;
});
content = platform.massageMarkdown(content);
if (

@@ -118,7 +118,8 @@ export function isScheduledNow(
): boolean {
let configSchedule = config[scheduleKey];
logger.debug(
- `Checking schedule(${String(configSchedule)}, ${config.timezone})`
+ // TODO: types (#7154)
+ `Checking schedule(${String(configSchedule)}, ${config.timezone!})`
);
if (
!configSchedule ||

@@ -96,7 +96,8 @@ export async function checkAutoMerge(
};
}
if (automergeType === 'pr-comment') {
- logger.debug(`Applying automerge comment: ${automergeComment}`);
+ // TODO: types (#7154)
+ logger.debug(`Applying automerge comment: ${automergeComment!}`);
// istanbul ignore if
if (GlobalConfig.get('dryRun')) {
logger.info(
@@ -124,15 +125,19 @@ export async function checkAutoMerge(
// Let's merge this
// istanbul ignore if
if (GlobalConfig.get('dryRun')) {
+ // TODO: types (#7154)
logger.info(
- `DRY-RUN: Would merge PR #${pr.number} with strategy "${automergeStrategy}"`
+ `DRY-RUN: Would merge PR #${
+ pr.number
+ } with strategy "${automergeStrategy!}"`
);
return {
automerged: false,
prAutomergeBlockReason: PrAutomergeBlockReason.DryRun,
};
}
- logger.debug(`Automerging #${pr.number} with strategy ${automergeStrategy}`);
+ // TODO: types (#7154)
+ logger.debug(`Automerging #${pr.number} with strategy ${automergeStrategy!}`);
const res = await platform.mergePr({
branchName,
id: pr.number,

@@ -22,9 +22,10 @@ export function getChangelogs(config: BranchConfig): string {

for (const upgrade of config.upgrades) {
if (upgrade.hasReleaseNotes && upgrade.repoName) {
+ // TODO: types (#7154)
upgrade.releaseNotesSummaryTitle = `${upgrade.repoName}${
countReleaseNodesByRepoName[upgrade.repoName] > 1
- ? ` (${upgrade.depName})`
+ ? ` (${upgrade.depName!})`
: ''
}`;
}
@@ -43,6 +43,8 @@ export async function getPrConfigDescription(
prBody += `, or you tick the rebase/retry checkbox.\n\n`;
if (config.recreateClosed) {
prBody += emojify(
+ // TODO: types (#7154)
+ // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`:ghost: **Immortal**: This PR will be recreated if closed unmerged. Get [config help](${config.productLinks?.help}) if that's undesired.\n\n`
);
} else {

@@ -21,7 +21,8 @@ function massageUpdateMetadata(config: BranchConfig): void {
changelogUrl,
dependencyUrl,
} = upgrade;
- let depNameLinked = upgrade.depName;
+ // TODO: types (#7154)
+ let depNameLinked = upgrade.depName!;
const primaryLink = homepage ?? sourceUrl ?? dependencyUrl;
if (primaryLink) {
depNameLinked = `[${depNameLinked}](${primaryLink})`;

@@ -56,7 +56,10 @@ export function getCachedReleaseList(
project: ChangeLogProject,
release: ChangeLogRelease
): Promise<ChangeLogNotes[]> {
- const cacheKey = `getReleaseList-${project.apiBaseUrl}-${project.repository}`;
+ // TODO: types (#7154)
+ const cacheKey = `getReleaseList-${project.apiBaseUrl!}-${
+ project.repository
+ }`;
const cachedResult = memCache.get<Promise<ChangeLogNotes[]>>(cacheKey);
// istanbul ignore if
if (cachedResult !== undefined) {
@@ -105,7 +108,8 @@ export async function getReleaseNotes(
): Promise<ChangeLogNotes | null> {
const { depName, repository } = project;
const { version, gitRef } = release;
- logger.trace(`getReleaseNotes(${repository}, ${version}, ${depName})`);
+ // TODO: types (#7154)
+ logger.trace(`getReleaseNotes(${repository}, ${version}, ${depName!})`);
const releases = await getCachedReleaseList(project, release);
logger.trace({ releases }, 'Release list from getReleaseList');
let releaseNotes: ChangeLogNotes | null = null;
@@ -160,9 +164,10 @@ async function releaseNotesResult(
// there is a ready link
releaseNotes.url = releaseMatch.url;
} else {
+ // TODO: types (#7154)
releaseNotes.url = baseUrl.includes('gitlab')
- ? `${baseUrl}${repository}/tags/${releaseMatch.tag}`
- : `${baseUrl}${repository}/releases/${releaseMatch.tag}`;
+ ? `${baseUrl}${repository}/tags/${releaseMatch.tag!}`
+ : `${baseUrl}${repository}/releases/${releaseMatch.tag!}`;
}
// set body for release notes
releaseNotes.body = massageBody(releaseNotes.body, baseUrl);
@@ -189,7 +194,7 @@ function sectionize(text: string, level: number): string[] {
const tokens = markdown.parse(text, undefined);
tokens.forEach((token) => {
if (token.type === 'heading_open') {
- const lev = +token.tag.substr(1);
+ const lev = +token.tag.substring(1);
if (lev <= level) {
sections.push([lev, token.map![0]]);
}
@@ -262,9 +267,10 @@ export async function getReleaseNotesMdFileInner(
export function getReleaseNotesMdFile(
project: ChangeLogProject
): Promise<ChangeLogFile | null> {
+ // TODO: types (#7154)
const cacheKey = `getReleaseNotesMdFile@v2-${project.repository}${
project.sourceDirectory ? `-${project.sourceDirectory}` : ''
- }-${project.apiBaseUrl}`;
+ }-${project.apiBaseUrl!}`;
const cachedResult = memCache.get<Promise<ChangeLogFile | null>>(cacheKey);
// istanbul ignore if
if (cachedResult !== undefined) {
@@ -46,7 +46,8 @@ export async function getChangeLogJSON(
}
const version = allVersioning.get(versioning);
const { protocol, host, pathname } = URL.parse(sourceUrl);
- const baseUrl = `${protocol}//${host}/`;
+ // TODO: types (#7154)
+ const baseUrl = `${protocol!}//${host!}/`;
const url = sourceUrl.startsWith('https://github.com/')
? 'https://api.github.com/'
: sourceUrl;

@@ -188,16 +188,20 @@ export async function ensurePr(
function getRepoNameWithSourceDirectory(
upgrade: BranchUpgradeConfig
): string {
- return `${upgrade.repoName}${
+ // TODO: types (#7154)
+ return `${upgrade.repoName!}${
upgrade.sourceDirectory ? `:${upgrade.sourceDirectory}` : ''
}`;
}

// Get changelog and then generate template strings
for (const upgrade of upgrades) {
- const upgradeKey = `${upgrade.depType}-${upgrade.depName}-${
+ // TODO: types (#7154)
+ const upgradeKey = `${upgrade.depType!}-${upgrade.depName!}-${
upgrade.manager
- }-${upgrade.currentVersion ?? upgrade.currentValue}-${upgrade.newVersion}`;
+ }-${
+ upgrade.currentVersion ?? upgrade.currentValue!
+ }-${upgrade.newVersion!}`;
if (processedUpgrades.includes(upgradeKey)) {
continue;
}
@@ -248,7 +252,8 @@ export async function ensurePr(
for (const upgrade of config.upgrades) {
let notesSourceUrl = upgrade.releases?.[0]?.releaseNotes?.notesSourceUrl;
if (!notesSourceUrl) {
- notesSourceUrl = `${upgrade.sourceUrl}${
+ // TODO: types (#7154)
+ notesSourceUrl = `${upgrade.sourceUrl!}${
upgrade.sourceDirectory ? `:${upgrade.sourceDirectory}` : ''
}`;
}
@@ -292,7 +297,8 @@ export async function ensurePr(
existingPrTitle === newPrTitle &&
existingPrBodyHash === newPrBodyHash
) {
- logger.debug(`${existingPr.displayNumber} does not need updating`);
+ // TODO: types (#7154)
+ logger.debug(`${existingPr.displayNumber!} does not need updating`);
return { type: 'with-pr', pr: existingPr };
}
// PR must need updating
@@ -414,7 +420,8 @@ export async function ensurePr(
} else {
await addParticipants(config, pr);
}
- logger.debug(`Created ${pr.displayNumber}`);
+ // TODO: types (#7154)
+ logger.debug(`Created ${pr.displayNumber!}`);
return { type: 'with-pr', pr };
}
} catch (err) {
@@ -35,8 +35,9 @@ export function generateBranchName(update: RenovateConfig): void {
// Check whether to use a group name
if (update.groupName) {
logger.debug('Using group branchName template');
+ // TODO: types (#7154)
logger.debug(
- `Dependency ${update.depName} is part of group ${update.groupName}`
+ `Dependency ${update.depName!} is part of group ${update.groupName}`
);
update.groupSlug = slugify(update.groupSlug ?? update.groupName, {
lower: true,
@@ -83,7 +84,8 @@ export function generateBranchName(update: RenovateConfig): void {

const hash = hasha(hashInput);

- update.branchName = `${update.branchPrefix}${hash.slice(0, hashLength)}`;
+ // TODO: types (#7154)
+ update.branchName = `${update.branchPrefix!}${hash.slice(0, hashLength)}`;
} else {
update.branchName = template.compile(update.branchName!, update);
@@ -53,7 +53,8 @@ export async function branchifyUpgrades(
.filter((upgrade) => {
const { manager, packageFile, depName, currentValue, newValue } =
upgrade;
- const upgradeKey = `${packageFile}:${depName}:${currentValue}`;
+ // TODO: types (#7154)
+ const upgradeKey = `${packageFile!}:${depName!}:${currentValue!}`;
const previousNewValue = seenUpdates[upgradeKey];
if (previousNewValue && previousNewValue !== newValue) {
logger.info(
@@ -79,7 +80,8 @@ export async function branchifyUpgrades(
branches.push(branch);
}
removeMeta(['branch']);
- logger.debug(`config.repoIsOnboarded=${config.repoIsOnboarded}`);
+ // TODO: types (#7154)
+ logger.debug(`config.repoIsOnboarded=${config.repoIsOnboarded!}`);
const branchList = config.repoIsOnboarded
? branches.map((upgrade) => upgrade.branchName)
: config.branchList;

@@ -13,7 +13,7 @@ import type { BranchUpgradeConfig } from '../../types';
import { generateBranchName } from './branch-name';

const upper = (str: string): string =>
- str.charAt(0).toUpperCase() + str.substr(1);
+ str.charAt(0).toUpperCase() + str.substring(1);

function sanitizeDepName(depName: string): string {
return depName
@@ -130,7 +130,7 @@ export function generateBranchConfig(
if (pendingVersionsLength) {
upgrade.displayPending = `\`${upgrade
.pendingVersions!.slice(-1)
- .pop()}\``;
+ .pop()!}\``;
if (pendingVersionsLength > 1) {
upgrade.displayPending += ` (+${pendingVersionsLength - 1})`;
}
@@ -56,8 +56,7 @@ async function processFile(file) {
});
}

- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- (async () => {
+ await (async () => {
const files = await glob(markdownGlob);

for (const file of files) {

@@ -1,11 +1,11 @@
import semver from 'semver';
import shell from 'shelljs';
- import simpleGit from 'simple-git';
+ import { simpleGit } from 'simple-git';

const GIT_MINIMUM_VERSION = '2.33.0';
const git = simpleGit();
- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- (async () => {
+
+ await (async () => {
try {
const regex = /\d+\.\d+\.\d+/;
const stdout = await git.raw('--version');
@@ -1,7 +1,6 @@
import shell from 'shelljs';

- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- (async () => {
+ await (async () => {
shell.echo('-n', 'Checking re2 ... ');
try {
const { default: RE2 } = await import('re2');

@@ -2,8 +2,7 @@ import { tmpdir } from 'os';
import { remove } from 'fs-extra';
import upath from 'upath';

- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- (async () => {
+ await (async () => {
const tmpDir = process.env.RENOVATE_TMPDIR ?? tmpdir();
const renovateDir = upath.join(tmpDir, 'renovate');
// eslint-disable-next-line no-console
@@ -74,7 +74,7 @@ async function update(url, file) {
await updateJsonFile(file, json);
}

- (async () => {
+ await (async () => {
await update(ubuntuUrl, `./data/ubuntu-distro-info.json`);
await update(debianUrl, `./data/debian-distro-info.json`);
- })().catch(() => 'obligatory catch');
+ })();

@@ -173,8 +173,7 @@ async function generateHash() {
}
}

- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- (async () => {
+ await (async () => {
try {
// data-files
await generateData();
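The tools and script hunks above drop the eslint-disable-next-line @typescript-eslint/no-floating-promises comment and await the async IIFE instead, so the entry-point promise is no longer left floating. A short sketch, assuming an ES-module context with top-level await support; doWork is an illustrative placeholder, not a function from the repository.

// Hypothetical example; doWork stands in for a script's real entry point.
async function doWork(): Promise<void> {
  console.log('working');
}

// Before: a floating promise, silenced with an eslint-disable comment.
// // eslint-disable-next-line @typescript-eslint/no-floating-promises
// (async () => {
//   await doWork();
// })();

// After: the IIFE is awaited at the top level, so rejections propagate.
await (async () => {
  await doWork();
})();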