ci: apply unsafe fixes with biome

Wolfgang Walther
2025-08-20 15:01:39 +02:00
parent 1fa55d3900
commit 23b82b3228
3 changed files with 26 additions and 29 deletions
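
Biome separates safe fixes, which it applies by default, from unsafe fixes, which can change program behaviour (for example rewriting == to ===) and therefore have to be requested explicitly. As a rough sketch only, an invocation along these lines produces the changes below, assuming a Biome 2.x CLI; the exact flags, configuration and target paths are not part of this commit:

    npx biome check --write --unsafe .

The resulting fixes are mechanical: loose equality (==, !=) becomes strict equality (===, !==), new Date().getTime() becomes Date.now(), string concatenation becomes template literals, and two now-unused require lines are dropped.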

View File

@@ -1,7 +1,5 @@
module.exports = async ({ github, context, core, dry }) => {
const { execFileSync } = require('node:child_process')
-const { readFile } = require('node:fs/promises')
-const { join } = require('node:path')
const { classify } = require('../supportedBranches.js')
const withRateLimit = require('./withRateLimit.js')
@@ -18,7 +16,7 @@ module.exports = async ({ github, context, core, dry }) => {
run_id: context.runId,
per_page: 100,
})
-).find(({ name }) => name == 'Check / cherry-pick').html_url +
+).find(({ name }) => name === 'Check / cherry-pick').html_url +
'?pr=' +
pull_number
@@ -178,7 +176,7 @@ module.exports = async ({ github, context, core, dry }) => {
// Only create step summary below in case of warnings or errors.
// Also clean up older reviews, when all checks are good now.
-if (results.every(({ severity }) => severity == 'info')) {
+if (results.every(({ severity }) => severity === 'info')) {
if (!dry) {
await Promise.all(
(
@@ -187,9 +185,9 @@ module.exports = async ({ github, context, core, dry }) => {
pull_number,
})
)
-.filter((review) => review.user.login == 'github-actions[bot]')
+.filter((review) => review.user.login === 'github-actions[bot]')
.map(async (review) => {
-if (review.state == 'CHANGES_REQUESTED') {
+if (review.state === 'CHANGES_REQUESTED') {
await github.rest.pulls.dismissReview({
...context.repo,
pull_number,
@@ -215,7 +213,7 @@ module.exports = async ({ github, context, core, dry }) => {
// In the case of "error" severity, we also fail the job.
// Those should be considered blocking and not be dismissable via review.
-if (results.some(({ severity }) => severity == 'error'))
+if (results.some(({ severity }) => severity === 'error'))
process.exitCode = 1
core.summary.addRaw(
@@ -272,7 +270,7 @@ module.exports = async ({ github, context, core, dry }) => {
)
results.forEach(({ severity, message, diff }) => {
-if (severity == 'info') return
+if (severity === 'info') return
// The docs for markdown alerts only show examples with markdown blockquote syntax, like this:
// > [!WARNING]
@@ -336,18 +334,18 @@ module.exports = async ({ github, context, core, dry }) => {
})
).find(
(review) =>
-review.user.login == 'github-actions[bot]' &&
+review.user.login === 'github-actions[bot]' &&
// If a review is still pending, we can just update this instead
// of posting a new one.
-(review.state == 'CHANGES_REQUESTED' ||
+(review.state === 'CHANGES_REQUESTED' ||
// No need to post a new review, if an older one with the exact
// same content had already been dismissed.
-review.body == body),
+review.body === body),
)
if (dry) {
if (pendingReview)
-core.info('pending review found: ' + pendingReview.html_url)
+core.info(`pending review found: ${pendingReview.html_url}`)
else core.info('no pending review found')
} else {
// Either of those two requests could fail for very long comments. This can only happen
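
The bulk of this first file's changes swap loose equality for strict equality. Biome marks that fix as unsafe because == applies type coercion, so the two operators are not interchangeable in general; a small illustration, not taken from this repository:

    // Loose equality coerces operands, which can make unrelated values compare equal:
    0 == ''                    // true
    null == undefined          // true
    // Strict equality compares type and value:
    0 === ''                   // false
    'APPROVED' === 'APPROVED'  // true

For comparisons between two strings, as in review.state === 'CHANGES_REQUESTED' above, both operators behave identically, which is why applying the unsafe fix is harmless in this code.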

View File

@@ -27,7 +27,7 @@ module.exports = async ({ github, context, core, dry }) => {
const approvals = new Set(
reviews
-.filter((review) => review.state == 'APPROVED')
+.filter((review) => review.state === 'APPROVED')
.map((review) => review.user?.id),
)
@@ -37,7 +37,7 @@ module.exports = async ({ github, context, core, dry }) => {
// This is intentionally less than the time that Eval takes, so that the label job
// running after Eval can indeed label the PR as conflicted if that is the case.
const merge_commit_sha_valid =
-new Date() - new Date(pull_request.created_at) > 3 * 60 * 1000
+Date.now() - new Date(pull_request.created_at) > 3 * 60 * 1000
const prLabels = {
// We intentionally don't use the mergeable or mergeable_state attributes.
@@ -53,8 +53,8 @@ module.exports = async ({ github, context, core, dry }) => {
// The second pass will then read the result from the first pass and set the label.
'2.status: merge conflict':
merge_commit_sha_valid && !pull_request.merge_commit_sha,
-'12.approvals: 1': approvals.size == 1,
-'12.approvals: 2': approvals.size == 2,
+'12.approvals: 1': approvals.size === 1,
+'12.approvals: 2': approvals.size === 2,
'12.approvals: 3+': approvals.size >= 3,
'12.first-time contribution': [
'NONE',
@@ -104,8 +104,8 @@ module.exports = async ({ github, context, core, dry }) => {
// existing reviews, too.
'9.needs: reviewer':
!pull_request.draft &&
-pull_request.requested_reviewers.length == 0 &&
-reviews.length == 0,
+pull_request.requested_reviewers.length === 0 &&
+reviews.length === 0,
})
}
@@ -125,8 +125,7 @@ module.exports = async ({ github, context, core, dry }) => {
// called "comparison", yet, will skip the download.
const expired =
!artifact ||
-new Date(artifact?.expires_at ?? 0) <
-new Date(new Date().getTime() + 60 * 1000)
+new Date(artifact?.expires_at ?? 0) < new Date(Date.now() + 60 * 1000)
log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
if (!expired) {
stats.artifacts++
@@ -175,7 +174,7 @@ module.exports = async ({ github, context, core, dry }) => {
async function handle({ item, stats }) {
try {
const log = (k, v, skip) => {
-core.info(`#${item.number} - ${k}: ${v}` + (skip ? ' (skipped)' : ''))
+core.info(`#${item.number} - ${k}: ${v}${skip ? ' (skipped)' : ''}`)
return skip
}
@@ -257,7 +256,7 @@ module.exports = async ({ github, context, core, dry }) => {
// No need for an API request, if all labels are the same.
const hasChanges = Object.keys(after).some(
-(name) => (before[name] ?? false) != after[name],
+(name) => (before[name] ?? false) !== after[name],
)
if (log('Has changes', hasChanges, !hasChanges)) return
@@ -298,14 +297,14 @@ module.exports = async ({ github, context, core, dry }) => {
// we are not leaving anyone behind on GHA failures.
// Defaults to go back 1 hour on the first run.
new Date(
-lastRun?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000,
+lastRun?.created_at ?? Date.now() - 1 * 60 * 60 * 1000,
).getTime(),
// Go back max. 1 day to prevent hitting all API rate limits immediately,
// when GH API returns a wrong workflow by accident.
-new Date().getTime() - 24 * 60 * 60 * 1000,
+Date.now() - 24 * 60 * 60 * 1000,
),
)
-core.info('cutoff timestamp: ' + cutoff.toISOString())
+core.info(`cutoff timestamp: ${cutoff.toISOString()}`)
const updatedItems = await github.paginate(
github.rest.search.issuesAndPullRequests,
@@ -402,12 +401,12 @@ module.exports = async ({ github, context, core, dry }) => {
.concat(updatedItems, allItems.data)
.filter(
(thisItem, idx, arr) =>
-idx ==
-arr.findIndex((firstItem) => firstItem.number == thisItem.number),
+idx ===
+arr.findIndex((firstItem) => firstItem.number === thisItem.number),
)
;(await Promise.allSettled(items.map((item) => handle({ item, stats }))))
-.filter(({ status }) => status == 'rejected')
+.filter(({ status }) => status === 'rejected')
.map(({ reason }) =>
core.setFailed(`${reason.message}\n${reason.cause.stack}`),
)
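
Several changes in this second file replace new Date().getTime(), or arithmetic on Date objects, with Date.now(). All of these spellings produce the same millisecond timestamp; a quick sketch, not from this repository:

    const pullRequestCreatedAt = '2025-08-20T13:01:39Z' // placeholder ISO timestamp
    const a = new Date().getTime()   // current Unix time in ms
    const b = Date.now()             // same value, without allocating a Date
    // Subtracting a Date coerces it to its timestamp, so this is equivalent as well:
    const ageMs = Date.now() - new Date(pullRequestCreatedAt).getTime()

Here pullRequestCreatedAt stands in for any ISO date string such as pull_request.created_at; Date.now() is preferred because it avoids constructing a throwaway Date object.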

View File

@@ -23,7 +23,7 @@ module.exports = async ({ github, core }, callback) => {
// Requests to a different host do not count against the rate limit.
if (options.url.startsWith('https://github.com')) return request(options)
// Requests to the /rate_limit endpoint do not count against the rate limit.
-if (options.url == '/rate_limit') return request(options)
+if (options.url === '/rate_limit') return request(options)
// Search requests are in a different resource group, which allows 30 requests / minute.
// We do less than a handful each run, so not implementing throttling for now.
if (options.url.startsWith('/search/')) return request(options)
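
This last file wraps every Octokit request so the scripts can keep an eye on the REST rate limit; only the pass-through checks are visible in this diff. A rough sketch of how such a wrapper can be attached, assuming Octokit's hook.wrap API; the real withRateLimit.js and its actual throttling logic are not shown in this commit, so the body below is illustrative only:

    module.exports = async ({ github, core }, callback) => {
      github.hook.wrap('request', async (request, options) => {
        // Requests that do not count against the primary rate limit are forwarded unchanged.
        if (options.url.startsWith('https://github.com')) return request(options)
        if (options.url === '/rate_limit') return request(options)
        if (options.url.startsWith('/search/')) return request(options)
        // Everything else: look at the remaining budget before forwarding.
        const { data } = await github.rest.rateLimit.get()
        if (data.resources.core.remaining === 0)
          core.warning('primary rate limit exhausted, request is likely to fail')
        return request(options)
      })
      return callback()
    }

The '/rate_limit' check above also prevents the budget lookup itself from recursing through the wrapper, which is exactly the case the original comment calls out.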