ci/github-script: move from ci/labels
This just moves things around to use less specific naming - `labels` is only *one* script that can potentially be run locally while still being written in github-script. Later, we can add more.
This commit is contained in:
4
ci/github-script/.editorconfig
Normal file
4
ci/github-script/.editorconfig
Normal file
@@ -0,0 +1,4 @@
|
||||
# TODO: Move to <top-level>/.editorconfig, once ci/.editorconfig has made its way through staging.
|
||||
[*.cjs]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
1
ci/github-script/.gitignore
vendored
Normal file
1
ci/github-script/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
node_modules
|
||||
1
ci/github-script/.npmrc
Normal file
1
ci/github-script/.npmrc
Normal file
@@ -0,0 +1 @@
|
||||
package-lock-only = true
|
||||
13
ci/github-script/README.md
Normal file
13
ci/github-script/README.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# GitHub-specific CI scripts
|
||||
|
||||
This folder contains [`actions/github-script`](https://github.com/actions/github-script)-based JavaScript code.
|
||||
It provides a `nix-shell` environment to run and test these actions locally.
|
||||
|
||||
To run any of the scripts locally:
|
||||
|
||||
- Provide `gh` on `PATH` and make sure it's authenticated.
|
||||
- Enter `nix-shell` in `./ci/github-script`.
|
||||
|
||||
## Labeler
|
||||
|
||||
Run `./run.js OWNER REPO`, where OWNER is your username or "NixOS" and REPO is the name of your fork or "nixpkgs".
|
||||
463
ci/github-script/labels.cjs
Normal file
463
ci/github-script/labels.cjs
Normal file
@@ -0,0 +1,463 @@
/**
 * Labeler entry point, written against the `actions/github-script` calling
 * convention (see ci/github-script/README.md and run.js for local usage).
 *
 * Computes and applies labels for recently updated issues/PRs (scheduled
 * mode) or for a single PR (pull_request event mode).
 *
 * @param {object} args
 * @param {object} args.github - Authenticated Octokit client (from github-script).
 * @param {object} args.context - Workflow context: `repo` and event `payload`.
 * @param {object} args.core - Actions toolkit helper (info/error/notice/getInput/setFailed).
 * @param {boolean} args.dry - When true, compute and log labels but skip label writes
 *   and the pagination-cursor artifact upload.
 */
module.exports = async function ({ github, context, core, dry }) {
  const Bottleneck = require('bottleneck')
  const path = require('node:path')
  const { DefaultArtifactClient } = require('@actions/artifact')
  const { readFile, writeFile } = require('node:fs/promises')

  const artifactClient = new DefaultArtifactClient()

  // Run counters, reported via core.notice() at the end.
  const stats = {
    issues: 0,
    prs: 0,
    requests: 0,
    artifacts: 0,
  }

  // Rate-Limiting and Throttling, see for details:
  // https://github.com/octokit/octokit.js/issues/1069#throttling
  // https://docs.github.com/en/rest/using-the-rest-api/best-practices-for-using-the-rest-api
  const allLimits = new Bottleneck({
    // Avoid concurrent requests
    maxConcurrent: 1,
    // Will be updated with first `updateReservoir()` call below.
    reservoir: 0,
  })
  // Pause between mutative requests
  const writeLimits = new Bottleneck({ minTime: 1000 }).chain(allLimits)
  // Route every Octokit request through the limiters above.
  github.hook.wrap('request', async (request, options) => {
    // Requests to the /rate_limit endpoint do not count against the rate limit.
    if (options.url == '/rate_limit') return request(options)
    // Search requests are in a different resource group, which allows 30 requests / minute.
    // We do less than a handful each run, so not implementing throttling for now.
    if (options.url.startsWith('/search/')) return request(options)
    stats.requests++
    if (['POST', 'PUT', 'PATCH', 'DELETE'].includes(options.method))
      return writeLimits.schedule(request.bind(null, options))
    else return allLimits.schedule(request.bind(null, options))
  })

  // Refreshes the Bottleneck reservoir from the live rate-limit budget.
  async function updateReservoir() {
    let response
    try {
      response = await github.rest.rateLimit.get()
    } catch (err) {
      core.error(`Failed updating reservoir:\n${err}`)
      // Keep retrying on failed rate limit requests instead of exiting the script early.
      return
    }
    // Always keep 1000 spare requests for other jobs to do their regular duty.
    // They normally use below 100, so 1000 is *plenty* of room to work with.
    const reservoir = Math.max(0, response.data.resources.core.remaining - 1000)
    core.info(`Updating reservoir to: ${reservoir}`)
    allLimits.updateSettings({ reservoir })
  }
  await updateReservoir()
  // Update remaining requests every minute to account for other jobs running in parallel.
  // Cleared in the top-level `finally` below so the process can exit.
  const reservoirUpdater = setInterval(updateReservoir, 60 * 1000)

  // Computes the PR-specific part of the label map for `item`.
  // Returns an object of label name -> boolean (set / unset).
  async function handlePullRequest(item) {
    const log = (k, v) => core.info(`PR #${item.number} - ${k}: ${v}`)

    const pull_number = item.number

    // This API request is important for the merge-conflict label, because it triggers the
    // creation of a new test merge commit. This is needed to actually determine the state of a PR.
    const pull_request = (
      await github.rest.pulls.get({
        ...context.repo,
        pull_number,
      })
    ).data

    const reviews = await github.paginate(github.rest.pulls.listReviews, {
      ...context.repo,
      pull_number,
    })

    // User IDs of everyone with an APPROVED review.
    const approvals = new Set(
      reviews
        .filter((review) => review.state == 'APPROVED')
        .map((review) => review.user?.id),
    )

    // After creation of a Pull Request, `merge_commit_sha` will be null initially:
    // The very first merge commit will only be calculated after a little while.
    // To avoid labeling the PR as conflicted before that, we wait a few minutes.
    // This is intentionally less than the time that Eval takes, so that the label job
    // running after Eval can indeed label the PR as conflicted if that is the case.
    const merge_commit_sha_valid =
      new Date() - new Date(pull_request.created_at) > 3 * 60 * 1000

    const prLabels = {
      // We intentionally don't use the mergeable or mergeable_state attributes.
      // Those have an intermediate state while the test merge commit is created.
      // This doesn't work well for us, because we might have just triggered another
      // test merge commit creation by request the pull request via API at the start
      // of this function.
      // The attribute merge_commit_sha keeps the old value of null or the hash *until*
      // the new test merge commit has either successfully been created or failed so.
      // This essentially means we are updating the merge conflict label in two steps:
      // On the first pass of the day, we just fetch the pull request, which triggers
      // the creation. At this stage, the label is likely not updated, yet.
      // The second pass will then read the result from the first pass and set the label.
      '2.status: merge conflict':
        merge_commit_sha_valid && !pull_request.merge_commit_sha,
      '12.approvals: 1': approvals.size == 1,
      '12.approvals: 2': approvals.size == 2,
      '12.approvals: 3+': approvals.size >= 3,
      '12.first-time contribution': [
        'NONE',
        'FIRST_TIMER',
        'FIRST_TIME_CONTRIBUTOR',
      ].includes(pull_request.author_association),
    }

    // Most recent pr.yml run for this head commit, falling back to the legacy
    // eval.yml workflow, then to an empty object (both destructured fields undefined).
    const { id: run_id, conclusion } =
      (
        await github.rest.actions.listWorkflowRuns({
          ...context.repo,
          workflow_id: 'pr.yml',
          event: 'pull_request_target',
          exclude_pull_requests: true,
          head_sha: pull_request.head.sha,
        })
      ).data.workflow_runs[0] ??
      // TODO: Remove this after 2025-09-17, at which point all eval.yml artifacts will have expired.
      (
        await github.rest.actions.listWorkflowRuns({
          ...context.repo,
          // In older PRs, we need eval.yml instead of pr.yml.
          workflow_id: 'eval.yml',
          event: 'pull_request_target',
          status: 'success',
          exclude_pull_requests: true,
          head_sha: pull_request.head.sha,
        })
      ).data.workflow_runs[0] ??
      {}

    // Newer PRs might not have run Eval to completion, yet.
    // Older PRs might not have an eval.yml workflow, yet.
    // In either case we continue without fetching an artifact on a best-effort basis.
    log('Last eval run', run_id ?? '<n/a>')

    if (conclusion === 'success') {
      Object.assign(prLabels, {
        // We only set this label if the latest eval run was successful, because if it was not, it
        // *could* have requested reviewers. We will let the PR author fix CI first, before "escalating"
        // this PR to "needs: reviewer".
        // Since the first Eval run on a PR always sets rebuild labels, the same PR will be "recently
        // updated" for the next scheduled run. Thus, this label will still be set within a few minutes
        // after a PR is created, if required.
        // Note that a "requested reviewer" disappears once they have given a review, so we check
        // existing reviews, too.
        '9.needs: reviewer':
          !pull_request.draft &&
          pull_request.requested_reviewers.length == 0 &&
          reviews.length == 0,
      })
    }

    const artifact =
      run_id &&
      (
        await github.rest.actions.listWorkflowRunArtifacts({
          ...context.repo,
          run_id,
          name: 'comparison',
        })
      ).data.artifacts[0]

    // Instead of checking the boolean artifact.expired, we will give us a minute to
    // actually download the artifact in the next step and avoid that race condition.
    // Older PRs, where the workflow run was already eval.yml, but the artifact was not
    // called "comparison", yet, will skip the download.
    const expired =
      !artifact ||
      new Date(artifact?.expires_at ?? 0) <
        new Date(new Date().getTime() + 60 * 1000)
    log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
    if (!expired) {
      stats.artifacts++

      // Downloads into <CWD>/<pull_number>/; a custom token is required because
      // the artifact belongs to a different workflow run (findBy).
      await artifactClient.downloadArtifact(artifact.id, {
        findBy: {
          repositoryName: context.repo.repo,
          repositoryOwner: context.repo.owner,
          token: core.getInput('github-token'),
        },
        path: path.resolve(pull_number.toString()),
        expectedHash: artifact.digest,
      })

      // NOTE(review): assumes maintainers.json maps user IDs (as string keys)
      // to something — only the keys are used here. Confirm against the
      // workflow that produces the "comparison" artifact.
      const maintainers = new Set(
        Object.keys(
          JSON.parse(
            await readFile(`${pull_number}/maintainers.json`, 'utf-8'),
          ),
        ).map((m) => Number.parseInt(m, 10)),
      )

      const evalLabels = JSON.parse(
        await readFile(`${pull_number}/changed-paths.json`, 'utf-8'),
      ).labels

      Object.assign(
        prLabels,
        // Ignore `evalLabels` if it's an array.
        // This can happen for older eval runs, before we switched to objects.
        // The old eval labels would have been set by the eval run,
        // so now they'll be present in `before`.
        // TODO: Simplify once old eval results have expired (~2025-10)
        Array.isArray(evalLabels) ? undefined : evalLabels,
        {
          '12.approved-by: package-maintainer': Array.from(maintainers).some(
            (m) => approvals.has(m),
          ),
        },
      )
    }

    return prLabels
  }

  // Computes and applies the full label set for one issue or PR.
  // Any failure is re-thrown wrapped with the item number for context.
  async function handle(item) {
    try {
      // Logs `k: v`; returns `skip` so it can double as a guard (see below).
      const log = (k, v, skip) => {
        core.info(`#${item.number} - ${k}: ${v}` + (skip ? ' (skipped)' : ''))
        return skip
      }

      log('Last updated at', item.updated_at)
      log('URL', item.html_url)

      const issue_number = item.number

      const itemLabels = {}

      if (item.pull_request || context.payload.pull_request) {
        stats.prs++
        Object.assign(itemLabels, await handlePullRequest(item))
      } else {
        stats.issues++
      }

      // Timestamp of the most recent "activity" event, used for staleness.
      const latest_event_at = new Date(
        (
          await github.paginate(github.rest.issues.listEventsForTimeline, {
            ...context.repo,
            issue_number,
            per_page: 100,
          })
        )
          .filter(({ event }) =>
            [
              // These events are hand-picked from:
              // https://docs.github.com/en/rest/using-the-rest-api/issue-event-types?apiVersion=2022-11-28
              // Each of those causes a PR/issue to *not* be considered as stale anymore.
              // Most of these use created_at.
              'assigned',
              'commented', // uses updated_at, because that could be > created_at
              'committed', // uses committer.date
              'head_ref_force_pushed',
              'milestoned',
              'pinned',
              'ready_for_review',
              'renamed',
              'reopened',
              'review_dismissed',
              'review_requested',
              'reviewed', // uses submitted_at
              'unlocked',
              'unmarked_as_duplicate',
            ].includes(event),
          )
          .map(
            ({ created_at, updated_at, committer, submitted_at }) =>
              new Date(
                updated_at ?? created_at ?? submitted_at ?? committer.date,
              ),
          )
          // Reverse sort by date value. The default sort() sorts by string representation, which is bad for dates.
          .sort((a, b) => b - a)
          .at(0) ?? item.created_at,
      )
      log('latest_event_at', latest_event_at.toISOString())

      // Items without qualifying activity for 180 days are considered stale.
      const stale_at = new Date(new Date().setDate(new Date().getDate() - 180))

      // Create a map (Label -> Boolean) of all currently set labels.
      // Each label is set to True and can be disabled later.
      const before = Object.fromEntries(
        (
          await github.paginate(github.rest.issues.listLabelsOnIssue, {
            ...context.repo,
            issue_number,
          })
        ).map(({ name }) => [name, true]),
      )

      Object.assign(itemLabels, {
        '2.status: stale':
          !before['1.severity: security'] && latest_event_at < stale_at,
      })

      const after = Object.assign({}, before, itemLabels)

      // No need for an API request, if all labels are the same.
      const hasChanges = Object.keys(after).some(
        (name) => (before[name] ?? false) != after[name],
      )
      if (log('Has changes', hasChanges, !hasChanges)) return

      // Skipping labeling on a pull_request event, because we have no privileges.
      const labels = Object.entries(after)
        .filter(([, value]) => value)
        .map(([name]) => name)
      if (log('Set labels', labels, dry)) return

      await github.rest.issues.setLabels({
        ...context.repo,
        issue_number,
        labels,
      })
    } catch (cause) {
      throw new Error(`Labeling #${item.number} failed.`, { cause })
    }
  }

  try {
    // Single-PR mode on pull_request events; otherwise scheduled batch mode.
    if (context.payload.pull_request) {
      await handle(context.payload.pull_request)
    } else {
      const lastRun = (
        await github.rest.actions.listWorkflowRuns({
          ...context.repo,
          workflow_id: 'labels.yml',
          event: 'schedule',
          status: 'success',
          exclude_pull_requests: true,
          per_page: 1,
        })
      ).data.workflow_runs[0]

      // Go back as far as the last successful run of this workflow to make sure
      // we are not leaving anyone behind on GHA failures.
      // Defaults to go back 1 hour on the first run.
      const cutoff = new Date(
        lastRun?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000,
      )
      core.info('cutoff timestamp: ' + cutoff.toISOString())

      const updatedItems = await github.paginate(
        github.rest.search.issuesAndPullRequests,
        {
          q: [
            `repo:"${context.repo.owner}/${context.repo.repo}"`,
            'is:open',
            `updated:>=${cutoff.toISOString()}`,
          ].join(' AND '),
          // TODO: Remove in 2025-10, when it becomes the default.
          advanced_search: true,
        },
      )

      let cursor

      // No workflow run available the first time.
      if (lastRun) {
        // The cursor to iterate through the full list of issues and pull requests
        // is passed between jobs as an artifact.
        const artifact = (
          await github.rest.actions.listWorkflowRunArtifacts({
            ...context.repo,
            run_id: lastRun.id,
            name: 'pagination-cursor',
          })
        ).data.artifacts[0]

        // If the artifact is not available, the next iteration starts at the beginning.
        if (artifact) {
          stats.artifacts++

          const { downloadPath } = await artifactClient.downloadArtifact(
            artifact.id,
            {
              findBy: {
                repositoryName: context.repo.repo,
                repositoryOwner: context.repo.owner,
                token: core.getInput('github-token'),
              },
              expectedHash: artifact.digest,
            },
          )

          cursor = await readFile(path.resolve(downloadPath, 'cursor'), 'utf-8')
        }
      }

      // From GitHub's API docs:
      // GitHub's REST API considers every pull request an issue, but not every issue is a pull request.
      // For this reason, "Issues" endpoints may return both issues and pull requests in the response.
      // You can identify pull requests by the pull_request key.
      const allItems = await github.rest.issues.listForRepo({
        ...context.repo,
        state: 'open',
        sort: 'created',
        direction: 'asc',
        per_page: 100,
        after: cursor,
      })

      // Regex taken and comment adjusted from:
      // https://github.com/octokit/plugin-paginate-rest.js/blob/8e5da25f975d2f31dda6b8b588d71f2c768a8df2/src/iterator.ts#L36-L41
      // `allItems.headers.link` format:
      // <https://api.github.com/repositories/4542716/issues?page=3&per_page=100&after=Y3Vyc29yOnYyOpLPAAABl8qNnYDOvnSJxA%3D%3D>; rel="next",
      // <https://api.github.com/repositories/4542716/issues?page=1&per_page=100&before=Y3Vyc29yOnYyOpLPAAABl8xFV9DOvoouJg%3D%3D>; rel="prev"
      // Sets `next` to undefined if "next" URL is not present or `link` header is not set.
      const next = ((allItems.headers.link ?? '').match(
        /<([^<>]+)>;\s*rel="next"/,
      ) ?? [])[1]
      if (next) {
        cursor = new URL(next).searchParams.get('after')
        const uploadPath = path.resolve('cursor')
        await writeFile(uploadPath, cursor, 'utf-8')
        if (dry) {
          core.info(`pagination-cursor: ${cursor} (upload skipped)`)
        } else {
          // No stats.artifacts++, because this does not allow passing a custom token.
          // Thus, the upload will not happen with the app token, but the default github.token.
          await artifactClient.uploadArtifact(
            'pagination-cursor',
            [uploadPath],
            path.resolve('.'),
            {
              retentionDays: 1,
            },
          )
        }
      }

      // Some items might be in both search results, so filtering out duplicates as well.
      const items = []
        .concat(updatedItems, allItems.data)
        .filter(
          (thisItem, idx, arr) =>
            idx ==
            arr.findIndex((firstItem) => firstItem.number == thisItem.number),
        )

      // Label all items concurrently; report each failure without aborting the rest.
      ;(await Promise.allSettled(items.map(handle)))
        .filter(({ status }) => status == 'rejected')
        .map(({ reason }) =>
          core.setFailed(`${reason.message}\n${reason.cause.stack}`),
        )

      core.notice(
        `Processed ${stats.prs} PRs, ${stats.issues} Issues, made ${stats.requests + stats.artifacts} API requests and downloaded ${stats.artifacts} artifacts.`,
      )
    }
  } finally {
    clearInterval(reservoirUpdater)
  }
}
1897
ci/github-script/package-lock.json
generated
Normal file
1897
ci/github-script/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
9
ci/github-script/package.json
Normal file
9
ci/github-script/package.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@actions/artifact": "2.3.2",
|
||||
"@actions/github": "6.0.1",
|
||||
"bottleneck": "2.19.5"
|
||||
}
|
||||
}
|
||||
45
ci/github-script/run.js
Executable file
45
ci/github-script/run.js
Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/env node
// Local runner for the github-script based labeler (see README.md).
// Requires an authenticated `gh` CLI on PATH; always runs in dry mode.
import { execSync } from 'node:child_process'
import { mkdtempSync, rmSync } from 'node:fs'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { getOctokit } from '@actions/github'
import labels from './labels.cjs'

// argv is [node, run.js, OWNER, REPO], i.e. the user passes exactly two arguments.
if (process.argv.length !== 4)
  throw new Error('Call this with exactly two arguments: ./run.js OWNER REPO')
const [, , owner, repo] = process.argv

// Reuse the gh CLI's token so no separate credentials are needed.
const token = execSync('gh auth token', { encoding: 'utf-8' }).trim()

// Work in a throw-away directory: labels.cjs downloads artifacts relative to CWD.
const tmp = mkdtempSync(join(tmpdir(), 'labels-'))
try {
  process.env.GITHUB_WORKSPACE = tmp
  process.chdir(tmp)

  await labels({
    github: getOctokit(token),
    // Minimal stand-in for the github-script `context` object.
    context: {
      payload: {},
      repo: {
        owner,
        repo,
      },
    },
    // Minimal stub of @actions/core: just what labels.cjs calls.
    core: {
      // labels.cjs only reads the 'github-token' input; return the gh token.
      getInput() {
        return token
      },
      error: console.error,
      info: console.log,
      notice: console.log,
      setFailed(msg) {
        console.error(msg)
        process.exitCode = 1
      },
    },
    // Never write labels or upload artifacts from a local run.
    dry: true,
  })
} finally {
  rmSync(tmp, { recursive: true })
}
23
ci/github-script/shell.nix
Normal file
23
ci/github-script/shell.nix
Normal file
@@ -0,0 +1,23 @@
# Development shell for the github-script based CI scripts.
# Provides Node.js and a node_modules environment built from package-lock.json,
# so the scripts can be run and tested locally (see README.md).
{
  system ? builtins.currentSystem,
  pkgs ? (import ../. { inherit system; }).pkgs,
}:

pkgs.callPackage (
  {
    mkShell,
    importNpmLock,
    nodejs,
  }:
  mkShell {
    packages = [
      # Presumably links the pre-built npmDeps below into ./node_modules on
      # shell entry — confirm against nixpkgs importNpmLock documentation.
      importNpmLock.hooks.linkNodeModulesHook
      nodejs
    ];

    # Build node_modules from this directory's package.json / package-lock.json.
    npmDeps = importNpmLock.buildNodeModules {
      npmRoot = ./.;
      inherit nodejs;
    };
  }
) { }
Reference in New Issue
Block a user