From d2eeb384dfb21a3993b0c722d1a5d36a4b747982 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 21 Jun 2024 21:06:17 -0700
Subject: [PATCH 01/10] build(deps-dev): bump braces from 3.0.2 to 3.0.3 (#146)
Bumps [braces](https://github.com/micromatch/braces) from 3.0.2 to
3.0.3.
Commits
[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=braces&package-manager=npm_and_yarn&previous-version=3.0.2&new-version=3.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
You can disable automated security fix PRs for this repo from the
[Security Alerts
page](https://github.com/actions/create-github-app-token/network/alerts).
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
package-lock.json | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index b2a25de..00047b9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "create-github-app-token",
- "version": "1.10.0",
+ "version": "1.10.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "create-github-app-token",
- "version": "1.10.0",
+ "version": "1.10.1",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.1",
@@ -994,12 +994,12 @@
}
},
"node_modules/braces": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
- "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+ "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"dependencies": {
- "fill-range": "^7.0.1"
+ "fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@@ -1668,9 +1668,9 @@
"dev": true
},
"node_modules/fill-range": {
- "version": "7.0.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
- "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+ "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
From 66a70456860bafc79e37635eea77b8b2a929f6c8 Mon Sep 17 00:00:00 2001
From: Wechuli
Date: Wed, 26 Jun 2024 04:10:54 +0300
Subject: [PATCH 02/10] fix: do not revoke token if already expired (#147)
Closes #140
The pull request at #95 introduced changes to avoid revoking expired
tokens by saving the `expiresAt` value in the state. The change,
however, used `core.setOutput` instead of `core.saveState`, meaning the
value is not saved in the state but is instead exposed as an output.
```javascript
if (!skipTokenRevoke) {
core.saveState("token", authentication.token);
core.setOutput("expiresAt", authentication.expiresAt);
}
```
This means that when we use the value downstream, it evaluates to an
empty string and the following code block is never run:
```javascript
const expiresAt = core.getState("expiresAt");
if (expiresAt && tokenExpiresIn(expiresAt) < 0) {
core.info("Token expired, skipping token revocation");
return;
}
```
This is a tiny PR to correct that typo.
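For context, the guard above relies on `tokenExpiresIn` reporting how much lifetime the token has left. Its implementation is not shown in this patch, but a minimal sketch of such a helper (an assumption, not necessarily the repository's actual code) could look like this:
```javascript
// Hypothetical sketch: number of seconds until `expiresAt` (an ISO 8601 timestamp).
// A negative result means the token has already expired, which is what the
// `tokenExpiresIn(expiresAt) < 0` guard above checks before skipping revocation.
function tokenExpiresIn(expiresAt) {
  return Math.round((new Date(expiresAt).getTime() - Date.now()) / 1000);
}
```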
---
dist/main.cjs | 2 +-
lib/main.js | 2 +-
tests/snapshots/index.js.md | 27 +++++++++------------------
tests/snapshots/index.js.snap | Bin 1131 -> 1128 bytes
4 files changed, 11 insertions(+), 20 deletions(-)
diff --git a/dist/main.cjs b/dist/main.cjs
index a96b5c0..474eaef 100644
--- a/dist/main.cjs
+++ b/dist/main.cjs
@@ -39753,7 +39753,7 @@ async function main(appId2, privateKey2, owner2, repositories2, core3, createApp
core3.setOutput("app-slug", appSlug);
if (!skipTokenRevoke2) {
core3.saveState("token", authentication.token);
- core3.setOutput("expiresAt", authentication.expiresAt);
+ core3.saveState("expiresAt", authentication.expiresAt);
}
}
async function getTokenFromOwner(request2, auth5, parsedOwner) {
diff --git a/lib/main.js b/lib/main.js
index d685277..97443c0 100644
--- a/lib/main.js
+++ b/lib/main.js
@@ -104,7 +104,7 @@ export async function main(
// Make token accessible to post function (so we can invalidate it)
if (!skipTokenRevoke) {
core.saveState("token", authentication.token);
- core.setOutput("expiresAt", authentication.expiresAt);
+ core.saveState("expiresAt", authentication.expiresAt);
}
}
diff --git a/tests/snapshots/index.js.md b/tests/snapshots/index.js.md
index 21918c0..c458d39 100644
--- a/tests/snapshots/index.js.md
+++ b/tests/snapshots/index.js.md
@@ -33,8 +33,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-missing-app-id.test.js
@@ -94,8 +93,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-set-repo-set-to-many.test.js
@@ -114,8 +112,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-set-repo-set-to-one.test.js
@@ -134,8 +131,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-set-to-org-repo-unset.test.js
@@ -154,8 +150,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-set-to-user-fail-response.test.js
@@ -175,8 +170,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-set-to-user-repo-unset.test.js
@@ -195,8 +189,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-unset-repo-set.test.js
@@ -215,8 +208,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## main-token-get-owner-unset-repo-unset.test.js
@@ -235,8 +227,7 @@ Generated by [AVA](https://avajs.dev).
␊
::set-output name=app-slug::github-actions␊
::save-state name=token::ghs_16C7e42F292c6912E7710c838347Ae178B4a␊
- ␊
- ::set-output name=expiresAt::2016-07-11T22:14:10Z`
+ ::save-state name=expiresAt::2016-07-11T22:14:10Z`
## post-revoke-token-fail-response.test.js
diff --git a/tests/snapshots/index.js.snap b/tests/snapshots/index.js.snap
index 53720f3db20c1a6b28f5ab5d2b11dba78c399cea..5a0653706ded1917da4cb52e95edaeab23821f70 100644
GIT binary patch
literal 1128
[base85-encoded binary snapshot data omitted]
From: semantic-release-bot
Date: Wed, 26 Jun 2024 01:11:30 +0000
Subject: [PATCH 03/10] build(release): 1.10.2 [skip ci]
## [1.10.2](https://github.com/actions/create-github-app-token/compare/v1.10.1...v1.10.2) (2024-06-26)
### Bug Fixes
* do not revoke token if already expired ([#147](https://github.com/actions/create-github-app-token/issues/147)) ([66a7045](https://github.com/actions/create-github-app-token/commit/66a70456860bafc79e37635eea77b8b2a929f6c8)), closes [#140](https://github.com/actions/create-github-app-token/issues/140) [#95](https://github.com/actions/create-github-app-token/issues/95)
---
package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/package.json b/package.json
index ec5dac8..57ac337 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "create-github-app-token",
"private": true,
"type": "module",
- "version": "1.10.1",
+ "version": "1.10.2",
"description": "GitHub Action for creating a GitHub App Installation Access Token",
"scripts": {
"build": "esbuild main.js post.js --bundle --outdir=dist --out-extension:.js=.cjs --platform=node --target=node20.0.0",
From 74cd7f68cb7eb068322f8b2e8f44df4f25d39fdf Mon Sep 17 00:00:00 2001
From: "Anuraag (Rag) Agrawal"
Date: Fri, 28 Jun 2024 23:51:24 +0900
Subject: [PATCH 04/10] docs(README): fix committer string example and add git
config example (#145)
---
README.md | 40 +++++++++++++++++++++++++++++++++++++++-
1 file changed, 39 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 57c2021..3d3060d 100644
--- a/README.md
+++ b/README.md
@@ -79,11 +79,49 @@ jobs:
# required
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.PRIVATE_KEY }}
+ - name: Retrieve GitHub App User ID
+ id: get-user-id
+ run: echo "user-id=$(gh api "/users/${{ steps.generate-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
- id: committer
- run: echo "string=${{steps.app-auth.outputs.app-slug}}[bot] <${{ steps.app-auth.outputs.installation-id }}+${{ steps.app-auth.outputs.app-slug }}[bot]@users.noreply.github.com>" >> "$GITHUB_OUTPUT"
+ run: echo "string=${{steps.app-token.outputs.app-slug}}[bot] <${{steps.get-user-id.outputs.user-id}}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>" >> "$GITHUB_OUTPUT"
- run: echo "committer string is ${{steps.committer.outputs.string}}"
```
+### Configure git CLI for an app's bot user
+
+```yaml
+on: [pull_request]
+
+jobs:
+ auto-format:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/create-github-app-token@v1
+ id: app-token
+ with:
+ # required
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.PRIVATE_KEY }}
+ - name: Retrieve GitHub App User ID
+ id: get-user-id
+ run: echo "user-id=$(gh api "/users/${{ steps.generate-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ - run: |
+ git config --global user.name '${{steps.app-token.outputs.app-slug}}[bot]'
+ git config --global user.email '${{steps.get-user-id.outputs.user-id}}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>'
+ # git commands like commit work using the bot user
+ - run: |
+ git add .
+ git commit -m "Auto-generated changes"
+ git push
+```
+
The `<BOT_USER_ID>` is the numeric user ID of the app's bot user, which can be found under `https://api.github.com/users/<app-slug>%5Bbot%5D`.
For example, we can check at `https://api.github.com/users/dependabot%5Bbot%5D` to see that the user ID of dependabot is 49699333.
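As an aside, the same lookup and committer-string construction can be done in plain Node.js; the sketch below is illustrative only (the endpoint, the `[bot]` suffix, and the noreply address format come from the README text and workflow above, while the function name and error handling are assumptions):
```javascript
// Hypothetical sketch: resolve an app's bot user ID and build the
// "<app-slug>[bot] <ID+<app-slug>[bot]@users.noreply.github.com>" committer string.
// For example, botCommitter("dependabot") would use user ID 49699333.
async function botCommitter(appSlug) {
  const response = await fetch(`https://api.github.com/users/${appSlug}%5Bbot%5D`);
  if (!response.ok) throw new Error(`User lookup failed: ${response.status}`);
  const { id } = await response.json();
  return `${appSlug}[bot] <${id}+${appSlug}[bot]@users.noreply.github.com>`;
}
```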
+
### Create a token for all repositories in the current owner's installation
```yaml
From cc82279e84540c5543078cedc5af4fcfab0a96bb Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 06:14:36 -0700
Subject: [PATCH 05/10] fix(deps): bump undici from 6.18.2 to 6.19.2 in the
production-dependencies group (#149)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps the production-dependencies group with 1 update:
[undici](https://github.com/nodejs/undici).
Updates `undici` from 6.18.2 to 6.19.2
Release notes
Sourced from undici's releases.
v6.19.2
What's Changed
Full Changelog: https://github.com/nodejs/undici/compare/v6.19.1...v6.19.2
v6.19.1
What's Changed
Full Changelog: https://github.com/nodejs/undici/compare/v6.19.0...v6.19.1
v6.19.0
What's Changed
New Contributors
Full Changelog: https://github.com/nodejs/undici/compare/v6.18.2...v6.19.0
Commits
[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=undici&package-manager=npm_and_yarn&previous-version=6.18.2&new-version=6.19.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore <dependency name> major version` will close this
group update PR and stop Dependabot creating any more for the specific
dependency's major version (unless you unignore this specific
dependency's major version or upgrade to it yourself)
- `@dependabot ignore <dependency name> minor version` will close this
group update PR and stop Dependabot creating any more for the specific
dependency's minor version (unless you unignore this specific
dependency's minor version or upgrade to it yourself)
- `@dependabot ignore <dependency name>` will close this group update PR
and stop Dependabot creating any more for the specific dependency
(unless you unignore this specific dependency or upgrade to it yourself)
- `@dependabot unignore <dependency name>` will remove all of the ignore
conditions of the specified dependency
- `@dependabot unignore <dependency name> <ignore condition>` will
remove the ignore condition of the specified dependency and ignore
conditions
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
package-lock.json | 12 ++++++------
package.json | 2 +-
2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index 00047b9..73d78ce 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,19 +1,19 @@
{
"name": "create-github-app-token",
- "version": "1.10.1",
+ "version": "1.10.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "create-github-app-token",
- "version": "1.10.1",
+ "version": "1.10.2",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.1",
"@octokit/auth-app": "^7.1.0",
"@octokit/request": "^9.0.1",
"p-retry": "^6.2.0",
- "undici": "^6.18.2"
+ "undici": "^6.19.2"
},
"devDependencies": {
"@sinonjs/fake-timers": "^11.2.2",
@@ -3412,9 +3412,9 @@
}
},
"node_modules/undici": {
- "version": "6.18.2",
- "resolved": "https://registry.npmjs.org/undici/-/undici-6.18.2.tgz",
- "integrity": "sha512-o/MQLTwRm9IVhOqhZ0NQ9oXax1ygPjw6Vs+Vq/4QRjbOAC3B1GCHy7TYxxbExKlb7bzDRzt9vBWU6BDz0RFfYg==",
+ "version": "6.19.2",
+ "resolved": "https://registry.npmjs.org/undici/-/undici-6.19.2.tgz",
+ "integrity": "sha512-JfjKqIauur3Q6biAtHJ564e3bWa8VvT+7cSiOJHFbX4Erv6CLGDpg8z+Fmg/1OI/47RA+GI2QZaF48SSaLvyBA==",
"engines": {
"node": ">=18.17"
}
diff --git a/package.json b/package.json
index 57ac337..1b5aa20 100644
--- a/package.json
+++ b/package.json
@@ -16,7 +16,7 @@
"@octokit/auth-app": "^7.1.0",
"@octokit/request": "^9.0.1",
"p-retry": "^6.2.0",
- "undici": "^6.18.2"
+ "undici": "^6.19.2"
},
"devDependencies": {
"@sinonjs/fake-timers": "^11.2.2",
From 31c86eb3b33c9b601a1f60f98dcbfd1d70f379b4 Mon Sep 17 00:00:00 2001
From: semantic-release-bot
Date: Mon, 1 Jul 2024 13:15:08 +0000
Subject: [PATCH 06/10] build(release): 1.10.3 [skip ci]
## [1.10.3](https://github.com/actions/create-github-app-token/compare/v1.10.2...v1.10.3) (2024-07-01)
### Bug Fixes
* **deps:** bump undici from 6.18.2 to 6.19.2 in the production-dependencies group ([#149](https://github.com/actions/create-github-app-token/issues/149)) ([cc82279](https://github.com/actions/create-github-app-token/commit/cc82279e84540c5543078cedc5af4fcfab0a96bb)), closes [#3337](https://github.com/actions/create-github-app-token/issues/3337) [nodejs/undici#3338](https://github.com/nodejs/undici/issues/3338) [nodejs/undici#3340](https://github.com/nodejs/undici/issues/3340) [nodejs/undici#3332](https://github.com/nodejs/undici/issues/3332) [nodejs/undici#3335](https://github.com/nodejs/undici/issues/3335) [nodejs/undici#3305](https://github.com/nodejs/undici/issues/3305) [nodejs/undici#3303](https://github.com/nodejs/undici/issues/3303) [nodejs/undici#3304](https://github.com/nodejs/undici/issues/3304) [nodejs/undici#3306](https://github.com/nodejs/undici/issues/3306) [nodejs/undici#3309](https://github.com/nodejs/undici/issues/3309) [nodejs/undici#3313](https://github.com/nodejs/undici/issues/3313) [nodejs/undici#3311](https://github.com/nodejs/undici/issues/3311) [nodejs/undici#3107](https://github.com/nodejs/undici/issues/3107) [nodejs/undici#3302](https://github.com/nodejs/undici/issues/3302) [nodejs/undici#3320](https://github.com/nodejs/undici/issues/3320) [nodejs/undici#3321](https://github.com/nodejs/undici/issues/3321) [nodejs/undici#3316](https://github.com/nodejs/undici/issues/3316) [nodejs/undici#3318](https://github.com/nodejs/undici/issues/3318) [nodejs/undici#3326](https://github.com/nodejs/undici/issues/3326) [nodejs/undici#3324](https://github.com/nodejs/undici/issues/3324) [nodejs/undici#3325](https://github.com/nodejs/undici/issues/3325) [nodejs/undici#3316](https://github.com/nodejs/undici/issues/3316) [nodejs/undici#3318](https://github.com/nodejs/undici/issues/3318) [#3342](https://github.com/actions/create-github-app-token/issues/3342) [#3332](https://github.com/actions/create-github-app-token/issues/3332) [#3340](https://github.com/actions/create-github-app-token/issues/3340) [#3337](https://github.com/actions/create-github-app-token/issues/3337) [#3338](https://github.com/actions/create-github-app-token/issues/3338) [#3336](https://github.com/actions/create-github-app-token/issues/3336) [#3335](https://github.com/actions/create-github-app-token/issues/3335) [#3325](https://github.com/actions/create-github-app-token/issues/3325) [#3324](https://github.com/actions/create-github-app-token/issues/3324) [#3326](https://github.com/actions/create-github-app-token/issues/3326)
---
dist/main.cjs | 229 +++++++++++++++++++++++---------------------------
dist/post.cjs | 229 +++++++++++++++++++++++---------------------------
package.json | 2 +-
3 files changed, 215 insertions(+), 245 deletions(-)
diff --git a/dist/main.cjs b/dist/main.cjs
index 474eaef..a98f495 100644
--- a/dist/main.cjs
+++ b/dist/main.cjs
@@ -20172,6 +20172,27 @@ var require_util8 = __commonJS({
}
var kEnumerableProperty = /* @__PURE__ */ Object.create(null);
kEnumerableProperty.enumerable = true;
+ var normalizedMethodRecordsBase = {
+ delete: "DELETE",
+ DELETE: "DELETE",
+ get: "GET",
+ GET: "GET",
+ head: "HEAD",
+ HEAD: "HEAD",
+ options: "OPTIONS",
+ OPTIONS: "OPTIONS",
+ post: "POST",
+ POST: "POST",
+ put: "PUT",
+ PUT: "PUT"
+ };
+ var normalizedMethodRecords = {
+ ...normalizedMethodRecordsBase,
+ patch: "patch",
+ PATCH: "PATCH"
+ };
+ Object.setPrototypeOf(normalizedMethodRecordsBase, null);
+ Object.setPrototypeOf(normalizedMethodRecords, null);
module2.exports = {
kEnumerableProperty,
nop,
@@ -20210,6 +20231,8 @@ var require_util8 = __commonJS({
isValidHeaderValue,
isTokenCharCode,
parseRangeHeader,
+ normalizedMethodRecordsBase,
+ normalizedMethodRecords,
isValidPort,
isHttpOrHttpsPrefixed,
nodeMajor,
@@ -20425,7 +20448,8 @@ var require_request3 = __commonJS({
isBlobLike,
buildURL,
validateHandler,
- getServerName
+ getServerName,
+ normalizedMethodRecords
} = require_util8();
var { channels } = require_diagnostics();
var { headerNameLowerCasedRecord } = require_constants6();
@@ -20452,12 +20476,12 @@ var require_request3 = __commonJS({
throw new InvalidArgumentError("path must be a string");
} else if (path[0] !== "/" && !(path.startsWith("http://") || path.startsWith("https://")) && method !== "CONNECT") {
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
- } else if (invalidPathRegex.exec(path) !== null) {
+ } else if (invalidPathRegex.test(path)) {
throw new InvalidArgumentError("invalid request path");
}
if (typeof method !== "string") {
throw new InvalidArgumentError("method must be a string");
- } else if (!isValidHTTPToken(method)) {
+ } else if (normalizedMethodRecords[method] === void 0 && !isValidHTTPToken(method)) {
throw new InvalidArgumentError("invalid request method");
}
if (upgrade && typeof upgrade !== "string") {
@@ -21002,7 +21026,7 @@ var require_connect2 = __commonJS({
}
};
}
- function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
+ function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError("maxCachedSessions must be a positive integer or zero");
}
@@ -21018,7 +21042,7 @@ var require_connect2 = __commonJS({
}
servername = servername || options.servername || util.getServerName(host) || null;
const sessionKey = servername || hostname;
- const session = sessionCache.get(sessionKey) || null;
+ const session = customSession || sessionCache.get(sessionKey) || null;
assert(sessionKey);
socket = tls.connect({
highWaterMark: 16384,
@@ -22536,7 +22560,7 @@ var require_util9 = __commonJS({
var { getGlobalOrigin } = require_global3();
var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url();
var { performance: performance2 } = require("node:perf_hooks");
- var { isBlobLike, ReadableStreamFrom, isValidHTTPToken } = require_util8();
+ var { isBlobLike, ReadableStreamFrom, isValidHTTPToken, normalizedMethodRecordsBase } = require_util8();
var assert = require("node:assert");
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
@@ -22643,7 +22667,7 @@ var require_util9 = __commonJS({
}
function appendRequestOriginHeader(request2) {
let serializedOrigin = request2.origin;
- if (serializedOrigin === "client") {
+ if (serializedOrigin === "client" || serializedOrigin === void 0) {
return;
}
if (request2.responseTainting === "cors" || request2.mode === "websocket") {
@@ -22924,29 +22948,8 @@ var require_util9 = __commonJS({
function isCancelled(fetchParams) {
return fetchParams.controller.state === "aborted" || fetchParams.controller.state === "terminated";
}
- var normalizeMethodRecordBase = {
- delete: "DELETE",
- DELETE: "DELETE",
- get: "GET",
- GET: "GET",
- head: "HEAD",
- HEAD: "HEAD",
- options: "OPTIONS",
- OPTIONS: "OPTIONS",
- post: "POST",
- POST: "POST",
- put: "PUT",
- PUT: "PUT"
- };
- var normalizeMethodRecord = {
- ...normalizeMethodRecordBase,
- patch: "patch",
- PATCH: "PATCH"
- };
- Object.setPrototypeOf(normalizeMethodRecordBase, null);
- Object.setPrototypeOf(normalizeMethodRecord, null);
function normalizeMethod(method) {
- return normalizeMethodRecordBase[method.toLowerCase()] ?? method;
+ return normalizedMethodRecordsBase[method.toLowerCase()] ?? method;
}
function serializeJavascriptValueToJSONString(value) {
const result = JSON.stringify(value);
@@ -23083,7 +23086,7 @@ var require_util9 = __commonJS({
}
});
}
- async function fullyReadBody(body, processBody, processBodyError, shouldClone) {
+ async function fullyReadBody(body, processBody, processBodyError) {
const successSteps = processBody;
const errorSteps = processBodyError;
let reader;
@@ -23094,7 +23097,7 @@ var require_util9 = __commonJS({
return;
}
try {
- successSteps(await readAllBytes(reader, shouldClone));
+ successSteps(await readAllBytes(reader));
} catch (e) {
errorSteps(e);
}
@@ -23117,19 +23120,12 @@ var require_util9 = __commonJS({
assert(!invalidIsomorphicEncodeValueRegex.test(input));
return input;
}
- async function readAllBytes(reader, shouldClone) {
+ async function readAllBytes(reader) {
const bytes = [];
let byteLength = 0;
while (true) {
const { done, value: chunk } = await reader.read();
if (done) {
- if (bytes.length === 1) {
- const { buffer, byteOffset, byteLength: byteLength2 } = bytes[0];
- if (shouldClone === false) {
- return Buffer.from(buffer, byteOffset, byteLength2);
- }
- return Buffer.from(buffer.slice(byteOffset, byteOffset + byteLength2), 0, byteLength2);
- }
return Buffer.concat(bytes, byteLength);
}
if (!isUint8Array(chunk)) {
@@ -23392,7 +23388,6 @@ var require_util9 = __commonJS({
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes,
- normalizeMethodRecord,
simpleRangeHeaderValue,
buildContentRange,
parseMetadata,
@@ -24064,18 +24059,18 @@ Content-Type: ${value.type || "application/octet-stream"}\r
mimeType = serializeAMimeType(mimeType);
}
return new Blob2([bytes], { type: mimeType });
- }, instance, false);
+ }, instance);
},
arrayBuffer() {
return consumeBody(this, (bytes) => {
- return bytes.buffer;
- }, instance, true);
+ return new Uint8Array(bytes).buffer;
+ }, instance);
},
text() {
- return consumeBody(this, utf8DecodeBytes, instance, false);
+ return consumeBody(this, utf8DecodeBytes, instance);
},
json() {
- return consumeBody(this, parseJSONFromBytes, instance, false);
+ return consumeBody(this, parseJSONFromBytes, instance);
},
formData() {
return consumeBody(this, (value) => {
@@ -24104,12 +24099,12 @@ Content-Type: ${value.type || "application/octet-stream"}\r
throw new TypeError(
'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
);
- }, instance, false);
+ }, instance);
},
bytes() {
return consumeBody(this, (bytes) => {
- return new Uint8Array(bytes.buffer, 0, bytes.byteLength);
- }, instance, true);
+ return new Uint8Array(bytes);
+ }, instance);
}
};
return methods;
@@ -24117,7 +24112,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
function mixinBody(prototype) {
Object.assign(prototype.prototype, bodyMixinMethods(prototype));
}
- async function consumeBody(object, convertBytesToJSValue, instance, shouldClone) {
+ async function consumeBody(object, convertBytesToJSValue, instance) {
webidl.brandCheck(object, instance);
if (bodyUnusable(object[kState].body)) {
throw new TypeError("Body is unusable: Body has already been read");
@@ -24136,7 +24131,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
successSteps(Buffer.allocUnsafe(0));
return promise.promise;
}
- await fullyReadBody(object[kState].body, successSteps, errorSteps, shouldClone);
+ await fullyReadBody(object[kState].body, successSteps, errorSteps);
return promise.promise;
}
function bodyUnusable(body) {
@@ -24889,25 +24884,25 @@ upgrade: ${upgrade}\r
channels.sendHeaders.publish({ request: request2, headers: header, socket });
}
if (!body || bodyLength === 0) {
- writeBuffer({ abort, body: null, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeBuffer(abort, null, client, request2, socket, contentLength, header, expectsPayload);
} else if (util.isBuffer(body)) {
- writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload);
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable({ abort, body: body.stream(), client, request: request2, socket, contentLength, header, expectsPayload });
+ writeIterable(abort, body.stream(), client, request2, socket, contentLength, header, expectsPayload);
} else {
- writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload);
}
} else if (util.isStream(body)) {
- writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload);
} else if (util.isIterable(body)) {
- writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload);
} else {
assert(false);
}
return true;
}
- function writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ function writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
let finished = false;
const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header });
@@ -24976,7 +24971,7 @@ upgrade: ${upgrade}\r
setImmediate(onClose);
}
}
- function writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ function writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
try {
if (!body) {
if (contentLength === 0) {
@@ -25007,7 +25002,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ async function writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -25030,7 +25025,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ async function writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -25493,81 +25488,79 @@ var require_client_h2 = __commonJS({
return true;
function writeBodyH2() {
if (!body || contentLength === 0) {
- writeBuffer({
+ writeBuffer(
abort,
+ stream,
+ null,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- h2stream: stream,
- body: null,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else if (util.isBuffer(body)) {
- writeBuffer({
+ writeBuffer(
abort,
+ stream,
+ body,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- body,
- expectsPayload,
- h2stream: stream,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable({
+ writeIterable(
abort,
+ stream,
+ body.stream(),
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- h2stream: stream,
- body: body.stream(),
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else {
- writeBlob({
+ writeBlob(
abort,
+ stream,
body,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- h2stream: stream,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
}
} else if (util.isStream(body)) {
- writeStream({
+ writeStream(
abort,
+ client[kSocket],
+ expectsPayload,
+ stream,
body,
client,
- request: request2,
- contentLength,
- expectsPayload,
- socket: client[kSocket],
- h2stream: stream,
- header: ""
- });
+ request2,
+ contentLength
+ );
} else if (util.isIterable(body)) {
- writeIterable({
+ writeIterable(
abort,
+ stream,
body,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- header: "",
- h2stream: stream,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else {
assert(false);
}
}
}
- function writeBuffer({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
+ function writeBuffer(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, "buffer body must have content length");
@@ -25586,7 +25579,7 @@ var require_client_h2 = __commonJS({
abort(error);
}
}
- function writeStream({ abort, socket, expectsPayload, h2stream, body, client, request: request2, contentLength }) {
+ function writeStream(abort, socket, expectsPayload, h2stream, body, client, request2, contentLength) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
const pipe = pipeline(
body,
@@ -25610,7 +25603,7 @@ var require_client_h2 = __commonJS({
request2.onBodySent(chunk);
}
}
- async function writeBlob({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
+ async function writeBlob(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -25631,7 +25624,7 @@ var require_client_h2 = __commonJS({
abort(err);
}
}
- async function writeIterable({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
+ async function writeIterable(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -27376,7 +27369,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("Content-Range mismatch", statusCode, {
headers,
- count: this.retryCount
+ data: { count: this.retryCount }
})
);
return false;
@@ -27385,7 +27378,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("ETag mismatch", statusCode, {
headers,
- count: this.retryCount
+ data: { count: this.retryCount }
})
);
return false;
@@ -30609,9 +30602,7 @@ var require_request4 = __commonJS({
var {
isValidHTTPToken,
sameOrigin,
- normalizeMethod,
- environmentSettingsObject,
- normalizeMethodRecord
+ environmentSettingsObject
} = require_util9();
var {
forbiddenMethodsSet,
@@ -30623,7 +30614,7 @@ var require_request4 = __commonJS({
requestCache,
requestDuplex
} = require_constants8();
- var { kEnumerableProperty } = util;
+ var { kEnumerableProperty, normalizedMethodRecordsBase, normalizedMethodRecords } = util;
var { kHeaders, kSignal, kState, kDispatcher } = require_symbols7();
var { webidl } = require_webidl2();
var { URLSerializer } = require_data_url();
@@ -30820,17 +30811,18 @@ var require_request4 = __commonJS({
}
if (init.method !== void 0) {
let method = init.method;
- const mayBeNormalized = normalizeMethodRecord[method];
+ const mayBeNormalized = normalizedMethodRecords[method];
if (mayBeNormalized !== void 0) {
request2.method = mayBeNormalized;
} else {
if (!isValidHTTPToken(method)) {
throw new TypeError(`'${method}' is not a valid HTTP method.`);
}
- if (forbiddenMethodsSet.has(method.toUpperCase())) {
+ const upperCase = method.toUpperCase();
+ if (forbiddenMethodsSet.has(upperCase)) {
throw new TypeError(`'${method}' HTTP method is unsupported.`);
}
- method = normalizeMethod(method);
+ method = normalizedMethodRecordsBase[upperCase] ?? method;
request2.method = method;
}
if (!patchMethodWarning && request2.method === "patch") {
@@ -35546,7 +35538,6 @@ var require_websocket2 = __commonJS({
var { types } = require("node:util");
var { ErrorEvent, CloseEvent } = require_events2();
var { SendQueue } = require_sender();
- var experimentalWarned = false;
var WebSocket = class _WebSocket extends EventTarget {
#events = {
open: null,
@@ -35567,12 +35558,6 @@ var require_websocket2 = __commonJS({
super();
const prefix = "WebSocket constructor";
webidl.argumentLengthCheck(arguments, 1, prefix);
- if (!experimentalWarned) {
- experimentalWarned = true;
- process.emitWarning("WebSockets are experimental, expect them to change at any time.", {
- code: "UNDICI-WS"
- });
- }
const options = webidl.converters["DOMString or sequence or WebSocketInit"](protocols, prefix, "options");
url = webidl.converters.USVString(url, prefix, "url");
protocols = options.protocols;
diff --git a/dist/post.cjs b/dist/post.cjs
index 0307466..090c0f1 100644
--- a/dist/post.cjs
+++ b/dist/post.cjs
@@ -19943,6 +19943,27 @@ var require_util8 = __commonJS({
}
var kEnumerableProperty = /* @__PURE__ */ Object.create(null);
kEnumerableProperty.enumerable = true;
+ var normalizedMethodRecordsBase = {
+ delete: "DELETE",
+ DELETE: "DELETE",
+ get: "GET",
+ GET: "GET",
+ head: "HEAD",
+ HEAD: "HEAD",
+ options: "OPTIONS",
+ OPTIONS: "OPTIONS",
+ post: "POST",
+ POST: "POST",
+ put: "PUT",
+ PUT: "PUT"
+ };
+ var normalizedMethodRecords = {
+ ...normalizedMethodRecordsBase,
+ patch: "patch",
+ PATCH: "PATCH"
+ };
+ Object.setPrototypeOf(normalizedMethodRecordsBase, null);
+ Object.setPrototypeOf(normalizedMethodRecords, null);
module2.exports = {
kEnumerableProperty,
nop,
@@ -19981,6 +20002,8 @@ var require_util8 = __commonJS({
isValidHeaderValue,
isTokenCharCode,
parseRangeHeader,
+ normalizedMethodRecordsBase,
+ normalizedMethodRecords,
isValidPort,
isHttpOrHttpsPrefixed,
nodeMajor,
@@ -20196,7 +20219,8 @@ var require_request3 = __commonJS({
isBlobLike,
buildURL,
validateHandler,
- getServerName
+ getServerName,
+ normalizedMethodRecords
} = require_util8();
var { channels } = require_diagnostics();
var { headerNameLowerCasedRecord } = require_constants6();
@@ -20223,12 +20247,12 @@ var require_request3 = __commonJS({
throw new InvalidArgumentError("path must be a string");
} else if (path[0] !== "/" && !(path.startsWith("http://") || path.startsWith("https://")) && method !== "CONNECT") {
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
- } else if (invalidPathRegex.exec(path) !== null) {
+ } else if (invalidPathRegex.test(path)) {
throw new InvalidArgumentError("invalid request path");
}
if (typeof method !== "string") {
throw new InvalidArgumentError("method must be a string");
- } else if (!isValidHTTPToken(method)) {
+ } else if (normalizedMethodRecords[method] === void 0 && !isValidHTTPToken(method)) {
throw new InvalidArgumentError("invalid request method");
}
if (upgrade && typeof upgrade !== "string") {
@@ -20773,7 +20797,7 @@ var require_connect2 = __commonJS({
}
};
}
- function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
+ function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError("maxCachedSessions must be a positive integer or zero");
}
@@ -20789,7 +20813,7 @@ var require_connect2 = __commonJS({
}
servername = servername || options.servername || util.getServerName(host) || null;
const sessionKey = servername || hostname;
- const session = sessionCache.get(sessionKey) || null;
+ const session = customSession || sessionCache.get(sessionKey) || null;
assert(sessionKey);
socket = tls.connect({
highWaterMark: 16384,
@@ -22307,7 +22331,7 @@ var require_util9 = __commonJS({
var { getGlobalOrigin } = require_global3();
var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url();
var { performance: performance2 } = require("node:perf_hooks");
- var { isBlobLike, ReadableStreamFrom, isValidHTTPToken } = require_util8();
+ var { isBlobLike, ReadableStreamFrom, isValidHTTPToken, normalizedMethodRecordsBase } = require_util8();
var assert = require("node:assert");
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
@@ -22414,7 +22438,7 @@ var require_util9 = __commonJS({
}
function appendRequestOriginHeader(request2) {
let serializedOrigin = request2.origin;
- if (serializedOrigin === "client") {
+ if (serializedOrigin === "client" || serializedOrigin === void 0) {
return;
}
if (request2.responseTainting === "cors" || request2.mode === "websocket") {
@@ -22695,29 +22719,8 @@ var require_util9 = __commonJS({
function isCancelled(fetchParams) {
return fetchParams.controller.state === "aborted" || fetchParams.controller.state === "terminated";
}
- var normalizeMethodRecordBase = {
- delete: "DELETE",
- DELETE: "DELETE",
- get: "GET",
- GET: "GET",
- head: "HEAD",
- HEAD: "HEAD",
- options: "OPTIONS",
- OPTIONS: "OPTIONS",
- post: "POST",
- POST: "POST",
- put: "PUT",
- PUT: "PUT"
- };
- var normalizeMethodRecord = {
- ...normalizeMethodRecordBase,
- patch: "patch",
- PATCH: "PATCH"
- };
- Object.setPrototypeOf(normalizeMethodRecordBase, null);
- Object.setPrototypeOf(normalizeMethodRecord, null);
function normalizeMethod(method) {
- return normalizeMethodRecordBase[method.toLowerCase()] ?? method;
+ return normalizedMethodRecordsBase[method.toLowerCase()] ?? method;
}
function serializeJavascriptValueToJSONString(value) {
const result = JSON.stringify(value);
@@ -22854,7 +22857,7 @@ var require_util9 = __commonJS({
}
});
}
- async function fullyReadBody(body, processBody, processBodyError, shouldClone) {
+ async function fullyReadBody(body, processBody, processBodyError) {
const successSteps = processBody;
const errorSteps = processBodyError;
let reader;
@@ -22865,7 +22868,7 @@ var require_util9 = __commonJS({
return;
}
try {
- successSteps(await readAllBytes(reader, shouldClone));
+ successSteps(await readAllBytes(reader));
} catch (e) {
errorSteps(e);
}
@@ -22888,19 +22891,12 @@ var require_util9 = __commonJS({
assert(!invalidIsomorphicEncodeValueRegex.test(input));
return input;
}
- async function readAllBytes(reader, shouldClone) {
+ async function readAllBytes(reader) {
const bytes = [];
let byteLength = 0;
while (true) {
const { done, value: chunk } = await reader.read();
if (done) {
- if (bytes.length === 1) {
- const { buffer, byteOffset, byteLength: byteLength2 } = bytes[0];
- if (shouldClone === false) {
- return Buffer.from(buffer, byteOffset, byteLength2);
- }
- return Buffer.from(buffer.slice(byteOffset, byteOffset + byteLength2), 0, byteLength2);
- }
return Buffer.concat(bytes, byteLength);
}
if (!isUint8Array(chunk)) {
@@ -23163,7 +23159,6 @@ var require_util9 = __commonJS({
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes,
- normalizeMethodRecord,
simpleRangeHeaderValue,
buildContentRange,
parseMetadata,
@@ -23835,18 +23830,18 @@ Content-Type: ${value.type || "application/octet-stream"}\r
mimeType = serializeAMimeType(mimeType);
}
return new Blob2([bytes], { type: mimeType });
- }, instance, false);
+ }, instance);
},
arrayBuffer() {
return consumeBody(this, (bytes) => {
- return bytes.buffer;
- }, instance, true);
+ return new Uint8Array(bytes).buffer;
+ }, instance);
},
text() {
- return consumeBody(this, utf8DecodeBytes, instance, false);
+ return consumeBody(this, utf8DecodeBytes, instance);
},
json() {
- return consumeBody(this, parseJSONFromBytes, instance, false);
+ return consumeBody(this, parseJSONFromBytes, instance);
},
formData() {
return consumeBody(this, (value) => {
@@ -23875,12 +23870,12 @@ Content-Type: ${value.type || "application/octet-stream"}\r
throw new TypeError(
'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
);
- }, instance, false);
+ }, instance);
},
bytes() {
return consumeBody(this, (bytes) => {
- return new Uint8Array(bytes.buffer, 0, bytes.byteLength);
- }, instance, true);
+ return new Uint8Array(bytes);
+ }, instance);
}
};
return methods;
@@ -23888,7 +23883,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
function mixinBody(prototype) {
Object.assign(prototype.prototype, bodyMixinMethods(prototype));
}
- async function consumeBody(object, convertBytesToJSValue, instance, shouldClone) {
+ async function consumeBody(object, convertBytesToJSValue, instance) {
webidl.brandCheck(object, instance);
if (bodyUnusable(object[kState].body)) {
throw new TypeError("Body is unusable: Body has already been read");
@@ -23907,7 +23902,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
successSteps(Buffer.allocUnsafe(0));
return promise.promise;
}
- await fullyReadBody(object[kState].body, successSteps, errorSteps, shouldClone);
+ await fullyReadBody(object[kState].body, successSteps, errorSteps);
return promise.promise;
}
function bodyUnusable(body) {
@@ -24660,25 +24655,25 @@ upgrade: ${upgrade}\r
channels.sendHeaders.publish({ request: request2, headers: header, socket });
}
if (!body || bodyLength === 0) {
- writeBuffer({ abort, body: null, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeBuffer(abort, null, client, request2, socket, contentLength, header, expectsPayload);
} else if (util.isBuffer(body)) {
- writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload);
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable({ abort, body: body.stream(), client, request: request2, socket, contentLength, header, expectsPayload });
+ writeIterable(abort, body.stream(), client, request2, socket, contentLength, header, expectsPayload);
} else {
- writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload);
}
} else if (util.isStream(body)) {
- writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload);
} else if (util.isIterable(body)) {
- writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
+ writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload);
} else {
assert(false);
}
return true;
}
- function writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ function writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
let finished = false;
const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header });
@@ -24747,7 +24742,7 @@ upgrade: ${upgrade}\r
setImmediate(onClose);
}
}
- function writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ function writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
try {
if (!body) {
if (contentLength === 0) {
@@ -24778,7 +24773,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ async function writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -24801,7 +24796,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
+ async function writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -25264,81 +25259,79 @@ var require_client_h2 = __commonJS({
return true;
function writeBodyH2() {
if (!body || contentLength === 0) {
- writeBuffer({
+ writeBuffer(
abort,
+ stream,
+ null,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- h2stream: stream,
- body: null,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else if (util.isBuffer(body)) {
- writeBuffer({
+ writeBuffer(
abort,
+ stream,
+ body,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- body,
- expectsPayload,
- h2stream: stream,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable({
+ writeIterable(
abort,
+ stream,
+ body.stream(),
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- h2stream: stream,
- body: body.stream(),
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else {
- writeBlob({
+ writeBlob(
abort,
+ stream,
body,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- h2stream: stream,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
}
} else if (util.isStream(body)) {
- writeStream({
+ writeStream(
abort,
+ client[kSocket],
+ expectsPayload,
+ stream,
body,
client,
- request: request2,
- contentLength,
- expectsPayload,
- socket: client[kSocket],
- h2stream: stream,
- header: ""
- });
+ request2,
+ contentLength
+ );
} else if (util.isIterable(body)) {
- writeIterable({
+ writeIterable(
abort,
+ stream,
body,
client,
- request: request2,
+ request2,
+ client[kSocket],
contentLength,
- expectsPayload,
- header: "",
- h2stream: stream,
- socket: client[kSocket]
- });
+ expectsPayload
+ );
} else {
assert(false);
}
}
}
- function writeBuffer({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
+ function writeBuffer(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, "buffer body must have content length");
@@ -25357,7 +25350,7 @@ var require_client_h2 = __commonJS({
abort(error);
}
}
- function writeStream({ abort, socket, expectsPayload, h2stream, body, client, request: request2, contentLength }) {
+ function writeStream(abort, socket, expectsPayload, h2stream, body, client, request2, contentLength) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
const pipe = pipeline(
body,
@@ -25381,7 +25374,7 @@ var require_client_h2 = __commonJS({
request2.onBodySent(chunk);
}
}
- async function writeBlob({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
+ async function writeBlob(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -25402,7 +25395,7 @@ var require_client_h2 = __commonJS({
abort(err);
}
}
- async function writeIterable({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
+ async function writeIterable(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -27147,7 +27140,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("Content-Range mismatch", statusCode, {
headers,
- count: this.retryCount
+ data: { count: this.retryCount }
})
);
return false;
@@ -27156,7 +27149,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("ETag mismatch", statusCode, {
headers,
- count: this.retryCount
+ data: { count: this.retryCount }
})
);
return false;
@@ -30380,9 +30373,7 @@ var require_request4 = __commonJS({
var {
isValidHTTPToken,
sameOrigin,
- normalizeMethod,
- environmentSettingsObject,
- normalizeMethodRecord
+ environmentSettingsObject
} = require_util9();
var {
forbiddenMethodsSet,
@@ -30394,7 +30385,7 @@ var require_request4 = __commonJS({
requestCache,
requestDuplex
} = require_constants8();
- var { kEnumerableProperty } = util;
+ var { kEnumerableProperty, normalizedMethodRecordsBase, normalizedMethodRecords } = util;
var { kHeaders, kSignal, kState, kDispatcher } = require_symbols7();
var { webidl } = require_webidl2();
var { URLSerializer } = require_data_url();
@@ -30591,17 +30582,18 @@ var require_request4 = __commonJS({
}
if (init.method !== void 0) {
let method = init.method;
- const mayBeNormalized = normalizeMethodRecord[method];
+ const mayBeNormalized = normalizedMethodRecords[method];
if (mayBeNormalized !== void 0) {
request2.method = mayBeNormalized;
} else {
if (!isValidHTTPToken(method)) {
throw new TypeError(`'${method}' is not a valid HTTP method.`);
}
- if (forbiddenMethodsSet.has(method.toUpperCase())) {
+ const upperCase = method.toUpperCase();
+ if (forbiddenMethodsSet.has(upperCase)) {
throw new TypeError(`'${method}' HTTP method is unsupported.`);
}
- method = normalizeMethod(method);
+ method = normalizedMethodRecordsBase[upperCase] ?? method;
request2.method = method;
}
if (!patchMethodWarning && request2.method === "patch") {
@@ -35317,7 +35309,6 @@ var require_websocket2 = __commonJS({
var { types } = require("node:util");
var { ErrorEvent, CloseEvent } = require_events2();
var { SendQueue } = require_sender();
- var experimentalWarned = false;
var WebSocket = class _WebSocket extends EventTarget {
#events = {
open: null,
@@ -35338,12 +35329,6 @@ var require_websocket2 = __commonJS({
super();
const prefix = "WebSocket constructor";
webidl.argumentLengthCheck(arguments, 1, prefix);
- if (!experimentalWarned) {
- experimentalWarned = true;
- process.emitWarning("WebSockets are experimental, expect them to change at any time.", {
- code: "UNDICI-WS"
- });
- }
const options = webidl.converters["DOMString or sequence or WebSocketInit"](protocols, prefix, "options");
url = webidl.converters.USVString(url, prefix, "url");
protocols = options.protocols;
diff --git a/package.json b/package.json
index 1b5aa20..7839ff4 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "create-github-app-token",
"private": true,
"type": "module",
- "version": "1.10.2",
+ "version": "1.10.3",
"description": "GitHub Action for creating a GitHub App Installation Access Token",
"scripts": {
"build": "esbuild main.js post.js --bundle --outdir=dist --out-extension:.js=.cjs --platform=node --target=node20.0.0",
From 040c2598aacc31cdff32d3527b574e70a22707f8 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Jul 2024 12:02:56 -0700
Subject: [PATCH 07/10] build(deps-dev): bump the development-dependencies
group with 4 updates (#150)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps the development-dependencies group with 4 updates:
[c8](https://github.com/bcoe/c8),
[esbuild](https://github.com/evanw/esbuild),
[execa](https://github.com/sindresorhus/execa) and
[yaml](https://github.com/eemeli/yaml).
Updates `c8` from 9.1.0 to 10.1.2
Release notes
Sourced from c8's releases.
v10.1.2 (2024-06-13)
Bug Fixes
- deps: make monocart-coverage-reports an optional with meta defined (3b91fda)
v10.1.1 (2024-06-11)
Bug Fixes
- stop installing monocart-coverage-reports (#535) (13979a7)
v10.1.0 (2024-06-11)
Features
v10.0.0 (2024-06-10)
⚠ BREAKING CHANGES
- deps: Node 18 is now the minimum supported Node.js version
Bug Fixes
- deps: update test-exclude with new glob / minimatch (#531) (e33cf30)
Commits
- ff146b4 chore(main): release 10.1.2 (#538)
- 3b91fda fix(deps): make monocart-coverage-reports an optional with meta defined
- e3560e1 chore(main): release 10.1.1 (#536)
- 13979a7 fix: stop installing monocart-coverage-reports (#535)
- 15ac690 chore(main): release 10.1.0 (#533)
- 96e869f build(deps-dev): bump braces from 3.0.2 to 3.0.3 (#534)
- 2e5e297 feat: add experimental monocart reports (#521)
- dc38051 chore(main): release 10.0.0 (#532)
- e33cf30 fix(deps)!: update test-exclude with new glob / minimatch (#531)
- 1eeeaeb doc(CONTRIBUTING): remove dead link, update broken link (#526)
- Additional commits viewable in compare view
Updates `esbuild` from 0.21.4 to 0.22.0
Release notes
Sourced from esbuild's releases.
v0.22.0
This release deliberately contains backwards-incompatible changes. To avoid automatically picking up releases like this, you should either be pinning the exact version of esbuild in your package.json file (recommended) or be using a version range syntax that only accepts patch upgrades such as ^0.21.0 or ~0.21.0. See npm's documentation about semver for more information.
- Omit packages from bundles by default when targeting node (#1874, #2830, #2846, #2915, #3145, #3294, #3323, #3582, #3809, #3815)
  This breaking change is an experiment. People are commonly confused when using esbuild to bundle code for node (i.e. for --platform=node) because some packages may not be intended for bundlers, and may use node-specific features that don't work with a bundler. Even though esbuild's "getting started" instructions say to use --packages=external to work around this problem, many people don't read the documentation and don't do this, and are then confused when it doesn't work. So arguably this is a bad default behavior for esbuild to have if people keep tripping over this.
  With this release, esbuild will now omit packages from the bundle by default when the platform is node (i.e. the previous behavior of --packages=external is now the default in this case). Note that your dependencies must now be present on the file system when your bundle is run. If you don't want this behavior, you can do --packages=bundle to allow packages to be included in the bundle (i.e. the previous default behavior). Note that --packages=bundle doesn't mean all packages are bundled, just that packages are allowed to be bundled. You can still exclude individual packages from the bundle using --external: even when --packages=bundle is present.
  The --packages= setting considers all import paths that "look like" package imports in the original source code to be package imports. Specifically import paths that don't start with a path segment of / or . or .. are considered to be package imports. The only two exceptions to this rule are subpath imports (which start with a # character) and TypeScript path remappings via paths and/or baseUrl in tsconfig.json (which are applied first).
- Drop support for older platforms (#3802)
  This release drops support for the following operating systems:
  - Windows 7
  - Windows 8
  - Windows Server 2008
  - Windows Server 2012
  This is because the Go programming language dropped support for these operating system versions in Go 1.21, and this release updates esbuild from Go 1.20 to Go 1.22.
  Note that this only affects the binary esbuild executables that are published to the esbuild npm package. It's still possible to compile esbuild's source code for these older operating systems. If you need to, you can compile esbuild for yourself using an older version of the Go compiler (before Go version 1.21). That might look something like this:
  git clone https://github.com/evanw/esbuild.git
  cd esbuild
  go build ./cmd/esbuild
  ./esbuild.exe --version
  In addition, this release increases the minimum required node version for esbuild's JavaScript API from node 12 to node 18. Node 18 is the oldest version of node that is still being supported (see node's release schedule for more information). This increase is because of an incompatibility between the JavaScript that the Go compiler generates for the esbuild-wasm package and versions of node before node 17.4 (specifically the crypto.getRandomValues function).
- Update await using behavior to match TypeScript
  TypeScript 5.5 subtly changes the way await using behaves. This release updates esbuild to match these changes in TypeScript. You can read more about these changes in microsoft/TypeScript#58624.
- Allow es2024 as a target environment
  The ECMAScript 2024 specification was just approved, so it has been added to esbuild as a possible compilation target. You can read more about the features that it adds here: https://2ality.com/2024/06/ecmascript-2024.html. The only addition that's relevant for esbuild is the regular expression /v flag. With --target=es2024, regular expressions that use the /v flag will now be passed through untransformed instead of being transformed into a call to new RegExp.
- Publish binaries for OpenBSD on 64-bit ARM (#3665, #3674)
  With this release, you should now be able to install the esbuild npm package in OpenBSD on 64-bit ARM, such as on an Apple device with an M1 chip.
  This was contributed by @ikmckenz.
- Publish binaries for WASI (WebAssembly System Interface) preview 1 (#3300, #3779)
  The upcoming WASI (WebAssembly System Interface) standard is going to be a way to run WebAssembly outside of a JavaScript host environment. In this scenario you only need a .wasm file without any supporting JavaScript code. Instead of JavaScript providing the APIs for the host environment, the WASI standard specifies a "system interface" that WebAssembly code can access directly (e.g. for file system access).
... (truncated)
Commits
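To make the `--packages` default change described in the esbuild notes above concrete, here is a hedged sketch of the equivalent JS-API build call. It is only an illustration, not this project's actual build configuration; the entry points and output settings mirror the repository's esbuild CLI invocation, and the `packages: 'bundle'` value is the documented opt-out for the new 0.22 default.
// Sketch: on esbuild >= 0.22 with platform "node", packages default to
// "external"; passing packages: "bundle" keeps the pre-0.22 behavior of
// bundling node_modules dependencies into the output file.
import * as esbuild from "esbuild";

await esbuild.build({
  entryPoints: ["main.js", "post.js"], // hypothetical entry points
  bundle: true,
  platform: "node",
  target: "node20.0.0",
  outdir: "dist",
  outExtension: { ".js": ".cjs" },
  packages: "bundle", // opt back in to bundling dependencies
});
Leaving `packages` unset on esbuild 0.22+ would instead keep dependencies out of the emitted bundle, which is presumably why pinning the exact esbuild version matters for a project that commits its dist/ output.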
Updates `execa` from 9.1.0 to 9.3.0
Release notes
Sourced from execa's releases.
v9.3.0
Features
v9.2.0
This release includes a new set of methods to exchange messages between the current process and a Node.js subprocess, also known as "IPC". This allows passing and returning almost any message type to/from a Node.js subprocess. Also, debugging IPC is now much easier.
Moreover, a new gracefulCancel option has also been added to terminate a subprocess gracefully.
For a deeper dive-in, please check and share the release post!
Thanks @iiroj for your contribution, @SimonSiefke and @adymorz for reporting the bugs fixed in this release, and @karlhorky for improving the documentation!
Deprecations
- Passing 'ipc' to the stdio option has been deprecated. It will be removed in the next major release. Instead, the ipc: true option should be used. (#1056)
- await execa('npm', ['run', 'build'], {stdio: ['pipe', 'pipe', 'pipe', 'ipc']});
+ await execa('npm', ['run', 'build'], {ipc: true});
- import {execaCommand} from 'execa';
+ import {execa} from 'execa';
- await execaCommand('npm run build');
+ await execa`npm run build`;
const taskName = 'build';
- await execaCommand(`npm run ${taskName}`);
+ await execa`npm run ${taskName}`;
const commandArguments = ['run', 'task with space'];
await execa`npm ${commandArguments}`;
If the file and/or multiple arguments are supplied as a single string, parseCommandString(command) can split that string into an array. More info. (#1054)
- import {execaCommand} from 'execa';
+ import {execa, parseCommandString} from 'execa';
const commandString = 'npm run task';
- await execaCommand(commandString);
+ const commandArray = parseCommandString(commandString); // ['npm', 'run', 'task']
+ await execa`${commandArray}`;
... (truncated)
Commits
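As a small usage sketch of the execa 9.2+ features quoted above (template-string calls, parseCommandString, and the ipc: true option), under the assumption that only those documented APIs are used; the child.js script name is a placeholder:
import { execa, parseCommandString } from "execa";

// Template-string syntax replaces the deprecated execaCommand():
const taskName = "build";
await execa`npm run ${taskName}`;

// A single command string can be split into file + arguments first:
const commandArray = parseCommandString("npm run build"); // ['npm', 'run', 'build']
await execa`${commandArray}`;

// The new option replaces stdio: ['pipe', 'pipe', 'pipe', 'ipc']:
await execa("node", ["child.js"], { ipc: true }); // child.js is a placeholder script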
Updates `yaml` from 2.4.2 to 2.4.5
Release notes
Sourced from yaml's releases.
v2.4.5
- Improve tab handling (#553, yaml-test-suite tests DK95 & Y79Y)
v2.4.4
With special thanks to @RedCMD for finding and reporting all of the following:
- Allow comment after top-level block scalar with explicit indent indicator (#547)
- Allow tab as indent for line comments before nodes (#548)
- Do not allow tab before block collection (#549)
- In flow collections, allow []{} immediately after : with plain key (#550)
- Require indentation for ? explicit-key contents (#551)
- Require indentation from block scalar header & flow collections in mapping values (#553)
v2.4.3
- Improve error when parsing a non-string value (#459)
- Do not parse -.NaN or +.nan as NaN (#546)
- Support # within %TAG prefixes with trailing #comments
- Check for non-node complex keys when stringifying with simpleKeys (#541)
1b8fde6
2.4.5
f3c7d03
test: Obey yaml-test-suite skip instructions
4e66d72
fix: Improve tab handling (tests DK95 & Y79Y, #553)
d06f386
2.4.4
39053e8
chore: Satisfy strict TS
8baee44
test: Use source files for yaml-test-suite tests
280a861
fix: Allow comment after top-level block scalar with explicit indent
indicato...
767bc47
fix: Require indentation from block scalar header & flow collections
in mappi...
5096f83
fix: Require indentation for ? explicit-key contents (fixes #551)
22f2c6f
fix: In flow collections, allow []{} immediately after : with plain key
(fixe...
- Additional commits viewable in compare
view
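As a quick illustration of the parser fix "Do not parse -.NaN or +.nan as NaN (#546)" listed above, a minimal sketch using the yaml package's named exports (assumed to behave as described in the 2.4.3 notes):
import { parse, stringify } from "yaml";

// Since yaml 2.4.3, a signed ".NaN" form is kept as a plain string
// rather than being coerced to the number NaN.
const doc = parse("value: -.NaN");
console.log(typeof doc.value, doc.value); // "string" "-.NaN"
console.log(stringify(doc)); // emits the document again; the value stays a string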
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore <dependency name> major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself)
- `@dependabot ignore <dependency name> minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself)
- `@dependabot ignore <dependency name>` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself)
- `@dependabot unignore <dependency name>` will remove all of the ignore conditions of the specified dependency
- `@dependabot unignore <dependency name> <ignore condition>` will remove the ignore condition of the specified dependency and ignore conditions
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Gregor Martynus <39992+gr2m@users.noreply.github.com>
---
dist/main.cjs | 229 +++++++++--------
dist/post.cjs | 229 +++++++++--------
package-lock.json | 632 +++++++++++++++++++++++++++++++++++-----------
package.json | 10 +-
4 files changed, 740 insertions(+), 360 deletions(-)
diff --git a/dist/main.cjs b/dist/main.cjs
index a98f495..474eaef 100644
--- a/dist/main.cjs
+++ b/dist/main.cjs
@@ -20172,27 +20172,6 @@ var require_util8 = __commonJS({
}
var kEnumerableProperty = /* @__PURE__ */ Object.create(null);
kEnumerableProperty.enumerable = true;
- var normalizedMethodRecordsBase = {
- delete: "DELETE",
- DELETE: "DELETE",
- get: "GET",
- GET: "GET",
- head: "HEAD",
- HEAD: "HEAD",
- options: "OPTIONS",
- OPTIONS: "OPTIONS",
- post: "POST",
- POST: "POST",
- put: "PUT",
- PUT: "PUT"
- };
- var normalizedMethodRecords = {
- ...normalizedMethodRecordsBase,
- patch: "patch",
- PATCH: "PATCH"
- };
- Object.setPrototypeOf(normalizedMethodRecordsBase, null);
- Object.setPrototypeOf(normalizedMethodRecords, null);
module2.exports = {
kEnumerableProperty,
nop,
@@ -20231,8 +20210,6 @@ var require_util8 = __commonJS({
isValidHeaderValue,
isTokenCharCode,
parseRangeHeader,
- normalizedMethodRecordsBase,
- normalizedMethodRecords,
isValidPort,
isHttpOrHttpsPrefixed,
nodeMajor,
@@ -20448,8 +20425,7 @@ var require_request3 = __commonJS({
isBlobLike,
buildURL,
validateHandler,
- getServerName,
- normalizedMethodRecords
+ getServerName
} = require_util8();
var { channels } = require_diagnostics();
var { headerNameLowerCasedRecord } = require_constants6();
@@ -20476,12 +20452,12 @@ var require_request3 = __commonJS({
throw new InvalidArgumentError("path must be a string");
} else if (path[0] !== "/" && !(path.startsWith("http://") || path.startsWith("https://")) && method !== "CONNECT") {
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
- } else if (invalidPathRegex.test(path)) {
+ } else if (invalidPathRegex.exec(path) !== null) {
throw new InvalidArgumentError("invalid request path");
}
if (typeof method !== "string") {
throw new InvalidArgumentError("method must be a string");
- } else if (normalizedMethodRecords[method] === void 0 && !isValidHTTPToken(method)) {
+ } else if (!isValidHTTPToken(method)) {
throw new InvalidArgumentError("invalid request method");
}
if (upgrade && typeof upgrade !== "string") {
@@ -21026,7 +21002,7 @@ var require_connect2 = __commonJS({
}
};
}
- function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
+ function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError("maxCachedSessions must be a positive integer or zero");
}
@@ -21042,7 +21018,7 @@ var require_connect2 = __commonJS({
}
servername = servername || options.servername || util.getServerName(host) || null;
const sessionKey = servername || hostname;
- const session = customSession || sessionCache.get(sessionKey) || null;
+ const session = sessionCache.get(sessionKey) || null;
assert(sessionKey);
socket = tls.connect({
highWaterMark: 16384,
@@ -22560,7 +22536,7 @@ var require_util9 = __commonJS({
var { getGlobalOrigin } = require_global3();
var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url();
var { performance: performance2 } = require("node:perf_hooks");
- var { isBlobLike, ReadableStreamFrom, isValidHTTPToken, normalizedMethodRecordsBase } = require_util8();
+ var { isBlobLike, ReadableStreamFrom, isValidHTTPToken } = require_util8();
var assert = require("node:assert");
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
@@ -22667,7 +22643,7 @@ var require_util9 = __commonJS({
}
function appendRequestOriginHeader(request2) {
let serializedOrigin = request2.origin;
- if (serializedOrigin === "client" || serializedOrigin === void 0) {
+ if (serializedOrigin === "client") {
return;
}
if (request2.responseTainting === "cors" || request2.mode === "websocket") {
@@ -22948,8 +22924,29 @@ var require_util9 = __commonJS({
function isCancelled(fetchParams) {
return fetchParams.controller.state === "aborted" || fetchParams.controller.state === "terminated";
}
+ var normalizeMethodRecordBase = {
+ delete: "DELETE",
+ DELETE: "DELETE",
+ get: "GET",
+ GET: "GET",
+ head: "HEAD",
+ HEAD: "HEAD",
+ options: "OPTIONS",
+ OPTIONS: "OPTIONS",
+ post: "POST",
+ POST: "POST",
+ put: "PUT",
+ PUT: "PUT"
+ };
+ var normalizeMethodRecord = {
+ ...normalizeMethodRecordBase,
+ patch: "patch",
+ PATCH: "PATCH"
+ };
+ Object.setPrototypeOf(normalizeMethodRecordBase, null);
+ Object.setPrototypeOf(normalizeMethodRecord, null);
function normalizeMethod(method) {
- return normalizedMethodRecordsBase[method.toLowerCase()] ?? method;
+ return normalizeMethodRecordBase[method.toLowerCase()] ?? method;
}
function serializeJavascriptValueToJSONString(value) {
const result = JSON.stringify(value);
@@ -23086,7 +23083,7 @@ var require_util9 = __commonJS({
}
});
}
- async function fullyReadBody(body, processBody, processBodyError) {
+ async function fullyReadBody(body, processBody, processBodyError, shouldClone) {
const successSteps = processBody;
const errorSteps = processBodyError;
let reader;
@@ -23097,7 +23094,7 @@ var require_util9 = __commonJS({
return;
}
try {
- successSteps(await readAllBytes(reader));
+ successSteps(await readAllBytes(reader, shouldClone));
} catch (e) {
errorSteps(e);
}
@@ -23120,12 +23117,19 @@ var require_util9 = __commonJS({
assert(!invalidIsomorphicEncodeValueRegex.test(input));
return input;
}
- async function readAllBytes(reader) {
+ async function readAllBytes(reader, shouldClone) {
const bytes = [];
let byteLength = 0;
while (true) {
const { done, value: chunk } = await reader.read();
if (done) {
+ if (bytes.length === 1) {
+ const { buffer, byteOffset, byteLength: byteLength2 } = bytes[0];
+ if (shouldClone === false) {
+ return Buffer.from(buffer, byteOffset, byteLength2);
+ }
+ return Buffer.from(buffer.slice(byteOffset, byteOffset + byteLength2), 0, byteLength2);
+ }
return Buffer.concat(bytes, byteLength);
}
if (!isUint8Array(chunk)) {
@@ -23388,6 +23392,7 @@ var require_util9 = __commonJS({
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes,
+ normalizeMethodRecord,
simpleRangeHeaderValue,
buildContentRange,
parseMetadata,
@@ -24059,18 +24064,18 @@ Content-Type: ${value.type || "application/octet-stream"}\r
mimeType = serializeAMimeType(mimeType);
}
return new Blob2([bytes], { type: mimeType });
- }, instance);
+ }, instance, false);
},
arrayBuffer() {
return consumeBody(this, (bytes) => {
- return new Uint8Array(bytes).buffer;
- }, instance);
+ return bytes.buffer;
+ }, instance, true);
},
text() {
- return consumeBody(this, utf8DecodeBytes, instance);
+ return consumeBody(this, utf8DecodeBytes, instance, false);
},
json() {
- return consumeBody(this, parseJSONFromBytes, instance);
+ return consumeBody(this, parseJSONFromBytes, instance, false);
},
formData() {
return consumeBody(this, (value) => {
@@ -24099,12 +24104,12 @@ Content-Type: ${value.type || "application/octet-stream"}\r
throw new TypeError(
'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
);
- }, instance);
+ }, instance, false);
},
bytes() {
return consumeBody(this, (bytes) => {
- return new Uint8Array(bytes);
- }, instance);
+ return new Uint8Array(bytes.buffer, 0, bytes.byteLength);
+ }, instance, true);
}
};
return methods;
@@ -24112,7 +24117,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
function mixinBody(prototype) {
Object.assign(prototype.prototype, bodyMixinMethods(prototype));
}
- async function consumeBody(object, convertBytesToJSValue, instance) {
+ async function consumeBody(object, convertBytesToJSValue, instance, shouldClone) {
webidl.brandCheck(object, instance);
if (bodyUnusable(object[kState].body)) {
throw new TypeError("Body is unusable: Body has already been read");
@@ -24131,7 +24136,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
successSteps(Buffer.allocUnsafe(0));
return promise.promise;
}
- await fullyReadBody(object[kState].body, successSteps, errorSteps);
+ await fullyReadBody(object[kState].body, successSteps, errorSteps, shouldClone);
return promise.promise;
}
function bodyUnusable(body) {
@@ -24884,25 +24889,25 @@ upgrade: ${upgrade}\r
channels.sendHeaders.publish({ request: request2, headers: header, socket });
}
if (!body || bodyLength === 0) {
- writeBuffer(abort, null, client, request2, socket, contentLength, header, expectsPayload);
+ writeBuffer({ abort, body: null, client, request: request2, socket, contentLength, header, expectsPayload });
} else if (util.isBuffer(body)) {
- writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable(abort, body.stream(), client, request2, socket, contentLength, header, expectsPayload);
+ writeIterable({ abort, body: body.stream(), client, request: request2, socket, contentLength, header, expectsPayload });
} else {
- writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
}
} else if (util.isStream(body)) {
- writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
} else if (util.isIterable(body)) {
- writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
} else {
assert(false);
}
return true;
}
- function writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ function writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
let finished = false;
const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header });
@@ -24971,7 +24976,7 @@ upgrade: ${upgrade}\r
setImmediate(onClose);
}
}
- function writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ function writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
try {
if (!body) {
if (contentLength === 0) {
@@ -25002,7 +25007,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ async function writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -25025,7 +25030,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ async function writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -25488,79 +25493,81 @@ var require_client_h2 = __commonJS({
return true;
function writeBodyH2() {
if (!body || contentLength === 0) {
- writeBuffer(
+ writeBuffer({
abort,
- stream,
- null,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ h2stream: stream,
+ body: null,
+ socket: client[kSocket]
+ });
} else if (util.isBuffer(body)) {
- writeBuffer(
+ writeBuffer({
abort,
- stream,
- body,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ body,
+ expectsPayload,
+ h2stream: stream,
+ socket: client[kSocket]
+ });
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable(
+ writeIterable({
abort,
- stream,
- body.stream(),
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ h2stream: stream,
+ body: body.stream(),
+ socket: client[kSocket]
+ });
} else {
- writeBlob(
+ writeBlob({
abort,
- stream,
body,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ h2stream: stream,
+ socket: client[kSocket]
+ });
}
} else if (util.isStream(body)) {
- writeStream(
+ writeStream({
abort,
- client[kSocket],
- expectsPayload,
- stream,
body,
client,
- request2,
- contentLength
- );
+ request: request2,
+ contentLength,
+ expectsPayload,
+ socket: client[kSocket],
+ h2stream: stream,
+ header: ""
+ });
} else if (util.isIterable(body)) {
- writeIterable(
+ writeIterable({
abort,
- stream,
body,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ header: "",
+ h2stream: stream,
+ socket: client[kSocket]
+ });
} else {
assert(false);
}
}
}
- function writeBuffer(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
+ function writeBuffer({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, "buffer body must have content length");
@@ -25579,7 +25586,7 @@ var require_client_h2 = __commonJS({
abort(error);
}
}
- function writeStream(abort, socket, expectsPayload, h2stream, body, client, request2, contentLength) {
+ function writeStream({ abort, socket, expectsPayload, h2stream, body, client, request: request2, contentLength }) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
const pipe = pipeline(
body,
@@ -25603,7 +25610,7 @@ var require_client_h2 = __commonJS({
request2.onBodySent(chunk);
}
}
- async function writeBlob(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
+ async function writeBlob({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -25624,7 +25631,7 @@ var require_client_h2 = __commonJS({
abort(err);
}
}
- async function writeIterable(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
+ async function writeIterable({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -27369,7 +27376,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("Content-Range mismatch", statusCode, {
headers,
- data: { count: this.retryCount }
+ count: this.retryCount
})
);
return false;
@@ -27378,7 +27385,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("ETag mismatch", statusCode, {
headers,
- data: { count: this.retryCount }
+ count: this.retryCount
})
);
return false;
@@ -30602,7 +30609,9 @@ var require_request4 = __commonJS({
var {
isValidHTTPToken,
sameOrigin,
- environmentSettingsObject
+ normalizeMethod,
+ environmentSettingsObject,
+ normalizeMethodRecord
} = require_util9();
var {
forbiddenMethodsSet,
@@ -30614,7 +30623,7 @@ var require_request4 = __commonJS({
requestCache,
requestDuplex
} = require_constants8();
- var { kEnumerableProperty, normalizedMethodRecordsBase, normalizedMethodRecords } = util;
+ var { kEnumerableProperty } = util;
var { kHeaders, kSignal, kState, kDispatcher } = require_symbols7();
var { webidl } = require_webidl2();
var { URLSerializer } = require_data_url();
@@ -30811,18 +30820,17 @@ var require_request4 = __commonJS({
}
if (init.method !== void 0) {
let method = init.method;
- const mayBeNormalized = normalizedMethodRecords[method];
+ const mayBeNormalized = normalizeMethodRecord[method];
if (mayBeNormalized !== void 0) {
request2.method = mayBeNormalized;
} else {
if (!isValidHTTPToken(method)) {
throw new TypeError(`'${method}' is not a valid HTTP method.`);
}
- const upperCase = method.toUpperCase();
- if (forbiddenMethodsSet.has(upperCase)) {
+ if (forbiddenMethodsSet.has(method.toUpperCase())) {
throw new TypeError(`'${method}' HTTP method is unsupported.`);
}
- method = normalizedMethodRecordsBase[upperCase] ?? method;
+ method = normalizeMethod(method);
request2.method = method;
}
if (!patchMethodWarning && request2.method === "patch") {
@@ -35538,6 +35546,7 @@ var require_websocket2 = __commonJS({
var { types } = require("node:util");
var { ErrorEvent, CloseEvent } = require_events2();
var { SendQueue } = require_sender();
+ var experimentalWarned = false;
var WebSocket = class _WebSocket extends EventTarget {
#events = {
open: null,
@@ -35558,6 +35567,12 @@ var require_websocket2 = __commonJS({
super();
const prefix = "WebSocket constructor";
webidl.argumentLengthCheck(arguments, 1, prefix);
+ if (!experimentalWarned) {
+ experimentalWarned = true;
+ process.emitWarning("WebSockets are experimental, expect them to change at any time.", {
+ code: "UNDICI-WS"
+ });
+ }
const options = webidl.converters["DOMString or sequence or WebSocketInit"](protocols, prefix, "options");
url = webidl.converters.USVString(url, prefix, "url");
protocols = options.protocols;
diff --git a/dist/post.cjs b/dist/post.cjs
index 090c0f1..0307466 100644
--- a/dist/post.cjs
+++ b/dist/post.cjs
@@ -19943,27 +19943,6 @@ var require_util8 = __commonJS({
}
var kEnumerableProperty = /* @__PURE__ */ Object.create(null);
kEnumerableProperty.enumerable = true;
- var normalizedMethodRecordsBase = {
- delete: "DELETE",
- DELETE: "DELETE",
- get: "GET",
- GET: "GET",
- head: "HEAD",
- HEAD: "HEAD",
- options: "OPTIONS",
- OPTIONS: "OPTIONS",
- post: "POST",
- POST: "POST",
- put: "PUT",
- PUT: "PUT"
- };
- var normalizedMethodRecords = {
- ...normalizedMethodRecordsBase,
- patch: "patch",
- PATCH: "PATCH"
- };
- Object.setPrototypeOf(normalizedMethodRecordsBase, null);
- Object.setPrototypeOf(normalizedMethodRecords, null);
module2.exports = {
kEnumerableProperty,
nop,
@@ -20002,8 +19981,6 @@ var require_util8 = __commonJS({
isValidHeaderValue,
isTokenCharCode,
parseRangeHeader,
- normalizedMethodRecordsBase,
- normalizedMethodRecords,
isValidPort,
isHttpOrHttpsPrefixed,
nodeMajor,
@@ -20219,8 +20196,7 @@ var require_request3 = __commonJS({
isBlobLike,
buildURL,
validateHandler,
- getServerName,
- normalizedMethodRecords
+ getServerName
} = require_util8();
var { channels } = require_diagnostics();
var { headerNameLowerCasedRecord } = require_constants6();
@@ -20247,12 +20223,12 @@ var require_request3 = __commonJS({
throw new InvalidArgumentError("path must be a string");
} else if (path[0] !== "/" && !(path.startsWith("http://") || path.startsWith("https://")) && method !== "CONNECT") {
throw new InvalidArgumentError("path must be an absolute URL or start with a slash");
- } else if (invalidPathRegex.test(path)) {
+ } else if (invalidPathRegex.exec(path) !== null) {
throw new InvalidArgumentError("invalid request path");
}
if (typeof method !== "string") {
throw new InvalidArgumentError("method must be a string");
- } else if (normalizedMethodRecords[method] === void 0 && !isValidHTTPToken(method)) {
+ } else if (!isValidHTTPToken(method)) {
throw new InvalidArgumentError("invalid request method");
}
if (upgrade && typeof upgrade !== "string") {
@@ -20797,7 +20773,7 @@ var require_connect2 = __commonJS({
}
};
}
- function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
+ function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError("maxCachedSessions must be a positive integer or zero");
}
@@ -20813,7 +20789,7 @@ var require_connect2 = __commonJS({
}
servername = servername || options.servername || util.getServerName(host) || null;
const sessionKey = servername || hostname;
- const session = customSession || sessionCache.get(sessionKey) || null;
+ const session = sessionCache.get(sessionKey) || null;
assert(sessionKey);
socket = tls.connect({
highWaterMark: 16384,
@@ -22331,7 +22307,7 @@ var require_util9 = __commonJS({
var { getGlobalOrigin } = require_global3();
var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url();
var { performance: performance2 } = require("node:perf_hooks");
- var { isBlobLike, ReadableStreamFrom, isValidHTTPToken, normalizedMethodRecordsBase } = require_util8();
+ var { isBlobLike, ReadableStreamFrom, isValidHTTPToken } = require_util8();
var assert = require("node:assert");
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
@@ -22438,7 +22414,7 @@ var require_util9 = __commonJS({
}
function appendRequestOriginHeader(request2) {
let serializedOrigin = request2.origin;
- if (serializedOrigin === "client" || serializedOrigin === void 0) {
+ if (serializedOrigin === "client") {
return;
}
if (request2.responseTainting === "cors" || request2.mode === "websocket") {
@@ -22719,8 +22695,29 @@ var require_util9 = __commonJS({
function isCancelled(fetchParams) {
return fetchParams.controller.state === "aborted" || fetchParams.controller.state === "terminated";
}
+ var normalizeMethodRecordBase = {
+ delete: "DELETE",
+ DELETE: "DELETE",
+ get: "GET",
+ GET: "GET",
+ head: "HEAD",
+ HEAD: "HEAD",
+ options: "OPTIONS",
+ OPTIONS: "OPTIONS",
+ post: "POST",
+ POST: "POST",
+ put: "PUT",
+ PUT: "PUT"
+ };
+ var normalizeMethodRecord = {
+ ...normalizeMethodRecordBase,
+ patch: "patch",
+ PATCH: "PATCH"
+ };
+ Object.setPrototypeOf(normalizeMethodRecordBase, null);
+ Object.setPrototypeOf(normalizeMethodRecord, null);
function normalizeMethod(method) {
- return normalizedMethodRecordsBase[method.toLowerCase()] ?? method;
+ return normalizeMethodRecordBase[method.toLowerCase()] ?? method;
}
function serializeJavascriptValueToJSONString(value) {
const result = JSON.stringify(value);
@@ -22857,7 +22854,7 @@ var require_util9 = __commonJS({
}
});
}
- async function fullyReadBody(body, processBody, processBodyError) {
+ async function fullyReadBody(body, processBody, processBodyError, shouldClone) {
const successSteps = processBody;
const errorSteps = processBodyError;
let reader;
@@ -22868,7 +22865,7 @@ var require_util9 = __commonJS({
return;
}
try {
- successSteps(await readAllBytes(reader));
+ successSteps(await readAllBytes(reader, shouldClone));
} catch (e) {
errorSteps(e);
}
@@ -22891,12 +22888,19 @@ var require_util9 = __commonJS({
assert(!invalidIsomorphicEncodeValueRegex.test(input));
return input;
}
- async function readAllBytes(reader) {
+ async function readAllBytes(reader, shouldClone) {
const bytes = [];
let byteLength = 0;
while (true) {
const { done, value: chunk } = await reader.read();
if (done) {
+ if (bytes.length === 1) {
+ const { buffer, byteOffset, byteLength: byteLength2 } = bytes[0];
+ if (shouldClone === false) {
+ return Buffer.from(buffer, byteOffset, byteLength2);
+ }
+ return Buffer.from(buffer.slice(byteOffset, byteOffset + byteLength2), 0, byteLength2);
+ }
return Buffer.concat(bytes, byteLength);
}
if (!isUint8Array(chunk)) {
@@ -23159,6 +23163,7 @@ var require_util9 = __commonJS({
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes,
+ normalizeMethodRecord,
simpleRangeHeaderValue,
buildContentRange,
parseMetadata,
@@ -23830,18 +23835,18 @@ Content-Type: ${value.type || "application/octet-stream"}\r
mimeType = serializeAMimeType(mimeType);
}
return new Blob2([bytes], { type: mimeType });
- }, instance);
+ }, instance, false);
},
arrayBuffer() {
return consumeBody(this, (bytes) => {
- return new Uint8Array(bytes).buffer;
- }, instance);
+ return bytes.buffer;
+ }, instance, true);
},
text() {
- return consumeBody(this, utf8DecodeBytes, instance);
+ return consumeBody(this, utf8DecodeBytes, instance, false);
},
json() {
- return consumeBody(this, parseJSONFromBytes, instance);
+ return consumeBody(this, parseJSONFromBytes, instance, false);
},
formData() {
return consumeBody(this, (value) => {
@@ -23870,12 +23875,12 @@ Content-Type: ${value.type || "application/octet-stream"}\r
throw new TypeError(
'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
);
- }, instance);
+ }, instance, false);
},
bytes() {
return consumeBody(this, (bytes) => {
- return new Uint8Array(bytes);
- }, instance);
+ return new Uint8Array(bytes.buffer, 0, bytes.byteLength);
+ }, instance, true);
}
};
return methods;
@@ -23883,7 +23888,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
function mixinBody(prototype) {
Object.assign(prototype.prototype, bodyMixinMethods(prototype));
}
- async function consumeBody(object, convertBytesToJSValue, instance) {
+ async function consumeBody(object, convertBytesToJSValue, instance, shouldClone) {
webidl.brandCheck(object, instance);
if (bodyUnusable(object[kState].body)) {
throw new TypeError("Body is unusable: Body has already been read");
@@ -23902,7 +23907,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
successSteps(Buffer.allocUnsafe(0));
return promise.promise;
}
- await fullyReadBody(object[kState].body, successSteps, errorSteps);
+ await fullyReadBody(object[kState].body, successSteps, errorSteps, shouldClone);
return promise.promise;
}
function bodyUnusable(body) {
@@ -24655,25 +24660,25 @@ upgrade: ${upgrade}\r
channels.sendHeaders.publish({ request: request2, headers: header, socket });
}
if (!body || bodyLength === 0) {
- writeBuffer(abort, null, client, request2, socket, contentLength, header, expectsPayload);
+ writeBuffer({ abort, body: null, client, request: request2, socket, contentLength, header, expectsPayload });
} else if (util.isBuffer(body)) {
- writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable(abort, body.stream(), client, request2, socket, contentLength, header, expectsPayload);
+ writeIterable({ abort, body: body.stream(), client, request: request2, socket, contentLength, header, expectsPayload });
} else {
- writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
}
} else if (util.isStream(body)) {
- writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
} else if (util.isIterable(body)) {
- writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload);
+ writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload });
} else {
assert(false);
}
return true;
}
- function writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ function writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
let finished = false;
const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header });
@@ -24742,7 +24747,7 @@ upgrade: ${upgrade}\r
setImmediate(onClose);
}
}
- function writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ function writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
try {
if (!body) {
if (contentLength === 0) {
@@ -24773,7 +24778,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ async function writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -24796,7 +24801,7 @@ upgrade: ${upgrade}\r
abort(err);
}
}
- async function writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload) {
+ async function writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -25259,79 +25264,81 @@ var require_client_h2 = __commonJS({
return true;
function writeBodyH2() {
if (!body || contentLength === 0) {
- writeBuffer(
+ writeBuffer({
abort,
- stream,
- null,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ h2stream: stream,
+ body: null,
+ socket: client[kSocket]
+ });
} else if (util.isBuffer(body)) {
- writeBuffer(
+ writeBuffer({
abort,
- stream,
- body,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ body,
+ expectsPayload,
+ h2stream: stream,
+ socket: client[kSocket]
+ });
} else if (util.isBlobLike(body)) {
if (typeof body.stream === "function") {
- writeIterable(
+ writeIterable({
abort,
- stream,
- body.stream(),
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ h2stream: stream,
+ body: body.stream(),
+ socket: client[kSocket]
+ });
} else {
- writeBlob(
+ writeBlob({
abort,
- stream,
body,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ h2stream: stream,
+ socket: client[kSocket]
+ });
}
} else if (util.isStream(body)) {
- writeStream(
+ writeStream({
abort,
- client[kSocket],
- expectsPayload,
- stream,
body,
client,
- request2,
- contentLength
- );
+ request: request2,
+ contentLength,
+ expectsPayload,
+ socket: client[kSocket],
+ h2stream: stream,
+ header: ""
+ });
} else if (util.isIterable(body)) {
- writeIterable(
+ writeIterable({
abort,
- stream,
body,
client,
- request2,
- client[kSocket],
+ request: request2,
contentLength,
- expectsPayload
- );
+ expectsPayload,
+ header: "",
+ h2stream: stream,
+ socket: client[kSocket]
+ });
} else {
assert(false);
}
}
}
- function writeBuffer(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
+ function writeBuffer({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, "buffer body must have content length");
@@ -25350,7 +25357,7 @@ var require_client_h2 = __commonJS({
abort(error);
}
}
- function writeStream(abort, socket, expectsPayload, h2stream, body, client, request2, contentLength) {
+ function writeStream({ abort, socket, expectsPayload, h2stream, body, client, request: request2, contentLength }) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
const pipe = pipeline(
body,
@@ -25374,7 +25381,7 @@ var require_client_h2 = __commonJS({
request2.onBodySent(chunk);
}
}
- async function writeBlob(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
+ async function writeBlob({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
assert(contentLength === body.size, "blob body must have content length");
try {
if (contentLength != null && contentLength !== body.size) {
@@ -25395,7 +25402,7 @@ var require_client_h2 = __commonJS({
abort(err);
}
}
- async function writeIterable(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) {
+ async function writeIterable({ abort, h2stream, body, client, request: request2, socket, contentLength, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -27140,7 +27147,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("Content-Range mismatch", statusCode, {
headers,
- data: { count: this.retryCount }
+ count: this.retryCount
})
);
return false;
@@ -27149,7 +27156,7 @@ var require_retry_handler = __commonJS({
this.abort(
new RequestRetryError("ETag mismatch", statusCode, {
headers,
- data: { count: this.retryCount }
+ count: this.retryCount
})
);
return false;
@@ -30373,7 +30380,9 @@ var require_request4 = __commonJS({
var {
isValidHTTPToken,
sameOrigin,
- environmentSettingsObject
+ normalizeMethod,
+ environmentSettingsObject,
+ normalizeMethodRecord
} = require_util9();
var {
forbiddenMethodsSet,
@@ -30385,7 +30394,7 @@ var require_request4 = __commonJS({
requestCache,
requestDuplex
} = require_constants8();
- var { kEnumerableProperty, normalizedMethodRecordsBase, normalizedMethodRecords } = util;
+ var { kEnumerableProperty } = util;
var { kHeaders, kSignal, kState, kDispatcher } = require_symbols7();
var { webidl } = require_webidl2();
var { URLSerializer } = require_data_url();
@@ -30582,18 +30591,17 @@ var require_request4 = __commonJS({
}
if (init.method !== void 0) {
let method = init.method;
- const mayBeNormalized = normalizedMethodRecords[method];
+ const mayBeNormalized = normalizeMethodRecord[method];
if (mayBeNormalized !== void 0) {
request2.method = mayBeNormalized;
} else {
if (!isValidHTTPToken(method)) {
throw new TypeError(`'${method}' is not a valid HTTP method.`);
}
- const upperCase = method.toUpperCase();
- if (forbiddenMethodsSet.has(upperCase)) {
+ if (forbiddenMethodsSet.has(method.toUpperCase())) {
throw new TypeError(`'${method}' HTTP method is unsupported.`);
}
- method = normalizedMethodRecordsBase[upperCase] ?? method;
+ method = normalizeMethod(method);
request2.method = method;
}
if (!patchMethodWarning && request2.method === "patch") {
@@ -35309,6 +35317,7 @@ var require_websocket2 = __commonJS({
var { types } = require("node:util");
var { ErrorEvent, CloseEvent } = require_events2();
var { SendQueue } = require_sender();
+ var experimentalWarned = false;
var WebSocket = class _WebSocket extends EventTarget {
#events = {
open: null,
@@ -35329,6 +35338,12 @@ var require_websocket2 = __commonJS({
super();
const prefix = "WebSocket constructor";
webidl.argumentLengthCheck(arguments, 1, prefix);
+ if (!experimentalWarned) {
+ experimentalWarned = true;
+ process.emitWarning("WebSockets are experimental, expect them to change at any time.", {
+ code: "UNDICI-WS"
+ });
+ }
const options = webidl.converters["DOMString or sequence or WebSocketInit"](protocols, prefix, "options");
url = webidl.converters.USVString(url, prefix, "url");
protocols = options.protocols;
diff --git a/package-lock.json b/package-lock.json
index 73d78ce..a318de7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -18,12 +18,12 @@
"devDependencies": {
"@sinonjs/fake-timers": "^11.2.2",
"ava": "^6.1.3",
- "c8": "^9.1.0",
+ "c8": "^10.1.2",
"dotenv": "^16.4.5",
- "esbuild": "^0.21.4",
- "execa": "^9.1.0",
+ "esbuild": "^0.22.0",
+ "execa": "^9.3.0",
"open-cli": "^8.0.0",
- "yaml": "^2.4.2"
+ "yaml": "^2.4.5"
}
},
"node_modules/@actions/core": {
@@ -62,9 +62,9 @@
"dev": true
},
"node_modules/@esbuild/aix-ppc64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.4.tgz",
- "integrity": "sha512-Zrm+B33R4LWPLjDEVnEqt2+SLTATlru1q/xYKVn8oVTbiRBGmK2VIMoIYGJDGyftnGaC788IuzGFAlb7IQ0Y8A==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.22.0.tgz",
+ "integrity": "sha512-uvQR2crZ/zgzSHDvdygHyNI+ze9zwS8mqz0YtGXotSqvEE0UkYE9s+FZKQNTt1VtT719mfP3vHrUdCpxBNQZhQ==",
"cpu": [
"ppc64"
],
@@ -74,13 +74,13 @@
"aix"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.4.tgz",
- "integrity": "sha512-E7H/yTd8kGQfY4z9t3nRPk/hrhaCajfA3YSQSBrst8B+3uTcgsi8N+ZWYCaeIDsiVs6m65JPCaQN/DxBRclF3A==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.22.0.tgz",
+ "integrity": "sha512-PBnyP+r8vJE4ifxsWys9l+Mc2UY/yYZOpX82eoyGISXXb3dRr0M21v+s4fgRKWMFPMSf/iyowqPW/u7ScSUkjQ==",
"cpu": [
"arm"
],
@@ -90,13 +90,13 @@
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.4.tgz",
- "integrity": "sha512-fYFnz+ObClJ3dNiITySBUx+oNalYUT18/AryMxfovLkYWbutXsct3Wz2ZWAcGGppp+RVVX5FiXeLYGi97umisA==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.22.0.tgz",
+ "integrity": "sha512-UKhPb3o2gAB/bfXcl58ZXTn1q2oVu1rEu/bKrCtmm+Nj5MKUbrOwR5WAixE2v+lk0amWuwPvhnPpBRLIGiq7ig==",
"cpu": [
"arm64"
],
@@ -106,13 +106,13 @@
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.4.tgz",
- "integrity": "sha512-mDqmlge3hFbEPbCWxp4fM6hqq7aZfLEHZAKGP9viq9wMUBVQx202aDIfc3l+d2cKhUJM741VrCXEzRFhPDKH3Q==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.22.0.tgz",
+ "integrity": "sha512-IjTYtvIrjhR41Ijy2dDPgYjQHWG/x/A4KXYbs1fiU3efpRdoxMChK3oEZV6GPzVEzJqxFgcuBaiX1kwEvWUxSw==",
"cpu": [
"x64"
],
@@ -122,13 +122,13 @@
"android"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.4.tgz",
- "integrity": "sha512-72eaIrDZDSiWqpmCzVaBD58c8ea8cw/U0fq/PPOTqE3c53D0xVMRt2ooIABZ6/wj99Y+h4ksT/+I+srCDLU9TA==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.22.0.tgz",
+ "integrity": "sha512-mqt+Go4y9wRvEz81bhKd9RpHsQR1LwU8Xm6jZRUV/xpM7cIQFbFH6wBCLPTNsdELBvfoHeumud7X78jQQJv2TA==",
"cpu": [
"arm64"
],
@@ -138,13 +138,13 @@
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.4.tgz",
- "integrity": "sha512-uBsuwRMehGmw1JC7Vecu/upOjTsMhgahmDkWhGLWxIgUn2x/Y4tIwUZngsmVb6XyPSTXJYS4YiASKPcm9Zitag==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.22.0.tgz",
+ "integrity": "sha512-vTaTQ9OgYc3VTaWtOE5pSuDT6H3d/qSRFRfSBbnxFfzAvYoB3pqKXA0LEbi/oT8GUOEAutspfRMqPj2ezdFaMw==",
"cpu": [
"x64"
],
@@ -154,13 +154,13 @@
"darwin"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.4.tgz",
- "integrity": "sha512-8JfuSC6YMSAEIZIWNL3GtdUT5NhUA/CMUCpZdDRolUXNAXEE/Vbpe6qlGLpfThtY5NwXq8Hi4nJy4YfPh+TwAg==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.22.0.tgz",
+ "integrity": "sha512-0e1ZgoobJzaGnR4reD7I9rYZ7ttqdh1KPvJWnquUoDJhL0rYwdneeLailBzd2/4g/U5p4e5TIHEWa68NF2hFpQ==",
"cpu": [
"arm64"
],
@@ -170,13 +170,13 @@
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.4.tgz",
- "integrity": "sha512-8d9y9eQhxv4ef7JmXny7591P/PYsDFc4+STaxC1GBv0tMyCdyWfXu2jBuqRsyhY8uL2HU8uPyscgE2KxCY9imQ==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.22.0.tgz",
+ "integrity": "sha512-BFgyYwlCwRWyPQJtkzqq2p6pJbiiWgp0P9PNf7a5FQ1itKY4czPuOMAlFVItirSmEpRPCeImuwePNScZS0pL5Q==",
"cpu": [
"x64"
],
@@ -186,13 +186,13 @@
"freebsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.4.tgz",
- "integrity": "sha512-2rqFFefpYmpMs+FWjkzSgXg5vViocqpq5a1PSRgT0AvSgxoXmGF17qfGAzKedg6wAwyM7UltrKVo9kxaJLMF/g==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.22.0.tgz",
+ "integrity": "sha512-KEMWiA9aGuPUD4BH5yjlhElLgaRXe+Eri6gKBoDazoPBTo1BXc/e6IW5FcJO9DoL19FBeCxgONyh95hLDNepIg==",
"cpu": [
"arm"
],
@@ -202,13 +202,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.4.tgz",
- "integrity": "sha512-/GLD2orjNU50v9PcxNpYZi+y8dJ7e7/LhQukN3S4jNDXCKkyyiyAz9zDw3siZ7Eh1tRcnCHAo/WcqKMzmi4eMQ==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.22.0.tgz",
+ "integrity": "sha512-V/K2rctCUgC0PCXpN7AqT4hoazXKgIYugFGu/myk2+pfe6jTW2guz/TBwq4cZ7ESqusR/IzkcQaBkcjquuBWsw==",
"cpu": [
"arm64"
],
@@ -218,13 +218,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.4.tgz",
- "integrity": "sha512-pNftBl7m/tFG3t2m/tSjuYeWIffzwAZT9m08+9DPLizxVOsUl8DdFzn9HvJrTQwe3wvJnwTdl92AonY36w/25g==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.22.0.tgz",
+ "integrity": "sha512-r2ZZqkOMOrpUhzNwxI7uLAHIDwkfeqmTnrv1cjpL/rjllPWszgqmprd/om9oviKXUBpMqHbXmppvjAYgISb26Q==",
"cpu": [
"ia32"
],
@@ -234,13 +234,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.4.tgz",
- "integrity": "sha512-cSD2gzCK5LuVX+hszzXQzlWya6c7hilO71L9h4KHwqI4qeqZ57bAtkgcC2YioXjsbfAv4lPn3qe3b00Zt+jIfQ==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.22.0.tgz",
+ "integrity": "sha512-qaowLrV/YOMAL2RfKQ4C/VaDzAuLDuylM2sd/LH+4OFirMl6CuDpRlCq4u49ZBaVV8pkI/Y+hTdiibvQRhojCA==",
"cpu": [
"loong64"
],
@@ -250,13 +250,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.4.tgz",
- "integrity": "sha512-qtzAd3BJh7UdbiXCrg6npWLYU0YpufsV9XlufKhMhYMJGJCdfX/G6+PNd0+v877X1JG5VmjBLUiFB0o8EUSicA==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.22.0.tgz",
+ "integrity": "sha512-hgrezzjQTRxjkQ5k08J6rtZN5PNnkWx/Rz6Kmj9gnsdCAX1I4Dn4ZPqvFRkXo55Q3pnVQJBwbdtrTO7tMGtyVA==",
"cpu": [
"mips64el"
],
@@ -266,13 +266,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.4.tgz",
- "integrity": "sha512-yB8AYzOTaL0D5+2a4xEy7OVvbcypvDR05MsB/VVPVA7nL4hc5w5Dyd/ddnayStDgJE59fAgNEOdLhBxjfx5+dg==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.22.0.tgz",
+ "integrity": "sha512-ewxg6FLLUio883XgSjfULEmDl3VPv/TYNnRprVAS3QeGFLdCYdx1tIudBcd7n9jIdk82v1Ajov4jx87qW7h9+g==",
"cpu": [
"ppc64"
],
@@ -282,13 +282,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.4.tgz",
- "integrity": "sha512-Y5AgOuVzPjQdgU59ramLoqSSiXddu7F3F+LI5hYy/d1UHN7K5oLzYBDZe23QmQJ9PIVUXwOdKJ/jZahPdxzm9w==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.22.0.tgz",
+ "integrity": "sha512-Az5XbgSJC2lE8XK8pdcutsf9RgdafWdTpUK/+6uaDdfkviw/B4JCwAfh1qVeRWwOohwdsl4ywZrWBNWxwrPLFg==",
"cpu": [
"riscv64"
],
@@ -298,13 +298,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.4.tgz",
- "integrity": "sha512-Iqc/l/FFwtt8FoTK9riYv9zQNms7B8u+vAI/rxKuN10HgQIXaPzKZc479lZ0x6+vKVQbu55GdpYpeNWzjOhgbA==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.22.0.tgz",
+ "integrity": "sha512-8j4a2ChT9+V34NNNY9c/gMldutaJFmfMacTPq4KfNKwv2fitBCLYjee7c+Vxaha2nUhPK7cXcZpJtJ3+Y7ZdVQ==",
"cpu": [
"s390x"
],
@@ -314,13 +314,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.4.tgz",
- "integrity": "sha512-Td9jv782UMAFsuLZINfUpoF5mZIbAj+jv1YVtE58rFtfvoKRiKSkRGQfHTgKamLVT/fO7203bHa3wU122V/Bdg==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.22.0.tgz",
+ "integrity": "sha512-JUQyOnpbAkkRFOk/AhsEemz5TfWN4FJZxVObUlnlNCbe7QBl61ZNfM4cwBXayQA6laMJMUcqLHaYQHAB6YQ95Q==",
"cpu": [
"x64"
],
@@ -330,13 +330,13 @@
"linux"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.4.tgz",
- "integrity": "sha512-Awn38oSXxsPMQxaV0Ipb7W/gxZtk5Tx3+W+rAPdZkyEhQ6968r9NvtkjhnhbEgWXYbgV+JEONJ6PcdBS+nlcpA==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.22.0.tgz",
+ "integrity": "sha512-11PoCoHXo4HFNbLsXuMB6bpMPWGDiw7xETji6COdJss4SQZLvcgNoeSqWtATRm10Jj1uEHiaIk4N0PiN6x4Fcg==",
"cpu": [
"x64"
],
@@ -346,13 +346,29 @@
"netbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.22.0.tgz",
+ "integrity": "sha512-Ezlhu/YyITmXwKSB+Zu/QqD7cxrjrpiw85cc0Rbd3AWr2wsgp+dWbWOE8MqHaLW9NKMZvuL0DhbJbvzR7F6Zvg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.4.tgz",
- "integrity": "sha512-IsUmQeCY0aU374R82fxIPu6vkOybWIMc3hVGZ3ChRwL9hA1TwY+tS0lgFWV5+F1+1ssuvvXt3HFqe8roCip8Hg==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.22.0.tgz",
+ "integrity": "sha512-ufjdW5tFJGUjlH9j/5cCE9lrwRffyZh+T4vYvoDKoYsC6IXbwaFeV/ENxeNXcxotF0P8CDzoICXVSbJaGBhkrw==",
"cpu": [
"x64"
],
@@ -362,13 +378,13 @@
"openbsd"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.4.tgz",
- "integrity": "sha512-hsKhgZ4teLUaDA6FG/QIu2q0rI6I36tZVfM4DBZv3BG0mkMIdEnMbhc4xwLvLJSS22uWmaVkFkqWgIS0gPIm+A==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.22.0.tgz",
+ "integrity": "sha512-zY6ly/AoSmKnmNTowDJsK5ehra153/5ZhqxNLfq9NRsTTltetr+yHHcQ4RW7QDqw4JC8A1uC1YmeSfK9NRcK1w==",
"cpu": [
"x64"
],
@@ -378,13 +394,13 @@
"sunos"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.4.tgz",
- "integrity": "sha512-UUfMgMoXPoA/bvGUNfUBFLCh0gt9dxZYIx9W4rfJr7+hKe5jxxHmfOK8YSH4qsHLLN4Ck8JZ+v7Q5fIm1huErg==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.22.0.tgz",
+ "integrity": "sha512-Kml5F7tv/1Maam0pbbCrvkk9vj046dPej30kFzlhXnhuCtYYBP6FGy/cLbc5yUT1lkZznGLf2OvuvmLjscO5rw==",
"cpu": [
"arm64"
],
@@ -394,13 +410,13 @@
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.4.tgz",
- "integrity": "sha512-yIxbspZb5kGCAHWm8dexALQ9en1IYDfErzjSEq1KzXFniHv019VT3mNtTK7t8qdy4TwT6QYHI9sEZabONHg+aw==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.22.0.tgz",
+ "integrity": "sha512-IOgwn+mYTM3RrcydP4Og5IpXh+ftN8oF+HELTXSmbWBlujuci4Qa3DTeO+LEErceisI7KUSfEIiX+WOUlpELkw==",
"cpu": [
"ia32"
],
@@ -410,13 +426,13 @@
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.4.tgz",
- "integrity": "sha512-sywLRD3UK/qRJt0oBwdpYLBibk7KiRfbswmWRDabuncQYSlf8aLEEUor/oP6KRz8KEG+HoiVLBhPRD5JWjS8Sg==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.22.0.tgz",
+ "integrity": "sha512-4bDHJrk2WHBXJPhy1y80X7/5b5iZTZP3LGcKIlAP1J+KqZ4zQAPMLEzftGyjjfcKbA4JDlPt/+2R/F1ZTeRgrw==",
"cpu": [
"x64"
],
@@ -426,7 +442,7 @@
"win32"
],
"engines": {
- "node": ">=12"
+ "node": ">=18"
}
},
"node_modules/@fastify/busboy": {
@@ -437,6 +453,63 @@
"node": ">=14"
}
},
+ "node_modules/@isaacs/cliui": {
+ "version": "8.0.2",
+ "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
+ "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
+ "dev": true,
+ "dependencies": {
+ "string-width": "^5.1.2",
+ "string-width-cjs": "npm:string-width@^4.2.0",
+ "strip-ansi": "^7.0.1",
+ "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
+ "wrap-ansi": "^8.1.0",
+ "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@isaacs/cliui/node_modules/emoji-regex": {
+ "version": "9.2.2",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
+ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
+ "dev": true
+ },
+ "node_modules/@isaacs/cliui/node_modules/string-width": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
+ "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
+ "dev": true,
+ "dependencies": {
+ "eastasianwidth": "^0.2.0",
+ "emoji-regex": "^9.2.2",
+ "strip-ansi": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/@isaacs/cliui/node_modules/wrap-ansi": {
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
+ "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
+ "dev": true,
+ "dependencies": {
+ "ansi-styles": "^6.1.0",
+ "string-width": "^5.0.1",
+ "strip-ansi": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
"node_modules/@istanbuljs/schema": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
@@ -660,6 +733,16 @@
"@octokit/openapi-types": "^22.2.0"
}
},
+ "node_modules/@pkgjs/parseargs": {
+ "version": "0.11.0",
+ "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
+ "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
+ "dev": true,
+ "optional": true,
+ "engines": {
+ "node": ">=14"
+ }
+ },
"node_modules/@rollup/pluginutils": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-4.2.1.tgz",
@@ -1021,9 +1104,9 @@
}
},
"node_modules/c8": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/c8/-/c8-9.1.0.tgz",
- "integrity": "sha512-mBWcT5iqNir1zIkzSPyI3NCR9EZCVI3WUD+AVO17MVWTSFNyUueXE82qTeampNtTr+ilN/5Ua3j24LgbCKjDVg==",
+ "version": "10.1.2",
+ "resolved": "https://registry.npmjs.org/c8/-/c8-10.1.2.tgz",
+ "integrity": "sha512-Qr6rj76eSshu5CgRYvktW0uM0CFY0yi4Fd5D0duDXO6sYinyopmftUiJVuzBQxQcwQLor7JWDVRP+dUfCmzgJw==",
"dev": true,
"dependencies": {
"@bcoe/v8-coverage": "^0.2.3",
@@ -1033,7 +1116,7 @@
"istanbul-lib-coverage": "^3.2.0",
"istanbul-lib-report": "^3.0.1",
"istanbul-reports": "^3.1.6",
- "test-exclude": "^6.0.0",
+ "test-exclude": "^7.0.1",
"v8-to-istanbul": "^9.0.0",
"yargs": "^17.7.2",
"yargs-parser": "^21.1.1"
@@ -1042,7 +1125,15 @@
"c8": "bin/c8.js"
},
"engines": {
- "node": ">=14.14.0"
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "monocart-coverage-reports": "^2"
+ },
+ "peerDependenciesMeta": {
+ "monocart-coverage-reports": {
+ "optional": true
+ }
}
},
"node_modules/callsites": {
@@ -1443,6 +1534,12 @@
"url": "https://dotenvx.com"
}
},
+ "node_modules/eastasianwidth": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
+ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
+ "dev": true
+ },
"node_modules/emittery": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/emittery/-/emittery-1.0.3.tgz",
@@ -1462,41 +1559,42 @@
"dev": true
},
"node_modules/esbuild": {
- "version": "0.21.4",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.4.tgz",
- "integrity": "sha512-sFMcNNrj+Q0ZDolrp5pDhH0nRPN9hLIM3fRPwgbLYJeSHHgnXSnbV3xYgSVuOeLWH9c73VwmEverVzupIv5xuA==",
+ "version": "0.22.0",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.22.0.tgz",
+ "integrity": "sha512-zNYA6bFZsVnsU481FnGAQjLDW0Pl/8BGG7EvAp15RzUvGC+ME7hf1q7LvIfStEQBz/iEHuBJCYcOwPmNCf1Tlw==",
"dev": true,
"hasInstallScript": true,
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
- "node": ">=12"
+ "node": ">=18"
},
"optionalDependencies": {
- "@esbuild/aix-ppc64": "0.21.4",
- "@esbuild/android-arm": "0.21.4",
- "@esbuild/android-arm64": "0.21.4",
- "@esbuild/android-x64": "0.21.4",
- "@esbuild/darwin-arm64": "0.21.4",
- "@esbuild/darwin-x64": "0.21.4",
- "@esbuild/freebsd-arm64": "0.21.4",
- "@esbuild/freebsd-x64": "0.21.4",
- "@esbuild/linux-arm": "0.21.4",
- "@esbuild/linux-arm64": "0.21.4",
- "@esbuild/linux-ia32": "0.21.4",
- "@esbuild/linux-loong64": "0.21.4",
- "@esbuild/linux-mips64el": "0.21.4",
- "@esbuild/linux-ppc64": "0.21.4",
- "@esbuild/linux-riscv64": "0.21.4",
- "@esbuild/linux-s390x": "0.21.4",
- "@esbuild/linux-x64": "0.21.4",
- "@esbuild/netbsd-x64": "0.21.4",
- "@esbuild/openbsd-x64": "0.21.4",
- "@esbuild/sunos-x64": "0.21.4",
- "@esbuild/win32-arm64": "0.21.4",
- "@esbuild/win32-ia32": "0.21.4",
- "@esbuild/win32-x64": "0.21.4"
+ "@esbuild/aix-ppc64": "0.22.0",
+ "@esbuild/android-arm": "0.22.0",
+ "@esbuild/android-arm64": "0.22.0",
+ "@esbuild/android-x64": "0.22.0",
+ "@esbuild/darwin-arm64": "0.22.0",
+ "@esbuild/darwin-x64": "0.22.0",
+ "@esbuild/freebsd-arm64": "0.22.0",
+ "@esbuild/freebsd-x64": "0.22.0",
+ "@esbuild/linux-arm": "0.22.0",
+ "@esbuild/linux-arm64": "0.22.0",
+ "@esbuild/linux-ia32": "0.22.0",
+ "@esbuild/linux-loong64": "0.22.0",
+ "@esbuild/linux-mips64el": "0.22.0",
+ "@esbuild/linux-ppc64": "0.22.0",
+ "@esbuild/linux-riscv64": "0.22.0",
+ "@esbuild/linux-s390x": "0.22.0",
+ "@esbuild/linux-x64": "0.22.0",
+ "@esbuild/netbsd-x64": "0.22.0",
+ "@esbuild/openbsd-arm64": "0.22.0",
+ "@esbuild/openbsd-x64": "0.22.0",
+ "@esbuild/sunos-x64": "0.22.0",
+ "@esbuild/win32-arm64": "0.22.0",
+ "@esbuild/win32-ia32": "0.22.0",
+ "@esbuild/win32-x64": "0.22.0"
}
},
"node_modules/escalade": {
@@ -1549,9 +1647,9 @@
}
},
"node_modules/execa": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/execa/-/execa-9.1.0.tgz",
- "integrity": "sha512-lSgHc4Elo2m6bUDhc3Hl/VxvUDJdQWI40RZ4KMY9bKRc+hgMOT7II/JjbNDhI8VnMtrCb7U/fhpJIkLORZozWw==",
+ "version": "9.3.0",
+ "resolved": "https://registry.npmjs.org/execa/-/execa-9.3.0.tgz",
+ "integrity": "sha512-l6JFbqnHEadBoVAVpN5dl2yCyfX28WoBAGaoQcNmLLSedOxTxcn2Qa83s8I/PA5i56vWru2OHOtrwF7Om2vqlg==",
"dev": true,
"dependencies": {
"@sindresorhus/merge-streams": "^4.0.0",
@@ -1568,7 +1666,7 @@
"yoctocolors": "^2.0.0"
},
"engines": {
- "node": ">=18"
+ "node": "^18.19.0 || >=20.5.0"
},
"funding": {
"url": "https://github.com/sindresorhus/execa?sponsor=1"
@@ -2284,6 +2382,24 @@
"node": ">=8"
}
},
+ "node_modules/jackspeak": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz",
+ "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==",
+ "dev": true,
+ "dependencies": {
+ "@isaacs/cliui": "^8.0.2"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ },
+ "optionalDependencies": {
+ "@pkgjs/parseargs": "^0.11.0"
+ }
+ },
"node_modules/js-string-escape": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz",
@@ -2761,6 +2877,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/package-json-from-dist": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz",
+ "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==",
+ "dev": true
+ },
"node_modules/parse-ms": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz",
@@ -2800,6 +2922,22 @@
"node": ">=8"
}
},
+ "node_modules/path-scurry": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
+ "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+ "dev": true,
+ "dependencies": {
+ "lru-cache": "^10.2.0",
+ "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
"node_modules/path-type": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz",
@@ -3197,6 +3335,57 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/string-width-cjs": {
+ "name": "string-width",
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/strip-ansi": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
@@ -3212,6 +3401,28 @@
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
+ "node_modules/strip-ansi-cjs": {
+ "name": "strip-ansi",
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/strip-final-newline": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz",
@@ -3325,17 +3536,73 @@
}
},
"node_modules/test-exclude": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
- "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==",
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz",
+ "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==",
"dev": true,
"dependencies": {
"@istanbuljs/schema": "^0.1.2",
- "glob": "^7.1.4",
- "minimatch": "^3.0.4"
+ "glob": "^10.4.1",
+ "minimatch": "^9.0.4"
},
"engines": {
- "node": ">=8"
+ "node": ">=18"
+ }
+ },
+ "node_modules/test-exclude/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/test-exclude/node_modules/glob": {
+ "version": "10.4.2",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz",
+ "integrity": "sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==",
+ "dev": true,
+ "dependencies": {
+ "foreground-child": "^3.1.0",
+ "jackspeak": "^3.1.2",
+ "minimatch": "^9.0.4",
+ "minipass": "^7.1.2",
+ "package-json-from-dist": "^1.0.0",
+ "path-scurry": "^1.11.1"
+ },
+ "bin": {
+ "glob": "dist/esm/bin.mjs"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/test-exclude/node_modules/minimatch": {
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+ "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/test-exclude/node_modules/minipass": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
+ "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
+ "dev": true,
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
}
},
"node_modules/time-zone": {
@@ -3600,6 +3867,89 @@
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
+ "node_modules/wrap-ansi-cjs": {
+ "name": "wrap-ansi",
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/wrap-ansi/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -3700,9 +4050,9 @@
"dev": true
},
"node_modules/yaml": {
- "version": "2.4.2",
- "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz",
- "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==",
+ "version": "2.4.5",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz",
+ "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==",
"dev": true,
"bin": {
"yaml": "bin.mjs"
diff --git a/package.json b/package.json
index 7839ff4..c9e1006 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,7 @@
"version": "1.10.3",
"description": "GitHub Action for creating a GitHub App Installation Access Token",
"scripts": {
- "build": "esbuild main.js post.js --bundle --outdir=dist --out-extension:.js=.cjs --platform=node --target=node20.0.0",
+ "build": "esbuild main.js post.js --bundle --outdir=dist --out-extension:.js=.cjs --platform=node --target=node20.0.0 --packages=bundle",
"test": "c8 --100 ava tests/index.js",
"coverage": "c8 report --reporter html",
"postcoverage": "open-cli coverage/index.html"
@@ -21,12 +21,12 @@
"devDependencies": {
"@sinonjs/fake-timers": "^11.2.2",
"ava": "^6.1.3",
- "c8": "^9.1.0",
+ "c8": "^10.1.2",
"dotenv": "^16.4.5",
- "esbuild": "^0.21.4",
- "execa": "^9.1.0",
+ "esbuild": "^0.22.0",
+ "execa": "^9.3.0",
"open-cli": "^8.0.0",
- "yaml": "^2.4.2"
+ "yaml": "^2.4.5"
},
"release": {
"branches": [
From d0ac2addd11ed098fdfa86566abc8d715759aaa9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=B2=99=E6=BC=A0=E4=B9=8B=E5=AD=90?=
<7850715+maboloshi@users.noreply.github.com>
Date: Wed, 3 Jul 2024 11:09:29 +0800
Subject: [PATCH 08/10] docs(README): fix the `git committer string` and
`Configure git CLI` examples (#151)
1. Fix the `git committer string` and `Configure git CLI` examples.
2. Format the examples consistently.
3. Update the get `` comment and add a note that
[octokit/request-action](https://github.com/octokit/request-action) can
be used.
---------
Co-authored-by: Parker Brown <17183625+parkerbxyz@users.noreply.github.com>
---
README.md | 26 +++++++++++++++-----------
1 file changed, 15 insertions(+), 11 deletions(-)
diff --git a/README.md b/README.md
index 3d3060d..4e6e9ba 100644
--- a/README.md
+++ b/README.md
@@ -79,14 +79,14 @@ jobs:
# required
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.PRIVATE_KEY }}
- - name: Retrieve GitHub App User ID
+ - name: Get GitHub App User ID
id: get-user-id
- run: echo "user-id=$(gh api "/users/${{ steps.generate-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
+ run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: ${{ steps.app-token.outputs.token }}
- id: committer
- run: echo "string=${{steps.app-token.outputs.app-slug}}[bot] <${{steps.get-user-id.outputs.user-id}}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>" >> "$GITHUB_OUTPUT"
- - run: echo "committer string is ${{steps.committer.outputs.string}}"
+ run: echo "string=${{ steps.app-token.outputs.app-slug }}[bot] <${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>" >> "$GITHUB_OUTPUT"
+ - run: echo "committer string is ${{ steps.committer.outputs.string }}"
```
### Configure git CLI for an app's bot user
@@ -104,14 +104,14 @@ jobs:
# required
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.PRIVATE_KEY }}
- - name: Retrieve GitHub App User ID
+ - name: Get GitHub App User ID
id: get-user-id
- run: echo "user-id=$(gh api "/users/${{ steps.generate-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
+ run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: ${{ steps.app-token.outputs.token }}
- run: |
- git config --global user.name '${{steps.app-token.outputs.app-slug}}[bot]'
- git config --global user.email '${{steps.get-user-id.outputs.user-id}}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>'
+ git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
+ git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>'
# git commands like commit work using the bot user
- run: |
git add .
@@ -119,8 +119,12 @@ jobs:
git push
```
-The `` is the numeric user ID of the app's bot user, which can be found under `https://api.github.com/users/%5Bbot%5D`.
-For example, we can check at `https://api.github.com/users/dependabot%5Bbot%5D` to see the user ID of dependabot is 49699333.
+> [!TIP]
+> The `` is the numeric user ID of the app's bot user, which can be found under `https://api.github.com/users/%5Bbot%5D`.
+>
+> For example, we can check at `https://api.github.com/users/dependabot[bot]` to see the user ID of Dependabot is 49699333.
+>
+> Alternatively, you can use the [octokit/request-action](https://github.com/octokit/request-action) to get the ID.
### Create a token for all repositories in the current owner's installation
@@ -203,7 +207,7 @@ jobs:
set-matrix:
runs-on: ubuntu-latest
outputs:
- matrix: ${{steps.set.outputs.matrix }}
+ matrix: ${{ steps.set.outputs.matrix }}
steps:
- id: set
run: echo 'matrix=[{"owner":"owner1"},{"owner":"owner2","repos":["repo1"]}]' >>"$GITHUB_OUTPUT"
From 000e2a0d29976e250bb49dade8bddb037c5187c5 Mon Sep 17 00:00:00 2001
From: vleon1a <145581438+vleon1a@users.noreply.github.com>
Date: Fri, 12 Jul 2024 00:09:08 +0200
Subject: [PATCH 09/10] docs(readme): document how a Base64 private key could
be decoded (#155)
Addressing this comment
https://github.com/actions/create-github-app-token/issues/42#issuecomment-2214599409
---------
Co-authored-by: Parker Brown <17183625+parkerbxyz@users.noreply.github.com>
---
README.md | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/README.md b/README.md
index 4e6e9ba..9637ad3 100644
--- a/README.md
+++ b/README.md
@@ -278,6 +278,24 @@ jobs:
**Required:** GitHub App private key. Escaped newlines (`\\n`) will be automatically replaced with actual newlines.
+Some other actions may require the private key to be Base64 encoded. To avoid creating a new secret, it can be decoded on the fly, but it needs to be managed securely. Here is an example of how this can be achieved:
+
+```yaml
+steps:
+ - name: Decode the GitHub App Private Key
+ id: decode
+ run: |
+ private_key=$(echo "${{ secrets.PRIVATE_KEY }}" | base64 -d | awk 'BEGIN {ORS="\\n"} {print}' | head -c -2) &> /dev/null
+ echo "::add-mask::$private_key"
+ echo "private-key=$private_key" >> "$GITHUB_OUTPUT"
+ - name: Generate GitHub App Token
+ id: app-token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ steps.decode.outputs.private-key }}
+```
+
### `owner`
**Optional:** The owner of the GitHub App installation. If empty, defaults to the current repository owner.
From 9ccc6dbd71f2a82bbf749c2d2288eebba653eae6 Mon Sep 17 00:00:00 2001
From: Parker Brown <17183625+parkerbxyz@users.noreply.github.com>
Date: Thu, 8 Aug 2024 14:31:42 -0700
Subject: [PATCH 10/10] ci(test): add `workflow_dispatch` trigger
---
.github/workflows/test.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 40ec55b..ab2b03f 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -5,6 +5,7 @@ on:
branches:
- main
pull_request:
+ workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}