Compare commits

1 Commit

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
|        | 3775125a50 | build(deps): bump octokit dependencies | 2025-01-16 15:11:25 +00:00 |
20 changed files with 638 additions and 1201 deletions

1
.github/FUNDING.yml vendored Normal file

@@ -0,0 +1 @@
github: peter-evans

7
.github/ISSUE_TEMPLATE.md vendored Normal file

@@ -0,0 +1,7 @@
### Subject of the issue
Describe your issue here.
### Steps to reproduce
If this issue is describing a possible bug, please provide (or link to) your GitHub Actions workflow.

20
.github/dependabot.yml vendored Normal file

@@ -0,0 +1,20 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
day: "tuesday"
labels:
- "dependencies"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "weekly"
day: "tuesday"
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
labels:
- "dependencies"

@@ -0,0 +1,13 @@
name: Auto-merge Dependabot
on: pull_request
jobs:
automerge:
runs-on: ubuntu-latest
if: github.actor == 'dependabot[bot]'
steps:
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ secrets.ACTIONS_BOT_TOKEN }}
pull-request-number: ${{ github.event.pull_request.number }}
merge-method: squash

138
.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,138 @@
name: CI
on:
push:
branches: [main]
paths-ignore:
- 'README.md'
- 'docs/**'
pull_request:
branches: [main]
paths-ignore:
- 'README.md'
- 'docs/**'
permissions:
pull-requests: write
contents: write
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20.x
cache: npm
- run: npm ci
- run: npm run build
- run: npm run format-check
- run: npm run lint
- run: npm run test
- uses: actions/upload-artifact@v4
with:
name: dist
path: dist
- uses: actions/upload-artifact@v4
with:
name: action.yml
path: action.yml
test:
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository
needs: [build]
runs-on: ubuntu-latest
strategy:
matrix:
target: [built, committed]
steps:
- uses: actions/checkout@v4
with:
ref: main
- if: matrix.target == 'built' || github.event_name == 'pull_request'
uses: actions/download-artifact@v4
with:
name: dist
path: dist
- if: matrix.target == 'built' || github.event_name == 'pull_request'
uses: actions/download-artifact@v4
with:
name: action.yml
path: .
- name: Create change
run: date +%s > report.txt
- name: Create Pull Request
id: cpr
uses: ./
with:
commit-message: '[CI] test ${{ matrix.target }}'
committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
author: ${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>
title: '[CI] test ${{ matrix.target }}'
body: |
- CI test case for target '${{ matrix.target }}'
Auto-generated by [create-pull-request][1]
[1]: https://github.com/peter-evans/create-pull-request
branch: ci-test-${{ matrix.target }}-${{ github.sha }}
- name: Close Pull
uses: peter-evans/close-pull@v3
with:
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
comment: '[CI] test ${{ matrix.target }}'
delete-branch: true
commentTestSuiteHelp:
if: github.event_name == 'pull_request'
needs: [test]
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v3
id: fc
with:
issue-number: ${{ github.event.number }}
comment-author: 'github-actions[bot]'
body-includes: Full test suite slash command
- if: steps.fc.outputs.comment-id == ''
name: Create comment
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ github.event.number }}
body: |
Full test suite slash command (repository admin only)
```
/test repository=${{ github.event.pull_request.head.repo.full_name }} ref=${{ github.event.pull_request.head.ref }} build=true
```
```
/test repository=${{ github.event.pull_request.head.repo.full_name }} ref=${{ github.event.pull_request.head.ref }} build=true sign-commits=true
```
package:
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
needs: [test]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: dist
path: dist
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.ACTIONS_BOT_TOKEN }}
commit-message: 'build: update distribution'
title: Update distribution
body: |
- Updates the distribution for changes on `main`
Auto-generated by [create-pull-request][1]
[1]: https://github.com/peter-evans/create-pull-request
branch: update-distribution

@@ -0,0 +1,49 @@
name: Create Pull Request Example Command
on:
repository_dispatch:
types: [cpr-example-command]
jobs:
createPullRequest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Make changes to pull request
run: date +%s > report.txt
- name: Create Pull Request
id: cpr
uses: ./
with:
commit-message: Update report
committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
author: ${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>
signoff: false
title: '[Example] Update report'
body: |
Update report
- Updated with *today's* date
- Auto-generated by [create-pull-request][1]
[1]: https://github.com/peter-evans/create-pull-request
labels: |
report
automated pr
assignees: peter-evans
reviewers: peter-evans
milestone: 1
draft: false
branch: example-patches
delete-branch: true
- name: Check output
run: |
echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
- name: Add reaction
uses: peter-evans/create-or-update-comment@v4
with:
repository: ${{ github.event.client_payload.github.payload.repository.full_name }}
comment-id: ${{ github.event.client_payload.github.payload.comment.id }}
reaction-type: hooray

@@ -0,0 +1,43 @@
name: Slash Command Dispatch
on:
issue_comment:
types: [created]
jobs:
slashCommandDispatch:
runs-on: ubuntu-latest
steps:
- name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v4
with:
token: ${{ secrets.ACTIONS_BOT_TOKEN }}
config: >
[
{
"command": "test",
"permission": "admin",
"repository": "peter-evans/create-pull-request-tests",
"named_args": true
},
{
"command": "testv5",
"permission": "admin",
"repository": "peter-evans/create-pull-request-tests",
"named_args": true
},
{
"command": "clean",
"permission": "admin",
"repository": "peter-evans/create-pull-request-tests"
},
{
"command": "cpr-example",
"permission": "admin",
"issue_type": "issue"
},
{
"command": "rebase",
"permission": "admin",
"repository": "peter-evans/slash-command-dispatch-processor",
"issue_type": "pull-request"
}
]

@@ -0,0 +1,32 @@
name: Update Major Version
run-name: Update ${{ github.event.inputs.main_version }} to ${{ github.event.inputs.target }}
on:
workflow_dispatch:
inputs:
target:
description: The target tag or reference
required: true
main_version:
type: choice
description: The major version tag to update
options:
- v6
- v7
jobs:
tag:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
token: ${{ secrets.ACTIONS_BOT_TOKEN }}
fetch-depth: 0
- name: Git config
run: |
git config user.name actions-bot
git config user.email actions-bot@users.noreply.github.com
- name: Tag new target
run: git tag -f ${{ github.event.inputs.main_version }} ${{ github.event.inputs.target }}
- name: Push new tag
run: git push origin ${{ github.event.inputs.main_version }} --force

@@ -53,7 +53,7 @@ All inputs are **optional**. If not set, sensible defaults will be used.
| `token` | The token that the action will use to create and update the pull request. See [token](#token). | `GITHUB_TOKEN` |
| `branch-token` | The token that the action will use to create and update the branch. See [branch-token](#branch-token). | Defaults to the value of `token` |
| `path` | Relative path under `GITHUB_WORKSPACE` to the repository. | `GITHUB_WORKSPACE` |
| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. See [Add specific paths](#add-specific-paths). | If no paths are specified, all new and modified files are added. |
| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. If no paths are specified, all new and modified files are added. See [Add specific paths](#add-specific-paths). | |
| `commit-message` | The message to use when committing changes. See [commit-message](#commit-message). | `[create-pull-request] automated change` |
| `committer` | The committer name and email address in the format `Display Name <email@address.com>`. Defaults to the GitHub Actions bot user on github.com. | `github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>` |
| `author` | The author name and email address in the format `Display Name <email@address.com>`. Defaults to the user who triggered the workflow run. | `${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>` |
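For orientation, a minimal sketch of how a few of the inputs listed above can be combined in a single step. The secret name `PAT` and the paths under `add-paths` are illustrative assumptions, not values taken from this diff:

```yml
- name: Create Pull Request
  uses: peter-evans/create-pull-request@v7
  with:
    # token falls back to GITHUB_TOKEN when omitted; PAT is an assumed secret name
    token: ${{ secrets.PAT }}
    commit-message: 'chore: update generated files'
    committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
    author: ${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>
    # comma or newline-separated pathspecs; these paths are placeholders
    add-paths: |
      docs/**
      report.txt
```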

4
dist/790.index.js vendored

@@ -6,8 +6,8 @@ exports.modules = {
/***/ 790:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
var N=Object.defineProperty;var c=(_,a)=>N(_,"name",{value:a,configurable:!0});__webpack_require__(3024),__webpack_require__(6760);const node=__webpack_require__(117);__webpack_require__(7067),__webpack_require__(4708),__webpack_require__(8522),__webpack_require__(7075),__webpack_require__(4573),__webpack_require__(7975),__webpack_require__(3465),__webpack_require__(3136),__webpack_require__(7030);let s=0;const S={START_BOUNDARY:s++,HEADER_FIELD_START:s++,HEADER_FIELD:s++,HEADER_VALUE_START:s++,HEADER_VALUE:s++,HEADER_VALUE_ALMOST_DONE:s++,HEADERS_ALMOST_DONE:s++,PART_DATA_START:s++,PART_DATA:s++,END:s++};let f=1;const F={PART_BOUNDARY:f,LAST_BOUNDARY:f*=2},LF=10,CR=13,SPACE=32,HYPHEN=45,COLON=58,A=97,Z=122,lower=c(_=>_|32,"lower"),noop=c(()=>{},"noop");class MultipartParser{static{c(this,"MultipartParser")}constructor(a){this.index=0,this.flags=0,this.onHeaderEnd=noop,this.onHeaderField=noop,this.onHeadersEnd=noop,this.onHeaderValue=noop,this.onPartBegin=noop,this.onPartData=noop,this.onPartEnd=noop,this.boundaryChars={},a=`\r
--`+a;const t=new Uint8Array(a.length);for(let n=0;n<a.length;n++)t[n]=a.charCodeAt(n),this.boundaryChars[t[n]]=!0;this.boundary=t,this.lookbehind=new Uint8Array(this.boundary.length+8),this.state=S.START_BOUNDARY}write(a){let t=0;const n=a.length;let E=this.index,{lookbehind:d,boundary:h,boundaryChars:H,index:e,state:o,flags:l}=this;const b=this.boundary.length,m=b-1,O=a.length;let r,P;const u=c(D=>{this[D+"Mark"]=t},"mark"),i=c(D=>{delete this[D+"Mark"]},"clear"),T=c((D,p,R,g)=>{(p===void 0||p!==R)&&this[D](g&&g.subarray(p,R))},"callback"),L=c((D,p)=>{const R=D+"Mark";R in this&&(p?(T(D,this[R],t,a),delete this[R]):(T(D,this[R],a.length,a),this[R]=0))},"dataCallback");for(t=0;t<n;t++)switch(r=a[t],o){case S.START_BOUNDARY:if(e===h.length-2){if(r===HYPHEN)l|=F.LAST_BOUNDARY;else if(r!==CR)return;e++;break}else if(e-1===h.length-2){if(l&F.LAST_BOUNDARY&&r===HYPHEN)o=S.END,l=0;else if(!(l&F.LAST_BOUNDARY)&&r===LF)e=0,T("onPartBegin"),o=S.HEADER_FIELD_START;else return;break}r!==h[e+2]&&(e=-2),r===h[e+2]&&e++;break;case S.HEADER_FIELD_START:o=S.HEADER_FIELD,u("onHeaderField"),e=0;case S.HEADER_FIELD:if(r===CR){i("onHeaderField"),o=S.HEADERS_ALMOST_DONE;break}if(e++,r===HYPHEN)break;if(r===COLON){if(e===1)return;L("onHeaderField",!0),o=S.HEADER_VALUE_START;break}if(P=lower(r),P<A||P>Z)return;break;case S.HEADER_VALUE_START:if(r===SPACE)break;u("onHeaderValue"),o=S.HEADER_VALUE;case S.HEADER_VALUE:r===CR&&(L("onHeaderValue",!0),T("onHeaderEnd"),o=S.HEADER_VALUE_ALMOST_DONE);break;case S.HEADER_VALUE_ALMOST_DONE:if(r!==LF)return;o=S.HEADER_FIELD_START;break;case S.HEADERS_ALMOST_DONE:if(r!==LF)return;T("onHeadersEnd"),o=S.PART_DATA_START;break;case S.PART_DATA_START:o=S.PART_DATA,u("onPartData");case S.PART_DATA:if(E=e,e===0){for(t+=m;t<O&&!(a[t]in H);)t+=b;t-=m,r=a[t]}if(e<h.length)h[e]===r?(e===0&&L("onPartData",!0),e++):e=0;else if(e===h.length)e++,r===CR?l|=F.PART_BOUNDARY:r===HYPHEN?l|=F.LAST_BOUNDARY:e=0;else if(e-1===h.length)if(l&F.PART_BOUNDARY){if(e=0,r===LF){l&=~F.PART_BOUNDARY,T("onPartEnd"),T("onPartBegin"),o=S.HEADER_FIELD_START;break}}else l&F.LAST_BOUNDARY&&r===HYPHEN?(T("onPartEnd"),o=S.END,l=0):e=0;if(e>0)d[e-1]=r;else if(E>0){const D=new Uint8Array(d.buffer,d.byteOffset,d.byteLength);T("onPartData",0,E,D),E=0,u("onPartData"),t--}break;case S.END:break;default:throw new Error(`Unexpected state entered: ${o}`)}L("onHeaderField"),L("onHeaderValue"),L("onPartData"),this.index=e,this.state=o,this.flags=l}end(){if(this.state===S.HEADER_FIELD_START&&this.index===0||this.state===S.PART_DATA&&this.index===this.boundary.length)this.onPartEnd();else if(this.state!==S.END)throw new Error("MultipartParser.end(): stream ended unexpectedly")}}function _fileName(_){const a=_.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);if(!a)return;const t=a[2]||a[3]||"";let n=t.slice(t.lastIndexOf("\\")+1);return n=n.replace(/%22/g,'"'),n=n.replace(/&#(\d{4});/g,(E,d)=>String.fromCharCode(d)),n}c(_fileName,"_fileName");async function toFormData(_,a){if(!/multipart/i.test(a))throw new TypeError("Failed to fetch");const t=a.match(/boundary=(?:"([^"]+)"|([^;]+))/i);if(!t)throw new TypeError("no or bad content-type header, no multipart boundary");const n=new MultipartParser(t[1]||t[2]);let E,d,h,H,e,o;const l=[],b=new node.FormData,m=c(i=>{h+=u.decode(i,{stream:!0})},"onPartData"),O=c(i=>{l.push(i)},"appendToFile"),r=c(()=>{const i=new node.File(l,o,{type:e});b.append(H,i)},"appendFileToFormData"),P=c(()=>{b.append(H,h)},"appendEntryToFormData"),u=new 
TextDecoder("utf-8");u.decode(),n.onPartBegin=function(){n.onPartData=m,n.onPartEnd=P,E="",d="",h="",H="",e="",o=null,l.length=0},n.onHeaderField=function(i){E+=u.decode(i,{stream:!0})},n.onHeaderValue=function(i){d+=u.decode(i,{stream:!0})},n.onHeaderEnd=function(){if(d+=u.decode(),E=E.toLowerCase(),E==="content-disposition"){const i=d.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);i&&(H=i[2]||i[3]||""),o=_fileName(d),o&&(n.onPartData=O,n.onPartEnd=r)}else E==="content-type"&&(e=d);d="",E=""};for await(const i of _)n.write(i);return n.end(),b}c(toFormData,"toFormData"),exports.toFormData=toFormData;
var y=Object.defineProperty;var c=(R,o)=>y(R,"name",{value:o,configurable:!0});__webpack_require__(3024),__webpack_require__(6760);const node=__webpack_require__(117);__webpack_require__(7067),__webpack_require__(4708),__webpack_require__(8522),__webpack_require__(7075),__webpack_require__(4573),__webpack_require__(7975),__webpack_require__(7713),__webpack_require__(3136),__webpack_require__(7030);let s=0;const S={START_BOUNDARY:s++,HEADER_FIELD_START:s++,HEADER_FIELD:s++,HEADER_VALUE_START:s++,HEADER_VALUE:s++,HEADER_VALUE_ALMOST_DONE:s++,HEADERS_ALMOST_DONE:s++,PART_DATA_START:s++,PART_DATA:s++,END:s++};let f=1;const F={PART_BOUNDARY:f,LAST_BOUNDARY:f*=2},LF=10,CR=13,SPACE=32,HYPHEN=45,COLON=58,A=97,Z=122,lower=c(R=>R|32,"lower"),noop=c(()=>{},"noop"),g=class g{constructor(o){this.index=0,this.flags=0,this.onHeaderEnd=noop,this.onHeaderField=noop,this.onHeadersEnd=noop,this.onHeaderValue=noop,this.onPartBegin=noop,this.onPartData=noop,this.onPartEnd=noop,this.boundaryChars={},o=`\r
--`+o;const t=new Uint8Array(o.length);for(let n=0;n<o.length;n++)t[n]=o.charCodeAt(n),this.boundaryChars[t[n]]=!0;this.boundary=t,this.lookbehind=new Uint8Array(this.boundary.length+8),this.state=S.START_BOUNDARY}write(o){let t=0;const n=o.length;let E=this.index,{lookbehind:l,boundary:h,boundaryChars:H,index:e,state:a,flags:d}=this;const b=this.boundary.length,m=b-1,O=o.length;let r,P;const u=c(D=>{this[D+"Mark"]=t},"mark"),i=c(D=>{delete this[D+"Mark"]},"clear"),T=c((D,p,_,N)=>{(p===void 0||p!==_)&&this[D](N&&N.subarray(p,_))},"callback"),L=c((D,p)=>{const _=D+"Mark";_ in this&&(p?(T(D,this[_],t,o),delete this[_]):(T(D,this[_],o.length,o),this[_]=0))},"dataCallback");for(t=0;t<n;t++)switch(r=o[t],a){case S.START_BOUNDARY:if(e===h.length-2){if(r===HYPHEN)d|=F.LAST_BOUNDARY;else if(r!==CR)return;e++;break}else if(e-1===h.length-2){if(d&F.LAST_BOUNDARY&&r===HYPHEN)a=S.END,d=0;else if(!(d&F.LAST_BOUNDARY)&&r===LF)e=0,T("onPartBegin"),a=S.HEADER_FIELD_START;else return;break}r!==h[e+2]&&(e=-2),r===h[e+2]&&e++;break;case S.HEADER_FIELD_START:a=S.HEADER_FIELD,u("onHeaderField"),e=0;case S.HEADER_FIELD:if(r===CR){i("onHeaderField"),a=S.HEADERS_ALMOST_DONE;break}if(e++,r===HYPHEN)break;if(r===COLON){if(e===1)return;L("onHeaderField",!0),a=S.HEADER_VALUE_START;break}if(P=lower(r),P<A||P>Z)return;break;case S.HEADER_VALUE_START:if(r===SPACE)break;u("onHeaderValue"),a=S.HEADER_VALUE;case S.HEADER_VALUE:r===CR&&(L("onHeaderValue",!0),T("onHeaderEnd"),a=S.HEADER_VALUE_ALMOST_DONE);break;case S.HEADER_VALUE_ALMOST_DONE:if(r!==LF)return;a=S.HEADER_FIELD_START;break;case S.HEADERS_ALMOST_DONE:if(r!==LF)return;T("onHeadersEnd"),a=S.PART_DATA_START;break;case S.PART_DATA_START:a=S.PART_DATA,u("onPartData");case S.PART_DATA:if(E=e,e===0){for(t+=m;t<O&&!(o[t]in H);)t+=b;t-=m,r=o[t]}if(e<h.length)h[e]===r?(e===0&&L("onPartData",!0),e++):e=0;else if(e===h.length)e++,r===CR?d|=F.PART_BOUNDARY:r===HYPHEN?d|=F.LAST_BOUNDARY:e=0;else if(e-1===h.length)if(d&F.PART_BOUNDARY){if(e=0,r===LF){d&=~F.PART_BOUNDARY,T("onPartEnd"),T("onPartBegin"),a=S.HEADER_FIELD_START;break}}else d&F.LAST_BOUNDARY&&r===HYPHEN?(T("onPartEnd"),a=S.END,d=0):e=0;if(e>0)l[e-1]=r;else if(E>0){const D=new Uint8Array(l.buffer,l.byteOffset,l.byteLength);T("onPartData",0,E,D),E=0,u("onPartData"),t--}break;case S.END:break;default:throw new Error(`Unexpected state entered: ${a}`)}L("onHeaderField"),L("onHeaderValue"),L("onPartData"),this.index=e,this.state=a,this.flags=d}end(){if(this.state===S.HEADER_FIELD_START&&this.index===0||this.state===S.PART_DATA&&this.index===this.boundary.length)this.onPartEnd();else if(this.state!==S.END)throw new Error("MultipartParser.end(): stream ended unexpectedly")}};c(g,"MultipartParser");let MultipartParser=g;function _fileName(R){const o=R.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);if(!o)return;const t=o[2]||o[3]||"";let n=t.slice(t.lastIndexOf("\\")+1);return n=n.replace(/%22/g,'"'),n=n.replace(/&#(\d{4});/g,(E,l)=>String.fromCharCode(l)),n}c(_fileName,"_fileName");async function toFormData(R,o){if(!/multipart/i.test(o))throw new TypeError("Failed to fetch");const t=o.match(/boundary=(?:"([^"]+)"|([^;]+))/i);if(!t)throw new TypeError("no or bad content-type header, no multipart boundary");const n=new MultipartParser(t[1]||t[2]);let E,l,h,H,e,a;const d=[],b=new node.FormData,m=c(i=>{h+=u.decode(i,{stream:!0})},"onPartData"),O=c(i=>{d.push(i)},"appendToFile"),r=c(()=>{const i=new 
node.File(d,a,{type:e});b.append(H,i)},"appendFileToFormData"),P=c(()=>{b.append(H,h)},"appendEntryToFormData"),u=new TextDecoder("utf-8");u.decode(),n.onPartBegin=function(){n.onPartData=m,n.onPartEnd=P,E="",l="",h="",H="",e="",a=null,d.length=0},n.onHeaderField=function(i){E+=u.decode(i,{stream:!0})},n.onHeaderValue=function(i){l+=u.decode(i,{stream:!0})},n.onHeaderEnd=function(){if(l+=u.decode(),E=E.toLowerCase(),E==="content-disposition"){const i=l.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);i&&(H=i[2]||i[3]||""),a=_fileName(l),a&&(n.onPartData=O,n.onPartEnd=r)}else E==="content-type"&&(e=l);l="",E=""};for await(const i of R)n.write(i);return n.end(),b}c(toFormData,"toFormData"),exports.toFormData=toFormData;
/***/ })

584
dist/index.js vendored

File diff suppressed because it is too large

@@ -25,7 +25,7 @@ This document covers terminology, how the action works, general usage guidelines
## Terminology
[Pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests#about-pull-requests) are proposed changes to a repository branch that can be reviewed by a repository's collaborators before being accepted or rejected.
A pull request references two branches:
@@ -150,7 +150,7 @@ There are a number of workarounds with different pros and cons.
- Use the default `GITHUB_TOKEN` and allow the action to create pull requests that have no checks enabled. Manually close pull requests and immediately reopen them. This will enable `on: pull_request` workflows to run and be added as checks. To prevent merging of pull requests without checks erroneously, use [branch protection rules](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests).
- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `ready_for_review` in `on: pull_request`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `on: ready_for_review`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
- Use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) created on an account that has write access to the repository that pull requests are being created in. This is the standard workaround and [recommended by GitHub](https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow). It's advisable to use a dedicated [machine account](https://docs.github.com/en/github/site-policy/github-terms-of-service#3-account-requirements) that has collaborator access to the repository, rather than creating a PAT on a personal user account. Also note that because the account that owns the PAT will be the creator of pull requests, that user account will be unable to perform actions such as request changes or approve the pull request.
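A minimal sketch of the `draft: always-true` workaround in the list above: the creating workflow sets `draft: always-true` on its create-pull-request step, and the checks workflow triggers on the `ready_for_review` activity type. The job body here is a placeholder, not content from this repository:

```yml
on:
  pull_request:
    types: [ready_for_review]   # fires when "Ready for review" is clicked on the draft PR

jobs:
  checks:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: npm test   # placeholder for the real checks
```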
@@ -197,9 +197,8 @@ Checking out a branch from a different repository from where the workflow is exe
Allowing the action to push with a configured deploy key will trigger `on: push` workflows. This makes it an alternative to using a PAT to trigger checks for pull requests.
> [!NOTE]
> - You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
> You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
> This method only makes sense if creating a pull request in the repository where the workflow is running.
> - You cannot use deploy keys with [commit signature verification for bots](#commit-signature-verification-for-bots) (`sign-commits: true`).
How to use SSH (deploy keys) with create-pull-request action:
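The steps themselves sit outside this hunk; roughly, the pattern is to give `actions/checkout` the deploy key so the subsequent push goes over SSH. The secret name below is an assumption:

```yml
- uses: actions/checkout@v4
  with:
    # private half of the deploy key, stored as a repository secret (assumed name)
    ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}

- uses: peter-evans/create-pull-request@v7
  # the branch is pushed over SSH with the deploy key, so `on: push` workflows run
```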
@@ -374,7 +373,7 @@ The action supports two methods to sign commits, [commit signature verification
The action can sign commits as `github-actions[bot]` when using the repository's default `GITHUB_TOKEN`, or your own bot when using [GitHub App tokens](#authenticating-with-github-app-generated-tokens).
> [!IMPORTANT]
> - When setting `sign-commits: true` the action will ignore the `committer` and `author` inputs.
> - If you attempt to use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) the action will create the pull request, but commits will *not* be signed. Commit signing is only supported with bot generated tokens.
> - The GitHub API has a 40MiB limit when creating git blobs. An error will be raised if there are files in the pull request larger than this. If you hit this limit, use [GPG commit signature verification](#gpg-commit-signature-verification) instead.
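As a rough sketch of the bot-signed-commit setup referenced above, assuming a GitHub App whose credentials are stored as `APP_ID` and `APP_PRIVATE_KEY` (both names are assumptions):

```yml
- uses: actions/create-github-app-token@v1
  id: app-token
  with:
    app-id: ${{ vars.APP_ID }}                    # assumed variable name
    private-key: ${{ secrets.APP_PRIVATE_KEY }}   # assumed secret name

- uses: peter-evans/create-pull-request@v7
  with:
    token: ${{ steps.app-token.outputs.token }}
    sign-commits: true   # committer and author inputs are ignored when signing
```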

@@ -22,7 +22,7 @@
- [Dynamic configuration using variables](#dynamic-configuration-using-variables)
- [Using a markdown template](#using-a-markdown-template)
- [Debugging GitHub Actions](#debugging-github-actions)
- [Show an annotation message for a created pull request](#show-an-annotation-message-for-a-created-pull-request)
## Use case: Create a pull request to update X on push
@@ -612,30 +612,3 @@ To enable step debug logging set the secret `ACTIONS_STEP_DEBUG` to `true` in the
MATRIX_CONTEXT: ${{ toJson(matrix) }}
run: echo "$MATRIX_CONTEXT"
```
### Show an annotation message for a created pull request
Showing an annotation message for a created or updated pull request allows you to confirm the pull request easily, such as by visiting the link. This can be achieved by adding a step that uses the [`notice` workflow command](https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions?tool=bash#setting-a-notice-message).
For example:
```yml
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v7
- name: Show message for created Pull Request
if: ${{ steps.cpr.outputs.pull-request-url && steps.cpr.outputs.pull-request-operation != 'none' }}
shell: bash
env:
PR_URL: ${{ steps.cpr.outputs.pull-request-url }}
PR_OPERATION: ${{ steps.cpr.outputs.pull-request-operation }}
run: |
echo "::notice::${PR_URL} was ${PR_OPERATION}."
```
In this example, when a pull request is created, you will be able to see the following message on an action run page (e.g., `/actions/runs/12812393039`):
```
https://github.com/peter-evans/create-pull-request/pull/1 was created.
```

750
package-lock.json generated

File diff suppressed because it is too large

@@ -31,33 +31,33 @@
"dependencies": {
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@octokit/core": "^6.1.5",
"@octokit/plugin-paginate-rest": "^11.6.0",
"@octokit/plugin-rest-endpoint-methods": "^13.5.0",
"@octokit/plugin-throttling": "^9.6.1",
"node-fetch-native": "^1.6.6",
"@octokit/core": "^6.1.3",
"@octokit/plugin-paginate-rest": "^11.4.0",
"@octokit/plugin-rest-endpoint-methods": "^13.3.0",
"@octokit/plugin-throttling": "^9.4.0",
"node-fetch-native": "^1.6.4",
"p-limit": "^6.2.0",
"uuid": "^9.0.1"
},
"devDependencies": {
"@types/jest": "^29.5.14",
"@types/node": "^18.19.96",
"@types/node": "^18.19.70",
"@typescript-eslint/eslint-plugin": "^7.18.0",
"@typescript-eslint/parser": "^7.18.0",
"@vercel/ncc": "^0.38.3",
"eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.10.1",
"eslint-import-resolver-typescript": "^3.7.0",
"eslint-plugin-github": "^4.10.2",
"eslint-plugin-import": "^2.31.0",
"eslint-plugin-jest": "^27.9.0",
"eslint-plugin-prettier": "^5.4.0",
"eslint-plugin-prettier": "^5.2.1",
"jest": "^29.7.0",
"jest-circus": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
"js-yaml": "^4.1.0",
"prettier": "^3.5.3",
"ts-jest": "^29.3.2",
"typescript": "^5.8.3",
"undici": "^6.21.2"
"prettier": "^3.4.2",
"ts-jest": "^29.2.5",
"typescript": "^5.7.3",
"undici": "^6.21.0"
}
}

@@ -19,7 +19,7 @@ export async function getWorkingBaseAndType(
): Promise<[string, WorkingBaseType]> {
const symbolicRefResult = await git.exec(
['symbolic-ref', 'HEAD', '--short'],
{allowAllExitCodes: true}
true
)
if (symbolicRefResult.exitCode == 0) {
// A ref is checked out
@@ -200,7 +200,7 @@ export async function createOrUpdateBranch(
} else {
aopts.push('-A')
}
await git.exec(aopts, {allowAllExitCodes: true})
await git.exec(aopts, true)
const popts = ['-m', commitMessage]
if (signoff) {
popts.push('--signoff')

@@ -211,7 +211,6 @@ export async function createPullRequest(inputs: Inputs): Promise<void> {
const stashed = await git.stashPush(['--include-untracked'])
await git.checkout(inputs.branch)
const pushSignedCommitsResult = await ghBranch.pushSignedCommits(
git,
result.branchCommits,
result.baseCommit,
repoPath,

@@ -2,7 +2,6 @@ import * as exec from '@actions/exec'
import * as io from '@actions/io'
import * as utils from './utils'
import * as path from 'path'
import stream, {Writable} from 'stream'
const tagsRefSpec = '+refs/tags/*:refs/tags/*'
@@ -22,12 +21,6 @@
unparsedChanges: string[]
}
export type ExecOpts = {
allowAllExitCodes?: boolean
encoding?: 'utf8' | 'base64'
suppressGitCmdOutput?: boolean
}
export class GitCommandManager {
private gitPath: string
private workingDirectory: string
@@ -73,7 +66,7 @@
args.push(...options)
}
return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
return await this.exec(args, allowAllExitCodes)
}
async commit(
@@ -89,7 +82,7 @@
args.push(...options)
}
return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
return await this.exec(args, allowAllExitCodes)
}
async config(
@@ -120,7 +113,7 @@
configKey,
configValue
],
{allowAllExitCodes: true}
true
)
return output.exitCode === 0
}
@@ -163,20 +156,17 @@
async getCommit(ref: string): Promise<Commit> {
const endOfBody = '###EOB###'
const output = await this.exec(
[
'-c',
'core.quotePath=false',
'show',
'--raw',
'--cc',
'--no-renames',
'--no-abbrev',
`--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
ref
],
{suppressGitCmdOutput: true}
)
const output = await this.exec([
'-c',
'core.quotePath=false',
'show',
'--raw',
'--cc',
'--no-renames',
'--no-abbrev',
`--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
ref
])
const lines = output.stdout.split('\n')
const endOfBodyIndex = lines.lastIndexOf(endOfBody)
const detailLines = lines.slice(0, endOfBodyIndex)
@@ -232,7 +222,7 @@
if (options) {
args.push(...options)
}
const output = await this.exec(args, {allowAllExitCodes: true})
const output = await this.exec(args, true)
return output.exitCode === 1
}
@@ -288,15 +278,6 @@
return output.stdout.trim()
}
async showFileAtRefBase64(ref: string, path: string): Promise<string> {
const args = ['show', `${ref}:${path}`]
const output = await this.exec(args, {
encoding: 'base64',
suppressGitCmdOutput: true
})
return output.stdout.trim()
}
async stashPush(options?: string[]): Promise<boolean> {
const args = ['stash', 'push']
if (options) {
@@ -345,7 +326,7 @@
configKey,
configValue
],
{allowAllExitCodes: true}
true
)
return output.exitCode === 0
}
@@ -353,7 +334,7 @@
async tryGetRemoteUrl(): Promise<string> {
const output = await this.exec(
['config', '--local', '--get', 'remote.origin.url'],
{allowAllExitCodes: true}
true
)
if (output.exitCode !== 0) {
@@ -368,30 +349,16 @@
return stdout
}
async exec(
args: string[],
{
encoding = 'utf8',
allowAllExitCodes = false,
suppressGitCmdOutput = false
}: ExecOpts = {}
): Promise<GitOutput> {
async exec(args: string[], allowAllExitCodes = false): Promise<GitOutput> {
const result = new GitOutput()
if (process.env['CPR_SHOW_GIT_CMD_OUTPUT']) {
// debug mode overrides the suppressGitCmdOutput option
suppressGitCmdOutput = false
}
const env = {}
for (const key of Object.keys(process.env)) {
env[key] = process.env[key]
}
const stdout: Buffer[] = []
let stdoutLength = 0
const stderr: Buffer[] = []
let stderrLength = 0
const stdout: string[] = []
const stderr: string[] = []
const options = {
cwd: this.workingDirectory,
@@ -399,21 +366,17 @@
ignoreReturnCode: allowAllExitCodes,
listeners: {
stdout: (data: Buffer) => {
stdout.push(data)
stdoutLength += data.length
stdout.push(data.toString())
},
stderr: (data: Buffer) => {
stderr.push(data)
stderrLength += data.length
stderr.push(data.toString())
}
},
outStream: outStreamHandler(process.stdout, suppressGitCmdOutput),
errStream: outStreamHandler(process.stderr, suppressGitCmdOutput)
}
}
result.exitCode = await exec.exec(`"${this.gitPath}"`, args, options)
result.stdout = Buffer.concat(stdout, stdoutLength).toString(encoding)
result.stderr = Buffer.concat(stderr, stderrLength).toString(encoding)
result.stdout = stdout.join('')
result.stderr = stderr.join('')
return result
}
}
@@ -423,24 +386,3 @@
stderr = ''
exitCode = 0
}
const outStreamHandler = (
outStream: Writable,
suppressGitCmdOutput: boolean
): Writable => {
return new stream.Writable({
write(chunk, _, next) {
if (suppressGitCmdOutput) {
const lines = chunk.toString().trimEnd().split('\n')
for (const line of lines) {
if (line.startsWith('[command]')) {
outStream.write(`${line}\n`)
}
}
} else {
outStream.write(chunk)
}
next()
}
})
}

@@ -1,6 +1,6 @@
import * as core from '@actions/core'
import {Inputs} from './create-pull-request'
import {Commit, GitCommandManager} from './git-command-manager'
import {Commit} from './git-command-manager'
import {Octokit, OctokitOptions, throttleOptions} from './octokit-client'
import pLimit from 'p-limit'
import * as utils from './utils'
@@ -47,7 +47,7 @@ export class GitHubHelper {
options.auth = `${token}`
}
if (githubServerHostname !== 'github.com') {
options.baseUrl = `https://${githubServerHostname}/api/v1`
options.baseUrl = `https://${githubServerHostname}/api/v3`
} else {
options.baseUrl = 'https://api.github.com'
}
@@ -220,7 +220,6 @@
}
async pushSignedCommits(
git: GitCommandManager,
branchCommits: Commit[],
baseCommit: Commit,
repoPath: string,
@@ -234,7 +233,6 @@
}
for (const commit of branchCommits) {
headCommit = await this.createCommit(
git,
commit,
headCommit,
repoPath,
@ -246,7 +244,6 @@ export class GitHubHelper {
}
private async createCommit(
git: GitCommandManager,
commit: Commit,
parentCommit: CommitResponse,
repoPath: string,
@@ -272,10 +269,10 @@
let sha: string | null = null
if (status === 'A' || status === 'M') {
try {
const {data: blob} = await blobCreationLimit(async () =>
const {data: blob} = await blobCreationLimit(() =>
this.octokit.rest.git.createBlob({
...repository,
content: await git.showFileAtRefBase64(commit.sha, path),
content: utils.readFileBase64([repoPath, path]),
encoding: 'base64'
})
)

@@ -126,6 +126,16 @@ export function readFile(path: string): string {
return fs.readFileSync(path, 'utf-8')
}
export function readFileBase64(pathParts: string[]): string {
const resolvedPath = path.resolve(...pathParts)
if (fs.lstatSync(resolvedPath).isSymbolicLink()) {
return fs
.readlinkSync(resolvedPath, {encoding: 'buffer'})
.toString('base64')
}
return fs.readFileSync(resolvedPath).toString('base64')
}
/* eslint-disable @typescript-eslint/no-explicit-any */
function hasErrorCode(error: any): error is {code: string} {
return typeof (error && error.code) === 'string'