Compare commits

..

10 Commits

Author SHA1 Message Date
ce00b952cf Merge pull request #128 from peter-evans/dev
Unset and restore authorization extraheader only
2020-02-22 17:02:15 +09:00
0d42c285a3 Unset and restore authorization extraheader only 2020-02-22 16:56:42 +09:00
ea1eaf1734 Merge pull request #127 from peter-evans/dev
Unset and restore extraheader config option
2020-02-22 14:53:46 +09:00
d5c5ea3e20 Unset and restore extraheader config option 2020-02-22 14:08:54 +09:00
c7b64af0a4 Merge pull request #123 from peter-evans/dev
Authenticate with git extraheader
2020-02-18 19:49:13 +09:00
289fda9fea Authenticate with git extraheader 2020-02-18 19:35:15 +09:00
b021b9e27a Update vendored dependencies 2020-02-18 15:59:18 +09:00
c26314237b Update dependency GitPython to v3.0.8 2020-02-18 15:57:13 +09:00
ed7dd8d236 Update documentation 2020-02-14 20:10:27 +09:00
ca0e9d75fd Update documentation 2020-02-14 00:23:21 +09:00
35 changed files with 6631 additions and 207 deletions

17 .eslintrc.json Normal file

@@ -0,0 +1,17 @@
{
"env": {
"commonjs": true,
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018
},
"rules": {
}
}
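This lint configuration backs the eslint 6.8.0 devDependency and the new `lint` and `test` scripts added to `package.json` later in this diff.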


@@ -1,5 +1,6 @@
#!/usr/bin/env python3
""" Create Pull Request """
import base64
import common as cmn
import create_or_update_branch as coub
import create_or_update_pull_request as coupr
@@ -31,7 +32,7 @@ def get_git_config_value(repo, name):
return None
def get_repository_detail():
def get_repository_detail(repo):
remote_origin_url = get_git_config_value(repo, "remote.origin.url")
if remote_origin_url is None:
raise ValueError("Failed to fetch 'remote.origin.url' from git config")
@@ -116,9 +117,21 @@ repo = Repo(path)
# Determine the GitHub repository from git config
# This will be the target repository for the pull request
repo_url, protocol, github_repository = get_repository_detail()
repo_url, protocol, github_repository = get_repository_detail(repo)
print(f"Target repository set to {github_repository}")
if protocol == "HTTPS":
print(f"::debug::Using HTTPS protocol")
# Encode and configure the basic credential for HTTPS access
basic_credential = base64.b64encode(
f"x-access-token:{github_token}".encode("utf-8")
).decode("utf-8")
# Mask the basic credential in logs and debug output
print(f"::add-mask::{basic_credential}")
repo.git.set_persistent_git_options(
c=f"http.https://github.com/.extraheader=AUTHORIZATION: basic {basic_credential}"
)
# Determine if the checked out ref is a valid base for a pull request
# The action needs the checked out HEAD ref to be a branch
# This check will fail in the following cases:
@@ -174,11 +187,6 @@ except ValueError as e:
print(f"::error::{e} " + "Unable to continue. Exiting.")
sys.exit(1)
# Set the auth token in the repo URL
# This supports checkout@v1. From v2 the auth token is saved for further use.
if protocol == "HTTPS":
repo_url = f"https://x-access-token:{github_token}@github.com/{github_repository}"
# Create or update the pull request branch
result = coub.create_or_update_branch(repo, repo_url, commit_message, base, branch)

2 dist/cpr/requirements.txt vendored Normal file

@@ -0,0 +1,2 @@
GitPython==3.0.8
PyGithub==1.46

373 dist/index.js vendored

@@ -34,7 +34,7 @@ module.exports =
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(104);
/******/ return __webpack_require__(676);
/******/ };
/******/
/******/ // run startup
@@ -973,21 +973,6 @@ module.exports = util.assign(
module.exports = require("tls");
/***/ }),
/***/ 58:
/***/ (function(module, __unusedexports, __webpack_require__) {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = __webpack_require__(417);
module.exports = function nodeRNG() {
return crypto.randomBytes(16);
};
/***/ }),
/***/ 87:
@ -998,109 +983,6 @@ module.exports = require("os");
/***/ }),
/***/ 104:
/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) {
const { inspect } = __webpack_require__(669);
const isDocker = __webpack_require__(160);
const core = __webpack_require__(470);
const exec = __webpack_require__(986);
const setupPython = __webpack_require__(139);
async function run() {
try {
// Allows ncc to find assets to be included in the distribution
const src = __webpack_require__.ab + "src";
core.debug(`src: ${src}`);
// Determine how to access python and pip
const { pip, python } = (function() {
if (isDocker()) {
core.info("Running inside a Docker container");
// Python 3 assumed to be installed and on the PATH
return {
pip: "pip3",
python: "python3"
};
} else {
// Setup Python from the tool cache
setupPython("3.x", "x64");
return {
pip: "pip",
python: "python"
};
}
})();
// Install requirements
await exec.exec(pip, [
"install",
"--requirement",
`${src}/requirements.txt`,
"--no-index",
`--find-links=${__dirname}/vendor`
]);
// Fetch action inputs
const inputs = {
token: core.getInput("token"),
path: core.getInput("path"),
commitMessage: core.getInput("commit-message"),
committer: core.getInput("committer"),
author: core.getInput("author"),
title: core.getInput("title"),
body: core.getInput("body"),
labels: core.getInput("labels"),
assignees: core.getInput("assignees"),
reviewers: core.getInput("reviewers"),
teamReviewers: core.getInput("team-reviewers"),
milestone: core.getInput("milestone"),
project: core.getInput("project"),
projectColumn: core.getInput("project-column"),
branch: core.getInput("branch"),
base: core.getInput("base"),
branchSuffix: core.getInput("branch-suffix")
};
core.debug(`Inputs: ${inspect(inputs)}`);
// Set environment variables from inputs.
if (inputs.token) process.env.GITHUB_TOKEN = inputs.token;
if (inputs.path) process.env.CPR_PATH = inputs.path;
if (inputs.commitMessage) process.env.CPR_COMMIT_MESSAGE = inputs.commitMessage;
if (inputs.committer) process.env.CPR_COMMITTER = inputs.committer;
if (inputs.author) process.env.CPR_AUTHOR = inputs.author;
if (inputs.title) process.env.CPR_TITLE = inputs.title;
if (inputs.body) process.env.CPR_BODY = inputs.body;
if (inputs.labels) process.env.CPR_LABELS = inputs.labels;
if (inputs.assignees) process.env.CPR_ASSIGNEES = inputs.assignees;
if (inputs.reviewers) process.env.CPR_REVIEWERS = inputs.reviewers;
if (inputs.teamReviewers) process.env.CPR_TEAM_REVIEWERS = inputs.teamReviewers;
if (inputs.milestone) process.env.CPR_MILESTONE = inputs.milestone;
if (inputs.project) process.env.CPR_PROJECT_NAME = inputs.project;
if (inputs.projectColumn) process.env.CPR_PROJECT_COLUMN_NAME = inputs.projectColumn;
if (inputs.branch) process.env.CPR_BRANCH = inputs.branch;
if (inputs.base) process.env.CPR_BASE = inputs.base;
if (inputs.branchSuffix) process.env.CPR_BRANCH_SUFFIX = inputs.branchSuffix;
// Execute python script
await exec.exec(python, [`${src}/create_pull_request.py`]);
} catch (error) {
core.setFailed(error.message);
}
}
run();
/***/ }),
/***/ 129:
/***/ (function(module) {
module.exports = require("child_process");
/***/ }),
/***/ 139:
/***/ (function(module, __unusedexports, __webpack_require__) {
const core = __webpack_require__(470);
@@ -1157,6 +1039,28 @@ let setupPython = function(versionSpec, arch) {
module.exports = setupPython;
/***/ }),
/***/ 129:
/***/ (function(module) {
module.exports = require("child_process");
/***/ }),
/***/ 139:
/***/ (function(module, __unusedexports, __webpack_require__) {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = __webpack_require__(417);
module.exports = function nodeRNG() {
return crypto.randomBytes(16);
};
/***/ }),
/***/ 141:
@@ -4301,6 +4205,235 @@ function isUnixExecutable(stats) {
}
//# sourceMappingURL=io-util.js.map
/***/ }),
/***/ 676:
/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) {
const { inspect } = __webpack_require__(669);
const isDocker = __webpack_require__(160);
const core = __webpack_require__(470);
const exec = __webpack_require__(986);
const setupPython = __webpack_require__(104);
const {
getRepoPath,
getAndUnsetConfigOption,
addConfigOption
} = __webpack_require__(718);
const EXTRAHEADER_OPTION = "http.https://github.com/.extraheader";
const EXTRAHEADER_VALUE_REGEX = "^AUTHORIZATION:";
async function run() {
try {
// Allows ncc to find assets to be included in the distribution
const cpr = __webpack_require__.ab + "cpr";
core.debug(`cpr: ${cpr}`);
// Determine how to access python and pip
const { pip, python } = (function() {
if (isDocker()) {
core.info("Running inside a Docker container");
// Python 3 assumed to be installed and on the PATH
return {
pip: "pip3",
python: "python3"
};
} else {
// Setup Python from the tool cache
setupPython("3.x", "x64");
return {
pip: "pip",
python: "python"
};
}
})();
// Install requirements
await exec.exec(pip, [
"install",
"--requirement",
`${cpr}/requirements.txt`,
"--no-index",
`--find-links=${__dirname}/vendor`
]);
// Fetch action inputs
const inputs = {
token: core.getInput("token"),
path: core.getInput("path"),
commitMessage: core.getInput("commit-message"),
committer: core.getInput("committer"),
author: core.getInput("author"),
title: core.getInput("title"),
body: core.getInput("body"),
labels: core.getInput("labels"),
assignees: core.getInput("assignees"),
reviewers: core.getInput("reviewers"),
teamReviewers: core.getInput("team-reviewers"),
milestone: core.getInput("milestone"),
project: core.getInput("project"),
projectColumn: core.getInput("project-column"),
branch: core.getInput("branch"),
base: core.getInput("base"),
branchSuffix: core.getInput("branch-suffix")
};
core.debug(`Inputs: ${inspect(inputs)}`);
// Set environment variables from inputs.
if (inputs.token) process.env.GITHUB_TOKEN = inputs.token;
if (inputs.path) process.env.CPR_PATH = inputs.path;
if (inputs.commitMessage) process.env.CPR_COMMIT_MESSAGE = inputs.commitMessage;
if (inputs.committer) process.env.CPR_COMMITTER = inputs.committer;
if (inputs.author) process.env.CPR_AUTHOR = inputs.author;
if (inputs.title) process.env.CPR_TITLE = inputs.title;
if (inputs.body) process.env.CPR_BODY = inputs.body;
if (inputs.labels) process.env.CPR_LABELS = inputs.labels;
if (inputs.assignees) process.env.CPR_ASSIGNEES = inputs.assignees;
if (inputs.reviewers) process.env.CPR_REVIEWERS = inputs.reviewers;
if (inputs.teamReviewers) process.env.CPR_TEAM_REVIEWERS = inputs.teamReviewers;
if (inputs.milestone) process.env.CPR_MILESTONE = inputs.milestone;
if (inputs.project) process.env.CPR_PROJECT_NAME = inputs.project;
if (inputs.projectColumn) process.env.CPR_PROJECT_COLUMN_NAME = inputs.projectColumn;
if (inputs.branch) process.env.CPR_BRANCH = inputs.branch;
if (inputs.base) process.env.CPR_BASE = inputs.base;
if (inputs.branchSuffix) process.env.CPR_BRANCH_SUFFIX = inputs.branchSuffix;
// Get the repository path
var repoPath = getRepoPath(inputs.path);
// Get the extraheader config option if it exists
var extraHeaderOption = await getAndUnsetConfigOption(
repoPath,
EXTRAHEADER_OPTION,
EXTRAHEADER_VALUE_REGEX
);
// Execute create pull request
await exec.exec(python, [`${cpr}/create_pull_request.py`]);
} catch (error) {
core.setFailed(error.message);
} finally {
// Restore the extraheader config option
if (extraHeaderOption) {
if (
await addConfigOption(
repoPath,
EXTRAHEADER_OPTION,
extraHeaderOption.value
)
)
core.debug(`Restored config option '${EXTRAHEADER_OPTION}'`);
}
}
}
run();
/***/ }),
/***/ 718:
/***/ (function(module, __unusedexports, __webpack_require__) {
const core = __webpack_require__(470);
const exec = __webpack_require__(986);
const path = __webpack_require__(622);
function getRepoPath(relativePath) {
let githubWorkspacePath = process.env["GITHUB_WORKSPACE"];
if (!githubWorkspacePath) {
throw new Error("GITHUB_WORKSPACE not defined");
}
githubWorkspacePath = path.resolve(githubWorkspacePath);
core.debug(`githubWorkspacePath: ${githubWorkspacePath}`);
repoPath = githubWorkspacePath;
if (relativePath) repoPath = path.resolve(repoPath, relativePath);
core.debug(`repoPath: ${repoPath}`);
return repoPath;
}
async function execGit(repoPath, args, ignoreReturnCode = false) {
const stdout = [];
const options = {
cwd: repoPath,
ignoreReturnCode: ignoreReturnCode,
listeners: {
stdout: data => {
stdout.push(data.toString());
}
}
};
var result = {};
result.exitCode = await exec.exec("git", args, options);
result.stdout = stdout.join("");
return result;
}
async function addConfigOption(repoPath, name, value) {
const result = await execGit(
repoPath,
["config", "--local", "--add", name, value],
true
);
return result.exitCode === 0;
}
async function unsetConfigOption(repoPath, name, valueRegex=".") {
const result = await execGit(
repoPath,
["config", "--local", "--unset", name, valueRegex],
true
);
return result.exitCode === 0;
}
async function configOptionExists(repoPath, name, valueRegex=".") {
const result = await execGit(
repoPath,
["config", "--local", "--name-only", "--get-regexp", name, valueRegex],
true
);
return result.exitCode === 0;
}
async function getConfigOption(repoPath, name, valueRegex=".") {
const result = await execGit(
repoPath,
["config", "--local", "--get-regexp", name, valueRegex],
true
);
const option = result.stdout.trim().split(`${name} `);
return {
name: name,
value: option[1]
}
}
async function getAndUnsetConfigOption(repoPath, name, valueRegex=".") {
if (await configOptionExists(repoPath, name, valueRegex)) {
const option = await getConfigOption(repoPath, name, valueRegex);
if (await unsetConfigOption(repoPath, name, valueRegex)) {
core.debug(`Unset config option '${name}'`);
return option;
}
}
return null;
}
module.exports = {
getRepoPath,
execGit,
addConfigOption,
unsetConfigOption,
configOptionExists,
getConfigOption,
getAndUnsetConfigOption
};
/***/ }),
/***/ 722:
@ -4733,7 +4866,7 @@ module.exports = require("zlib");
/***/ 826:
/***/ (function(module, __unusedexports, __webpack_require__) {
var rng = __webpack_require__(58);
var rng = __webpack_require__(139);
var bytesToUuid = __webpack_require__(722);
function v4(options, buf, offset) {
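The rebuilt bundle also reshuffles the webpack module ids: the startup entry moves from 104 to 676 (the new `src/index.js`), the `src/git.js` helpers are bundled as module 718, `setup-python` now sits at 104, and the uuid RNG module moves from 58 to 139, which is why `v4()` above now requires 139.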


@@ -1,2 +0,0 @@
GitPython==3.0.7
PyGithub==1.46


@@ -1,52 +0,0 @@
const core = require("@actions/core");
const tc = require("@actions/tool-cache");
const path = require("path");
const semver = require("semver");
/**
* Setup for Python from the GitHub Actions tool cache
* Converted from https://github.com/actions/setup-python
*
* @param {string} versionSpec version of Python
* @param {string} arch architecture (x64|x32)
*/
let setupPython = function(versionSpec, arch) {
return new Promise((resolve, reject) => {
const IS_WINDOWS = process.platform === "win32";
// Find the version of Python we want in the tool cache
const installDir = tc.find("Python", versionSpec, arch);
core.debug(`installDir: ${installDir}`);
// Set paths
core.exportVariable("pythonLocation", installDir);
core.addPath(installDir);
if (IS_WINDOWS) {
core.addPath(path.join(installDir, "Scripts"));
} else {
core.addPath(path.join(installDir, "bin"));
}
if (IS_WINDOWS) {
// Add --user directory
// `installDir` from tool cache should look like $AGENT_TOOLSDIRECTORY/Python/<semantic version>/x64/
// So if `findLocalTool` succeeded above, we must have a conformant `installDir`
const version = path.basename(path.dirname(installDir));
const major = semver.major(version);
const minor = semver.minor(version);
const userScriptsDir = path.join(
process.env["APPDATA"] || "",
"Python",
`Python${major}${minor}`,
"Scripts"
);
core.addPath(userScriptsDir);
}
// On Linux and macOS, pip will create the --user directory and add it to PATH as needed.
resolve();
});
};
module.exports = setupPython;

Binary file not shown.

BIN dist/vendor/GitPython-3.0.8.tar.gz vendored Normal file (binary file not shown)

Binary file not shown.

BIN dist/vendor/idna-2.9.tar.gz vendored Normal file (binary file not shown)

Binary file not shown.

BIN dist/vendor/requests-2.23.0.tar.gz vendored Normal file (binary file not shown)

Binary file not shown.

BIN dist/vendor/wrapt-1.12.0.tar.gz vendored Normal file (binary file not shown)


@@ -13,7 +13,7 @@ This document covers terminology, how the action works, general usage guidelines
- [Advanced usage](#advanced-usage)
- [Creating pull requests in a remote repository](#creating-pull-requests-in-a-remote-repository)
- [Push using SSH (deploy keys)](#push-using-ssh-deploy-keys)
- [Using in an alpine linux container](#using-in-an-alpine-linux-container)
- [Running in a container](#running-in-a-container)
- [Creating pull requests on tag push](#creating-pull-requests-on-tag-push)
## Terminology
@@ -158,7 +158,7 @@ Allowing the action to push with a configured deploy key will trigger `on: push`
How to use SSH (deploy keys) with create-pull-request action:
1. [Create an new SSH key pair](https://help.github.com/en/github/authenticating-to-github/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent#generating-a-new-ssh-key) for your repository. Do not set a passphrase.
1. [Create a new SSH key pair](https://help.github.com/en/github/authenticating-to-github/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent#generating-a-new-ssh-key) for your repository. Do not set a passphrase.
2. Copy the contents of the public key (.pub file) to a new repository [deploy key](https://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys) and check the box to "Allow write access."
3. Add a secret to the repository containing the entire contents of the private key.
4. As shown in the example steps below, use the [`webfactory/ssh-agent`](https://github.com/webfactory/ssh-agent) action to install the private key and clone your repository. Remember to checkout the `base` of your pull request if it's not the default branch, e.g. `git checkout my-branch`.
@@ -180,24 +180,49 @@ How to use SSH (deploy keys) with create-pull-request action:
token: ${{ secrets.GITHUB_TOKEN }}
```
### Using in an alpine linux container
### Running in a container
This action can be run inside an Alpine Linux container by pre-installing the correct binaries for the action's dependencies.
This action can be run inside a container by installing the action's dependencies either in the Docker image itself, or during the workflow.
The following example workflow installs git and Python dependencies at the start of the job. You can also bake these dependencies into your own Alpine Docker image if you prefer. Note that git must be installed *before* running `actions/checkout`, otherwise it will just download the source of the repository instead of cloning it.
The action requires `python3`, `pip3` and `git` to be installed and on the `PATH`.
Note that `actions/checkout` requires Git 2.18 or higher to be installed, otherwise it will just download the source of the repository instead of cloning it.
**Alpine container example:**
```yml
jobs:
createPullRequestAlpine:
runs-on: ubuntu-latest
container:
image: alpine
steps:
- name: Install dependencies
run: apk --no-cache add git python3
- uses: actions/checkout@v2
# Make changes to pull request here
- name: Create Pull Request
uses: peter-evans/create-pull-request@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
```
**Ubuntu container example:**
```yml
jobs:
createPullRequestAlpine:
runs-on: ubuntu-latest
container:
image: ubuntu
steps:
- name: Install dependencies
run: |
apk --no-cache add git python3
ln -sf python3 /usr/bin/python
ln -sf pip3 /usr/bin/pip
apt-get update
apt-get install -y software-properties-common
add-apt-repository -y ppa:git-core/ppa
apt-get install -y python3 python3-pip git
- uses: actions/checkout@v2

3 jest.config.js Normal file

@@ -0,0 +1,3 @@
process.env = Object.assign(process.env, {
GITHUB_WORKSPACE: __dirname
});
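`jest.config.js` points `GITHUB_WORKSPACE` at the repository root so that `src/git.test.js` can resolve paths and run git against a real checkout; with the updated `package.json` scripts further down, `npm test` runs eslint on `src/index.js` and then the jest suite.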

6054 package-lock.json generated

File diff suppressed because it is too large.


@@ -5,8 +5,10 @@
"main": "index.js",
"scripts": {
"clean": "rm -rf dist",
"build": "ncc build index.js -o dist",
"vendor-deps": "pip download -r src/requirements.txt --no-binary=:all: -d dist/vendor",
"lint": "eslint src/index.js",
"test": "eslint src/index.js && jest",
"build": "ncc build src/index.js -o dist",
"vendor-deps": "pip download -r src/cpr/requirements.txt --no-binary=:all: -d dist/vendor",
"package": "npm run build && npm run vendor-deps"
},
"repository": {
@@ -27,6 +29,8 @@
"is-docker": "^2.0.0"
},
"devDependencies": {
"@zeit/ncc": "0.21.1"
"@zeit/ncc": "0.21.1",
"eslint": "6.8.0",
"jest": "25.1.0"
}
}


@@ -1,5 +1,6 @@
#!/usr/bin/env python3
""" Create Pull Request """
import base64
import common as cmn
import create_or_update_branch as coub
import create_or_update_pull_request as coupr
@@ -31,7 +32,7 @@ def get_git_config_value(repo, name):
return None
def get_repository_detail():
def get_repository_detail(repo):
remote_origin_url = get_git_config_value(repo, "remote.origin.url")
if remote_origin_url is None:
raise ValueError("Failed to fetch 'remote.origin.url' from git config")
@@ -116,9 +117,21 @@ repo = Repo(path)
# Determine the GitHub repository from git config
# This will be the target repository for the pull request
repo_url, protocol, github_repository = get_repository_detail()
repo_url, protocol, github_repository = get_repository_detail(repo)
print(f"Target repository set to {github_repository}")
if protocol == "HTTPS":
print(f"::debug::Using HTTPS protocol")
# Encode and configure the basic credential for HTTPS access
basic_credential = base64.b64encode(
f"x-access-token:{github_token}".encode("utf-8")
).decode("utf-8")
# Mask the basic credential in logs and debug output
print(f"::add-mask::{basic_credential}")
repo.git.set_persistent_git_options(
c=f"http.https://github.com/.extraheader=AUTHORIZATION: basic {basic_credential}"
)
# Determine if the checked out ref is a valid base for a pull request
# The action needs the checked out HEAD ref to be a branch
# This check will fail in the following cases:
@@ -174,11 +187,6 @@ except ValueError as e:
print(f"::error::{e} " + "Unable to continue. Exiting.")
sys.exit(1)
# Set the auth token in the repo URL
# This supports checkout@v1. From v2 the auth token is saved for further use.
if protocol == "HTTPS":
repo_url = f"https://x-access-token:{github_token}@github.com/{github_repository}"
# Create or update the pull request branch
result = coub.create_or_update_branch(repo, repo_url, commit_message, base, branch)
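Taken together with the removed hunk just above, this change swaps token-in-URL authentication for a git extraheader: when the remote protocol is HTTPS, the script base64-encodes `x-access-token:<token>` as a basic credential, masks it with `::add-mask::`, and applies it to every subsequent git command through `set_persistent_git_options`. A rough Node equivalent of the encoding, for illustration only (the `token` variable is assumed and not part of the diff):

```js
// Sketch only: how the AUTHORIZATION value configured above is built.
// `token` is assumed to hold the workflow's GitHub token.
const basicCredential = Buffer.from(`x-access-token:${token}`, "utf-8").toString("base64");
const extraHeader = `AUTHORIZATION: basic ${basicCredential}`;
// Every git command then effectively runs as:
//   git -c "http.https://github.com/.extraheader=<extraHeader>" <subcommand> ...
```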

2 src/cpr/requirements.txt Normal file

@@ -0,0 +1,2 @@
GitPython==3.0.8
PyGithub==1.46

97 src/git.js Normal file

@@ -0,0 +1,97 @@
const core = require("@actions/core");
const exec = require("@actions/exec");
const path = require("path");
function getRepoPath(relativePath) {
let githubWorkspacePath = process.env["GITHUB_WORKSPACE"];
if (!githubWorkspacePath) {
throw new Error("GITHUB_WORKSPACE not defined");
}
githubWorkspacePath = path.resolve(githubWorkspacePath);
core.debug(`githubWorkspacePath: ${githubWorkspacePath}`);
repoPath = githubWorkspacePath;
if (relativePath) repoPath = path.resolve(repoPath, relativePath);
core.debug(`repoPath: ${repoPath}`);
return repoPath;
}
async function execGit(repoPath, args, ignoreReturnCode = false) {
const stdout = [];
const options = {
cwd: repoPath,
ignoreReturnCode: ignoreReturnCode,
listeners: {
stdout: data => {
stdout.push(data.toString());
}
}
};
var result = {};
result.exitCode = await exec.exec("git", args, options);
result.stdout = stdout.join("");
return result;
}
async function addConfigOption(repoPath, name, value) {
const result = await execGit(
repoPath,
["config", "--local", "--add", name, value],
true
);
return result.exitCode === 0;
}
async function unsetConfigOption(repoPath, name, valueRegex=".") {
const result = await execGit(
repoPath,
["config", "--local", "--unset", name, valueRegex],
true
);
return result.exitCode === 0;
}
async function configOptionExists(repoPath, name, valueRegex=".") {
const result = await execGit(
repoPath,
["config", "--local", "--name-only", "--get-regexp", name, valueRegex],
true
);
return result.exitCode === 0;
}
async function getConfigOption(repoPath, name, valueRegex=".") {
const result = await execGit(
repoPath,
["config", "--local", "--get-regexp", name, valueRegex],
true
);
const option = result.stdout.trim().split(`${name} `);
return {
name: name,
value: option[1]
}
}
async function getAndUnsetConfigOption(repoPath, name, valueRegex=".") {
if (await configOptionExists(repoPath, name, valueRegex)) {
const option = await getConfigOption(repoPath, name, valueRegex);
if (await unsetConfigOption(repoPath, name, valueRegex)) {
core.debug(`Unset config option '${name}'`);
return option;
}
}
return null;
}
module.exports = {
getRepoPath,
execGit,
addConfigOption,
unsetConfigOption,
configOptionExists,
getConfigOption,
getAndUnsetConfigOption
};
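A note on the design: every helper shells out through `execGit` with `ignoreReturnCode` enabled and interprets git's exit code as the result, so probing for an option that does not exist simply returns `false` (or `null` from `getAndUnsetConfigOption`) instead of failing the step. The test file below exercises both the positive and negative cases.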

98 src/git.test.js Normal file

@@ -0,0 +1,98 @@
const path = require("path");
const {
getRepoPath,
execGit,
addConfigOption,
unsetConfigOption,
configOptionExists,
getConfigOption,
getAndUnsetConfigOption
} = require("./git");
test("getRepoPath", async () => {
expect(getRepoPath()).toEqual(process.env["GITHUB_WORKSPACE"]);
expect(getRepoPath("foo")).toEqual(
path.resolve(process.env["GITHUB_WORKSPACE"], "foo")
);
});
test("execGit", async () => {
const repoPath = getRepoPath();
const result = await execGit(
repoPath,
["config", "--local", "--name-only", "--get-regexp", "remote.origin.url"],
true
);
expect(result.exitCode).toEqual(0);
expect(result.stdout.trim()).toEqual("remote.origin.url");
});
test("add and unset config option", async () => {
const repoPath = getRepoPath();
const add = await addConfigOption(repoPath, "test.add.and.unset.config.option", "foo");
expect(add).toBeTruthy();
const unset = await unsetConfigOption(repoPath, "test.add.and.unset.config.option");
expect(unset).toBeTruthy();
});
test("add and unset config option with value regex", async () => {
const repoPath = getRepoPath();
const add = await addConfigOption(repoPath, "test.add.and.unset.config.option", "foo bar");
expect(add).toBeTruthy();
const unset = await unsetConfigOption(repoPath, "test.add.and.unset.config.option", "^foo");
expect(unset).toBeTruthy();
});
test("configOptionExists returns true", async () => {
const repoPath = getRepoPath();
const result = await configOptionExists(repoPath, "remote.origin.url");
expect(result).toBeTruthy();
});
test("configOptionExists returns false", async () => {
const repoPath = getRepoPath();
const result = await configOptionExists(repoPath, "this.key.does.not.exist");
expect(result).toBeFalsy();
});
test("get config option", async () => {
const repoPath = getRepoPath();
const add = await addConfigOption(repoPath, "test.get.config.option", "foo");
expect(add).toBeTruthy();
const option = await getConfigOption(repoPath, "test.get.config.option");
expect(option.value).toEqual("foo");
const unset = await unsetConfigOption(repoPath, "test.get.config.option");
expect(unset).toBeTruthy();
});
test("get config option with value regex", async () => {
const repoPath = getRepoPath();
const add = await addConfigOption(repoPath, "test.get.config.option", "foo bar");
expect(add).toBeTruthy();
const option = await getConfigOption(repoPath, "test.get.config.option", "^foo");
expect(option.value).toEqual("foo bar");
const unset = await unsetConfigOption(repoPath, "test.get.config.option", "^foo");
expect(unset).toBeTruthy();
});
test("get and unset config option is successful", async () => {
const repoPath = getRepoPath();
const add = await addConfigOption(repoPath, "test.get.and.unset.config.option", "foo");
expect(add).toBeTruthy();
const getAndUnset = await getAndUnsetConfigOption(repoPath, "test.get.and.unset.config.option");
expect(getAndUnset.value).toEqual("foo");
});
test("get and unset config option is successful with value regex", async () => {
const repoPath = getRepoPath();
const add = await addConfigOption(repoPath, "test.get.and.unset.config.option", "foo bar");
expect(add).toBeTruthy();
const getAndUnset = await getAndUnsetConfigOption(repoPath, "test.get.and.unset.config.option", "^foo");
expect(getAndUnset.value).toEqual("foo bar");
});
test("get and unset config option is unsuccessful", async () => {
const repoPath = getRepoPath();
const getAndUnset = await getAndUnsetConfigOption(repoPath, "this.key.does.not.exist");
expect(getAndUnset).toBeNull();
});


@ -2,13 +2,21 @@ const { inspect } = require("util");
const isDocker = require("is-docker");
const core = require("@actions/core");
const exec = require("@actions/exec");
const setupPython = require("./src/setup-python");
const setupPython = require("./setup-python");
const {
getRepoPath,
getAndUnsetConfigOption,
addConfigOption
} = require("./git");
const EXTRAHEADER_OPTION = "http.https://github.com/.extraheader";
const EXTRAHEADER_VALUE_REGEX = "^AUTHORIZATION:";
async function run() {
try {
// Allows ncc to find assets to be included in the distribution
const src = __dirname + "/src";
core.debug(`src: ${src}`);
const cpr = __dirname + "/cpr";
core.debug(`cpr: ${cpr}`);
// Determine how to access python and pip
const { pip, python } = (function() {
@@ -33,7 +41,7 @@ async function run() {
await exec.exec(pip, [
"install",
"--requirement",
`${src}/requirements.txt`,
`${cpr}/requirements.txt`,
"--no-index",
`--find-links=${__dirname}/vendor`
]);
@@ -79,10 +87,31 @@ async function run() {
if (inputs.base) process.env.CPR_BASE = inputs.base;
if (inputs.branchSuffix) process.env.CPR_BRANCH_SUFFIX = inputs.branchSuffix;
// Execute python script
await exec.exec(python, [`${src}/create_pull_request.py`]);
// Get the repository path
var repoPath = getRepoPath(inputs.path);
// Get the extraheader config option if it exists
var extraHeaderOption = await getAndUnsetConfigOption(
repoPath,
EXTRAHEADER_OPTION,
EXTRAHEADER_VALUE_REGEX
);
// Execute create pull request
await exec.exec(python, [`${cpr}/create_pull_request.py`]);
} catch (error) {
core.setFailed(error.message);
} finally {
// Restore the extraheader config option
if (extraHeaderOption) {
if (
await addConfigOption(
repoPath,
EXTRAHEADER_OPTION,
extraHeaderOption.value
)
)
core.debug(`Restored config option '${EXTRAHEADER_OPTION}'`);
}
}
}
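This is the same flow that the rebuilt bundle carries in module 676: before the Python script runs, the action saves and unsets any `http.https://github.com/.extraheader` option whose value matches `^AUTHORIZATION:` (the credential persisted by `actions/checkout@v2`), and the `finally` block restores it afterwards so the workspace is left as checkout configured it. A minimal sketch of that save/unset/restore pattern, built from the helpers exported by `src/git.js` (the wrapper name `withConfigOptionRemoved` is hypothetical):

```js
// Sketch, not part of the diff: temporarily remove a git config option around an async step.
const { getRepoPath, getAndUnsetConfigOption, addConfigOption } = require("./git");

async function withConfigOptionRemoved(name, valueRegex, fn) {
  const repoPath = getRepoPath();
  // Save and unset the option if it exists (null when it does not).
  const saved = await getAndUnsetConfigOption(repoPath, name, valueRegex);
  try {
    return await fn();
  } finally {
    // Put the original value back regardless of what fn() did.
    if (saved) await addConfigOption(repoPath, name, saved.value);
  }
}
```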


@@ -1,2 +0,0 @@
GitPython==3.0.7
PyGithub==1.46