summaryrefslogtreecommitdiff
path: root/.github/workflows
diff options
context:
space:
mode:
Diffstat (limited to '.github/workflows')
-rw-r--r--.github/workflows/dependencies.yml43
-rw-r--r--.github/workflows/dependencies/.gitignore1
-rw-r--r--.github/workflows/dependencies/requirements.txt7
-rw-r--r--.github/workflows/dependencies/updater.py606
-rw-r--r--.github/workflows/installer.yml14
-rw-r--r--.github/workflows/installer/vercel.json4
-rw-r--r--.github/workflows/main.yml7
-rw-r--r--.github/workflows/project.yml73
-rw-r--r--.github/workflows/scorecard.yml65
9 files changed, 786 insertions, 34 deletions
diff --git a/.github/workflows/dependencies.yml b/.github/workflows/dependencies.yml
new file mode 100644
index 000000000..8d8f5cc73
--- /dev/null
+++ b/.github/workflows/dependencies.yml
@@ -0,0 +1,43 @@
+name: Update dependencies
+on:
+ workflow_dispatch: {}
+ schedule:
+ - cron: "0 6 * * 0"
+
+jobs:
+ check:
+ name: Check for updates
+ runs-on: ubuntu-latest
+ if: github.repository == 'ohmyzsh/ohmyzsh'
+ permissions:
+ contents: write # this is needed to push commits and branches
+ steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
+ with:
+ egress-policy: audit
+
+ - name: Checkout
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ with:
+ fetch-depth: 0
+ - name: Authenticate as @ohmyzsh
+ id: generate-token
+ uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
+ with:
+ app-id: ${{ secrets.OHMYZSH_APP_ID }}
+ private-key: ${{ secrets.OHMYZSH_APP_PRIVATE_KEY }}
+ - name: Setup Python
+ uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+ with:
+ python-version: "3.12"
+ cache: "pip"
+ - name: Process dependencies
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ GIT_APP_NAME: ohmyzsh[bot]
+ GIT_APP_EMAIL: 54982679+ohmyzsh[bot]@users.noreply.github.com
+ TMP_DIR: ${{ runner.temp }}
+ run: |
+ pip install -r .github/workflows/dependencies/requirements.txt
+ python3 .github/workflows/dependencies/updater.py
diff --git a/.github/workflows/dependencies/.gitignore b/.github/workflows/dependencies/.gitignore
new file mode 100644
index 000000000..1d17dae13
--- /dev/null
+++ b/.github/workflows/dependencies/.gitignore
@@ -0,0 +1 @@
+.venv
diff --git a/.github/workflows/dependencies/requirements.txt b/.github/workflows/dependencies/requirements.txt
new file mode 100644
index 000000000..584a96ac9
--- /dev/null
+++ b/.github/workflows/dependencies/requirements.txt
@@ -0,0 +1,7 @@
+certifi==2026.1.4
+charset-normalizer==3.4.4
+idna==3.11
+PyYAML==6.0.3
+requests==2.32.5
+semver==3.0.4
+urllib3==2.6.2
diff --git a/.github/workflows/dependencies/updater.py b/.github/workflows/dependencies/updater.py
new file mode 100644
index 000000000..783161d6c
--- /dev/null
+++ b/.github/workflows/dependencies/updater.py
@@ -0,0 +1,606 @@
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import timeit
+from copy import deepcopy
+from typing import Literal, NotRequired, Optional, TypedDict
+
+import requests
+import yaml
+from semver import Version
+
+# Get TMP_DIR variable from environment
+TMP_DIR = os.path.join(os.environ.get("TMP_DIR", "/tmp"), "ohmyzsh")
+# Relative path to dependencies.yml file
+DEPS_YAML_FILE = ".github/dependencies.yml"
+# Dry run flag
+DRY_RUN = os.environ.get("DRY_RUN", "0") == "1"
+
+# utils for tag comparison
+BASEVERSION = re.compile(
+ r"""[vV]?
+ (?P<major>(0|[1-9])\d*)
+ (\.
+ (?P<minor>(0|[1-9])\d*)
+ (\.
+ (?P<patch>(0|[1-9])\d*)
+ )?
+ )?
+ """,
+ re.VERBOSE,
+)
+
+
+def coerce(version: str) -> Optional[Version]:
+ match = BASEVERSION.search(version)
+ if not match:
+ return None
+
+ # BASEVERSION looks for `MAJOR.minor.patch` in the string given
+ # it fills with None if any of them is missing (for example `2.1`)
+ ver = {
+ key: 0 if value is None else value for key, value in match.groupdict().items()
+ }
+ # Version takes `major`, `minor`, `patch` arguments
+ ver = Version(**ver) # pyright: ignore[reportArgumentType]
+ return ver
+
+
+class CodeTimer:
+ def __init__(self, name=None):
+ self.name = " '" + name + "'" if name else ""
+
+ def __enter__(self):
+ self.start = timeit.default_timer()
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.took = (timeit.default_timer() - self.start) * 1000.0
+ print("Code block" + self.name + " took: " + str(self.took) + " ms")
+
+
+### YAML representation
+def str_presenter(dumper, data):
+ """
+ Configures yaml for dumping multiline strings
+ Ref: https://stackoverflow.com/a/33300001
+ """
+ if len(data.splitlines()) > 1: # check for multiline string
+ return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
+ return dumper.represent_scalar("tag:yaml.org,2002:str", data)
+
+
+yaml.add_representer(str, str_presenter)
+yaml.representer.SafeRepresenter.add_representer(str, str_presenter)
+
+
+# Types
+class DependencyDict(TypedDict):
+ repo: str
+ branch: str
+ version: str
+ precopy: NotRequired[str]
+ postcopy: NotRequired[str]
+
+
+class DependencyYAML(TypedDict):
+ dependencies: dict[str, DependencyDict]
+
+
+class UpdateStatusFalse(TypedDict):
+ has_updates: Literal[False]
+
+
+class UpdateStatusTrue(TypedDict):
+ has_updates: Literal[True]
+ version: str
+ compare_url: str
+ head_ref: str
+ head_url: str
+
+
+class CommandRunner:
+ class Exception(Exception):
+ def __init__(self, message, returncode, stage, stdout, stderr):
+ super().__init__(message)
+ self.returncode = returncode
+ self.stage = stage
+ self.stdout = stdout
+ self.stderr = stderr
+
+ @staticmethod
+ def run_or_fail(command: list[str], stage: str, *args, **kwargs):
+ if DRY_RUN and command[0] == "gh":
+ command.insert(0, "echo")
+
+ result = subprocess.run(command, *args, capture_output=True, **kwargs)
+
+ if result.returncode != 0:
+ raise CommandRunner.Exception(
+ f"{stage} command failed with exit code {result.returncode}",
+ returncode=result.returncode,
+ stage=stage,
+ stdout=result.stdout.decode("utf-8"),
+ stderr=result.stderr.decode("utf-8"),
+ )
+
+ return result
+
+
+class DependencyStore:
+ store: DependencyYAML = {"dependencies": {}}
+
+ @staticmethod
+ def set(data: DependencyYAML):
+ DependencyStore.store = data
+
+ @staticmethod
+ def update_dependency_version(path: str, version: str) -> DependencyYAML:
+ with CodeTimer(f"store deepcopy: {path}"):
+ store_copy = deepcopy(DependencyStore.store)
+
+ dependency = store_copy["dependencies"].get(path)
+ if dependency is None:
+ raise ValueError(f"Dependency {path} {version} not found")
+ dependency["version"] = version
+ store_copy["dependencies"][path] = dependency
+
+ return store_copy
+
+ @staticmethod
+ def write_store(file: str, data: DependencyYAML):
+ with open(file, "w") as yaml_file:
+ yaml.safe_dump(data, yaml_file, sort_keys=False)
+
+
+class Dependency:
+ def __init__(self, path: str, values: DependencyDict):
+ self.path = path
+ self.values = values
+
+ self.name: str = ""
+ self.desc: str = ""
+ self.kind: str = ""
+
+ match path.split("/"):
+ case ["plugins", name]:
+ self.name = name
+ self.kind = "plugin"
+ self.desc = f"{name} plugin"
+ case ["themes", name]:
+ self.name = name.replace(".zsh-theme", "")
+ self.kind = "theme"
+ self.desc = f"{self.name} theme"
+ case _:
+ self.name = self.desc = path
+
+ def __str__(self):
+ output: str = ""
+ for key in DependencyDict.__dict__["__annotations__"].keys():
+ if key not in self.values:
+ output += f"{key}: None\n"
+ continue
+
+ value = self.values[key]
+ if "\n" not in value:
+ output += f"{key}: {value}\n"
+ else:
+ output += f"{key}:\n "
+ output += value.replace("\n", "\n ", value.count("\n") - 1)
+ return output
+
+ def update_or_notify(self):
+ # Print dependency settings
+ print(f"Processing {self.desc}...", file=sys.stderr)
+ print(self, file=sys.stderr)
+
+ # Check for updates
+ repo = self.values["repo"]
+ remote_branch = self.values["branch"]
+ version = self.values["version"]
+ is_tag = version.startswith("tag:")
+
+ try:
+ with CodeTimer(f"update check: {repo}"):
+ if is_tag:
+ status = GitHub.check_newer_tag(repo, version.replace("tag:", ""))
+ else:
+ status = GitHub.check_updates(repo, remote_branch, version)
+
+ if status["has_updates"] is True:
+ short_sha = status["head_ref"][:8]
+ new_version = status["version"] if is_tag else short_sha
+
+ try:
+ branch_name = f"update/{self.path}/{new_version}"
+
+ # Create new branch
+ branch = Git.checkout_or_create_branch(branch_name)
+
+ # Update dependency files
+ self.__apply_upstream_changes()
+
+ if not Git.repo_is_clean():
+ # Update dependencies.yml file
+ self.__update_yaml(
+ f"tag:{new_version}" if is_tag else status["version"]
+ )
+
+ # Add all changes and commit
+ has_new_commit = Git.add_and_commit(self.name, new_version)
+
+ if has_new_commit:
+ # Push changes to remote
+ Git.push(branch)
+
+ # Create GitHub PR
+ GitHub.create_pr(
+ branch,
+ f"chore({self.name}): update to version {new_version}",
+ f"""## Description
+
+Update for **{self.desc}**: update to version [{new_version}]({status["head_url"]}).
+Check out the [list of changes]({status["compare_url"]}).
+""",
+ )
+
+ # Clean up repository
+ Git.clean_repo()
+ except (CommandRunner.Exception, shutil.Error) as e:
+ # Handle exception on automatic update
+ match type(e):
+ case CommandRunner.Exception:
+ # Print error message
+ print(
+ f"Error running {e.stage} command: {e.returncode}", # pyright: ignore[reportAttributeAccessIssue]
+ file=sys.stderr,
+ )
+ print(e.stderr, file=sys.stderr) # pyright: ignore[reportAttributeAccessIssue]
+ case shutil.Error:
+ print(f"Error copying files: {e}", file=sys.stderr)
+
+ try:
+ Git.clean_repo()
+ except CommandRunner.Exception as e:
+ print(
+ f"Error reverting repository to clean state: {e}",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ # Create a GitHub issue to notify maintainer
+ title = f"{self.path}: update to {new_version}"
+ body = f"""## Description
+
+There is a new version of `{self.name}` {self.kind} available.
+
+New version: [{new_version}]({status["head_url"]})
+Check out the [list of changes]({status["compare_url"]}).
+"""
+
+ print("Creating GitHub issue", file=sys.stderr)
+ print(f"{title}\n\n{body}", file=sys.stderr)
+ GitHub.create_issue(title, body)
+ except Exception as e:
+ print(e, file=sys.stderr)
+
+ def __update_yaml(self, new_version: str) -> None:
+ dep_yaml = DependencyStore.update_dependency_version(self.path, new_version)
+ DependencyStore.write_store(DEPS_YAML_FILE, dep_yaml)
+
+ def __apply_upstream_changes(self) -> None:
+ # Patterns to ignore in copying files from upstream repo
+ GLOBAL_IGNORE = [".git", ".github", ".gitignore"]
+
+ path = os.path.abspath(self.path)
+ precopy = self.values.get("precopy")
+ postcopy = self.values.get("postcopy")
+
+ repo = self.values["repo"]
+ branch = self.values["branch"]
+ remote_url = f"https://github.com/{repo}.git"
+ repo_dir = os.path.join(TMP_DIR, repo)
+
+ # Clone repository
+ Git.clone(remote_url, branch, repo_dir, reclone=True)
+
+ # Run precopy on tmp repo
+ if precopy is not None:
+ print("Running precopy script:", end="\n ", file=sys.stderr)
+ print(
+ precopy.replace("\n", "\n ", precopy.count("\n") - 1), file=sys.stderr
+ )
+ CommandRunner.run_or_fail(
+ ["bash", "-c", precopy], cwd=repo_dir, stage="Precopy"
+ )
+
+ # Copy files from upstream repo
+ print(f"Copying files from {repo_dir} to {path}", file=sys.stderr)
+ shutil.copytree(
+ repo_dir,
+ path,
+ dirs_exist_ok=True,
+ ignore=shutil.ignore_patterns(*GLOBAL_IGNORE),
+ )
+
+ # Run postcopy on our repository
+ if postcopy is not None:
+ print("Running postcopy script:", end="\n ", file=sys.stderr)
+ print(
+ postcopy.replace("\n", "\n ", postcopy.count("\n") - 1),
+ file=sys.stderr,
+ )
+ CommandRunner.run_or_fail(
+ ["bash", "-c", postcopy], cwd=path, stage="Postcopy"
+ )
+
+
+class Git:
+ default_branch = "master"
+
+ @staticmethod
+ def clone(remote_url: str, branch: str, repo_dir: str, reclone=False):
+ # If repo needs to be fresh
+ if reclone and os.path.exists(repo_dir):
+ shutil.rmtree(repo_dir)
+
+ # Clone repo in tmp directory and checkout branch
+ if not os.path.exists(repo_dir):
+ print(
+ f"Cloning {remote_url} to {repo_dir} and checking out {branch}",
+ file=sys.stderr,
+ )
+ CommandRunner.run_or_fail(
+ ["git", "clone", "--depth=1", "-b", branch, remote_url, repo_dir],
+ stage="Clone",
+ )
+
+ @staticmethod
+ def checkout_or_create_branch(branch_name: str):
+ # Get current branch name
+ result = CommandRunner.run_or_fail(
+ ["git", "rev-parse", "--abbrev-ref", "HEAD"], stage="GetDefaultBranch"
+ )
+ Git.default_branch = result.stdout.decode("utf-8").strip()
+
+ # Create new branch and return created branch name
+ try:
+ # try to checkout already existing branch
+ CommandRunner.run_or_fail(
+ ["git", "checkout", branch_name], stage="CreateBranch"
+ )
+ except CommandRunner.Exception:
+ # otherwise create new branch
+ CommandRunner.run_or_fail(
+ ["git", "checkout", "-b", branch_name], stage="CreateBranch"
+ )
+ return branch_name
+
+ @staticmethod
+ def repo_is_clean() -> bool:
+ """
+ Returns `True` if the repo is clean.
+ Returns `False` if the repo is dirty.
+ """
+ try:
+ CommandRunner.run_or_fail(
+ ["git", "diff", "--exit-code"], stage="CheckRepoClean"
+ )
+ return True
+ except CommandRunner.Exception:
+ return False
+
+ @staticmethod
+ def add_and_commit(scope: str, version: str) -> bool:
+ """
+        Returns `True` if there were changes and they were indeed committed.
+        Returns `False` if the repo was clean and no changes were committed.
+ """
+ if Git.repo_is_clean():
+ return False
+
+ user_name = os.environ.get("GIT_APP_NAME")
+ user_email = os.environ.get("GIT_APP_EMAIL")
+
+ # Add all files to git staging
+ CommandRunner.run_or_fail(["git", "add", "-A", "-v"], stage="AddFiles")
+
+ # Reset environment and git config
+ clean_env = os.environ.copy()
+ clean_env["LANG"] = "C.UTF-8"
+ clean_env["GIT_CONFIG_GLOBAL"] = "/dev/null"
+ clean_env["GIT_CONFIG_NOSYSTEM"] = "1"
+
+ # Commit with settings above
+ CommandRunner.run_or_fail(
+ [
+ "git",
+ "-c",
+ f"user.name={user_name}",
+ "-c",
+ f"user.email={user_email}",
+ "commit",
+ "-m",
+ f"chore({scope}): update to {version}",
+ ],
+ stage="CreateCommit",
+ env=clean_env,
+ )
+ return True
+
+ @staticmethod
+ def push(branch: str):
+ CommandRunner.run_or_fail(
+ ["git", "push", "-u", "origin", branch], stage="PushBranch"
+ )
+
+ @staticmethod
+ def clean_repo():
+ CommandRunner.run_or_fail(
+ ["git", "reset", "--hard", "HEAD"], stage="ResetRepository"
+ )
+ CommandRunner.run_or_fail(
+ ["git", "checkout", Git.default_branch], stage="CheckoutDefaultBranch"
+ )
+
+
+class GitHub:
+ @staticmethod
+ def check_newer_tag(repo, current_tag) -> UpdateStatusFalse | UpdateStatusTrue:
+ # GET /repos/:owner/:repo/git/refs/tags
+ url = f"https://api.github.com/repos/{repo}/git/refs/tags"
+
+ # Send a GET request to the GitHub API
+ response = requests.get(url)
+ current_version = coerce(current_tag)
+ if current_version is None:
+ raise ValueError(
+ f"Stored {current_version} from {repo} does not follow semver"
+ )
+
+ # If the request was successful
+ if response.status_code == 200:
+ # Parse the JSON response
+ data = response.json()
+
+ if len(data) == 0:
+ return {
+ "has_updates": False,
+ }
+
+ latest_ref = None
+ latest_version: Optional[Version] = None
+ for ref in data:
+ # we find the tag since GitHub returns it as plain git ref
+ tag_version = coerce(ref["ref"].replace("refs/tags/", ""))
+ if tag_version is None:
+                # we skip every tag that is not semver-compliant
+ continue
+ if latest_version is None or tag_version.compare(latest_version) > 0:
+ # if we have a "greater" semver version, set it as latest
+ latest_version = tag_version
+ latest_ref = ref
+
+ # raise if no valid semver tag is found
+ if latest_ref is None or latest_version is None:
+ raise ValueError(f"No tags following semver found in {repo}")
+
+ # we get the tag since GitHub returns it as plain git ref
+ latest_tag = latest_ref["ref"].replace("refs/tags/", "")
+
+ if latest_version.compare(current_version) <= 0:
+ return {
+ "has_updates": False,
+ }
+
+ return {
+ "has_updates": True,
+ "version": latest_tag,
+ "compare_url": f"https://github.com/{repo}/compare/{current_tag}...{latest_tag}",
+ "head_ref": latest_ref["object"]["sha"],
+ "head_url": f"https://github.com/{repo}/releases/tag/{latest_tag}",
+ }
+ else:
+ # If the request was not successful, raise an exception
+ raise Exception(
+ f"GitHub API request failed with status code {response.status_code}: {response.json()}"
+ )
+
+ @staticmethod
+ def check_updates(repo, branch, version) -> UpdateStatusFalse | UpdateStatusTrue:
+ url = f"https://api.github.com/repos/{repo}/compare/{version}...{branch}"
+
+ # Send a GET request to the GitHub API
+ response = requests.get(url)
+
+ # If the request was successful
+ if response.status_code == 200:
+ # Parse the JSON response
+ data = response.json()
+
+ # If the base is behind the head, there is a newer version
+ has_updates = data["status"] != "identical"
+
+ if not has_updates:
+ return {
+ "has_updates": False,
+ }
+
+ return {
+ "has_updates": data["status"] != "identical",
+ "version": data["commits"][-1]["sha"],
+ "compare_url": data["permalink_url"],
+ "head_ref": data["commits"][-1]["sha"],
+ "head_url": data["commits"][-1]["html_url"],
+ }
+ else:
+ # If the request was not successful, raise an exception
+ raise Exception(
+ f"GitHub API request failed with status code {response.status_code}: {response.json()}"
+ )
+
+ @staticmethod
+ def create_issue(title: str, body: str) -> None:
+ cmd = ["gh", "issue", "create", "-t", title, "-b", body]
+ CommandRunner.run_or_fail(cmd, stage="CreateIssue")
+
+ @staticmethod
+ def create_pr(branch: str, title: str, body: str) -> None:
+ # first of all let's check if PR is already open
+ check_cmd = [
+ "gh",
+ "pr",
+ "list",
+ "--state",
+ "open",
+ "--head",
+ branch,
+ "--json",
+ "title",
+ ]
+        # returncode is 0 even if no PRs are found
+ output = json.loads(
+ CommandRunner.run_or_fail(check_cmd, stage="CheckPullRequestOpen")
+ .stdout.decode("utf-8")
+ .strip()
+ )
+ # we have PR in this case!
+ if len(output) > 0:
+ return
+ cmd = [
+ "gh",
+ "pr",
+ "create",
+ "-B",
+ Git.default_branch,
+ "-H",
+ branch,
+ "-t",
+ title,
+ "-b",
+ body,
+ ]
+ CommandRunner.run_or_fail(cmd, stage="CreatePullRequest")
+
+
+def main():
+ # Load the YAML file
+ with open(DEPS_YAML_FILE, "r") as yaml_file:
+ data: DependencyYAML = yaml.safe_load(yaml_file)
+
+ if "dependencies" not in data:
+ raise Exception("dependencies.yml not properly formatted")
+
+ # Cache YAML version
+ DependencyStore.set(data)
+
+ dependencies = data["dependencies"]
+ for path in dependencies:
+ dependency = Dependency(path, dependencies[path])
+ dependency.update_or_notify()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/workflows/installer.yml b/.github/workflows/installer.yml
index cad5d445b..eef5bf322 100644
--- a/.github/workflows/installer.yml
+++ b/.github/workflows/installer.yml
@@ -25,8 +25,13 @@ jobs:
- ubuntu-latest
- macos-latest
steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
+ with:
+ egress-policy: audit
+
- name: Set up git repository
- uses: actions/checkout@v3
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install zsh
if: runner.os == 'Linux'
run: sudo apt-get update; sudo apt-get install zsh
@@ -41,8 +46,13 @@ jobs:
needs:
- test
steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
+ with:
+ egress-policy: audit
+
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install Vercel CLI
run: npm install -g vercel
- name: Setup project and deploy
diff --git a/.github/workflows/installer/vercel.json b/.github/workflows/installer/vercel.json
index 524dc3c0f..88ec18725 100644
--- a/.github/workflows/installer/vercel.json
+++ b/.github/workflows/installer/vercel.json
@@ -1,7 +1,7 @@
{
"headers": [
{
- "source": "/((?!favicon.ico).*)",
+ "source": "/(|install.sh)",
"headers": [
{
"key": "Content-Type",
@@ -16,7 +16,7 @@
],
"rewrites": [
{
- "source": "/((?!favicon.ico|install.sh).*)",
+ "source": "/",
"destination": "/install.sh"
}
]
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index de7d98262..5917d5316 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -23,8 +23,13 @@ jobs:
runs-on: ubuntu-latest
if: github.repository == 'ohmyzsh/ohmyzsh'
steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
+ with:
+ egress-policy: audit
+
- name: Set up git repository
- uses: actions/checkout@v3
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install zsh
run: sudo apt-get update; sudo apt-get install zsh
- name: Check syntax
diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml
index 1d961d8c0..70597cab6 100644
--- a/.github/workflows/project.yml
+++ b/.github/workflows/project.yml
@@ -15,11 +15,20 @@ jobs:
name: Add to project
runs-on: ubuntu-latest
if: github.repository == 'ohmyzsh/ohmyzsh'
- env:
- GITHUB_TOKEN: ${{ secrets.PROJECT_TOKEN }}
steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
+ with:
+ egress-policy: audit
+ - name: Authenticate as @ohmyzsh
+ id: generate-token
+ uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
+ with:
+ app-id: ${{ secrets.OHMYZSH_APP_ID }}
+ private-key: ${{ secrets.OHMYZSH_APP_PRIVATE_KEY }}
- name: Read project data
env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
ORGANIZATION: ohmyzsh
PROJECT_NUMBER: "1"
run: |
@@ -42,14 +51,14 @@ jobs:
}' -f org=$ORGANIZATION -F number=$PROJECT_NUMBER > project_data.json
# Parse project data
- cat >> $GITHUB_ENV <<EOF
+ cat >> "$GITHUB_ENV" <<EOF
PROJECT_ID=$(jq '.data.organization.projectV2.id' project_data.json)
PLUGIN_FIELD_ID=$(jq '.data.organization.projectV2.fields.nodes[] | select(.name == "Plugin") | .id' project_data.json)
THEME_FIELD_ID=$(jq '.data.organization.projectV2.fields.nodes[] | select(.name == "Theme") | .id' project_data.json)
EOF
-
- name: Add to project
env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
ISSUE_OR_PR_ID: ${{ github.event.issue.node_id || github.event.pull_request.node_id }}
run: |
item_id="$(gh api graphql -f query='
@@ -60,45 +69,51 @@ jobs:
}
}
}
- ' -f project=$PROJECT_ID -f content=$ISSUE_OR_PR_ID --jq '.data.addProjectV2ItemById.item.id')"
+ ' -f project="$PROJECT_ID" -f content="$ISSUE_OR_PR_ID" --jq '.data.addProjectV2ItemById.item.id')"
echo "ITEM_ID=$item_id" >> $GITHUB_ENV
-
- name: Classify Pull Request
if: github.event_name == 'pull_request_target'
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
- touch plugins.list themes.list
-
- gh pr view ${{ github.event.pull_request.number }} \
- --repo ${{ github.repository }} \
+ # Get the list of modified files in the PR, and extract plugins and themes
+ gh pr view "$PR_NUMBER" \
+ --repo "$GITHUB_REPOSITORY" \
--json files --jq '.files.[].path' | awk -F/ '
+ BEGIN {
+ plugins = 0
+ themes = 0
+ }
/^plugins\// {
- plugins[$2] = 1
+ if (plugin == $2) next
+ plugin = $2
+ plugins++
}
/^themes\// {
gsub(/\.zsh-theme$/, "", $2)
- themes[$2] = 1
+ if (theme == $2) next
+ theme = $2
+ themes++
}
END {
- for (plugin in plugins) {
- print plugin >> "plugins.list"
+ # plugin and theme are values controlled by the PR author
+ # so we should sanitize them before using anywhere else
+ if (plugins == 1) {
+ gsub(/[^a-zA-Z0-9._-]/, "", plugin)
+ print "PLUGIN=" plugin
}
- for (theme in themes) {
- print theme >> "themes.list"
+ if (themes == 1) {
+ gsub(/[^a-zA-Z0-9._-]/, "", theme)
+ print "THEME=" theme
}
}
- '
- # If only one plugin is modified, add it to the plugin field
- if [[ $(wc -l < plugins.list) = 1 ]]; then
- echo "PLUGIN=$(cat plugins.list)" >> $GITHUB_ENV
- fi
- # If only one theme is modified, add it to the theme field
- if [[ $(wc -l < themes.list) = 1 ]]; then
- echo "THEME=$(cat themes.list)" >> $GITHUB_ENV
- fi
-
+ ' >> "$GITHUB_ENV"
- name: Fill Pull Request fields in project
if: github.event_name == 'pull_request_target'
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
gh api graphql -f query='
mutation (
@@ -134,7 +149,7 @@ jobs:
}
}
}
- ' -f project=$PROJECT_ID -f item=$ITEM_ID \
- -f plugin_field=$PLUGIN_FIELD_ID -f plugin_value=$PLUGIN \
- -f theme_field=$THEME_FIELD_ID -f theme_value=$THEME \
+ ' -f project="$PROJECT_ID" -f item="$ITEM_ID" \
+ -f plugin_field="$PLUGIN_FIELD_ID" -f plugin_value="$PLUGIN" \
+ -f theme_field="$THEME_FIELD_ID" -f theme_value="$THEME" \
--silent
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
new file mode 100644
index 000000000..8c8113b2c
--- /dev/null
+++ b/.github/workflows/scorecard.yml
@@ -0,0 +1,65 @@
+# This workflow uses actions that are not certified by GitHub. They are provided
+# by a third-party and are governed by separate terms of service, privacy
+# policy, and support documentation.
+
+name: Scorecard supply-chain security
+on:
+ # For Branch-Protection check. Only the default branch is supported. See
+ # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
+ branch_protection_rule:
+ # To guarantee Maintained check is occasionally updated. See
+ # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
+ schedule:
+ - cron: '20 7 * * 2'
+ push:
+ branches: ["master"]
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+ analysis:
+ name: Scorecard analysis
+ runs-on: ubuntu-latest
+ permissions:
+ # Needed to upload the results to code-scanning dashboard.
+ security-events: write
+ # Needed to publish results and get a badge (see publish_results below).
+ id-token: write
+ contents: read
+ actions: read
+ # To allow GraphQL ListCommits to work
+ issues: read
+ pull-requests: read
+ # To detect SAST tools
+ checks: read
+
+ steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
+ with:
+ egress-policy: audit
+
+ - name: "Checkout code"
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ with:
+ persist-credentials: false
+
+ - name: "Run analysis"
+ uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3
+ with:
+ results_file: results.sarif
+ results_format: sarif
+ publish_results: true
+
+ - name: "Upload artifact"
+ uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+ with:
+ name: SARIF file
+ path: results.sarif
+ retention-days: 5
+
+ - name: "Upload to code-scanning"
+ uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
+ with:
+ sarif_file: results.sarif