Fix release checksums.txt artifact generation #446

Merged
19 changes: 19 additions & 0 deletions .github/workflows/tooling-unit-tests.yml
@@ -96,3 +96,22 @@ jobs:
      - name: Run PyTest
        run: |
          pytest scripts/guideline_recategorization/recategorize_test.py

  release-tests:
    name: Run release tests
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"

      - name: Install Python dependencies
        run: pip install -r scripts/release/requirements.txt

      - name: Run PyTest
        run: |
          pytest scripts/release/update_release_assets_test.py
5 changes: 2 additions & 3 deletions .github/workflows/update-release.yml
@@ -54,13 +54,12 @@ jobs:
          GITHUB_TOKEN: ${{ github.token }}
          RELEASE_ENGINEERING_TOKEN: ${{ steps.generate-token.outputs.token }}
        run: |
          python scripts/release/update-release-assets.py \
          python scripts/release/update_release_assets.py \
          --head-sha $HEAD_SHA \
          --layout scripts/release/release-layout.yml \
          --repo "$GITHUB_REPOSITORY" \
          --github-token "$GITHUB_REPOSITORY:$GITHUB_TOKEN" "github/codeql-coding-standards-release-engineering:$RELEASE_ENGINEERING_TOKEN" \
          --skip-checkrun "release-status" \
          --skip-checks
          --skip-checkrun "release-status"

      - name: Update release notes
        env:
4 changes: 3 additions & 1 deletion scripts/release/release-layout.yml
@@ -20,4 +20,6 @@ layout:
    - file: docs/user_manual.md
  checksums.txt:
    - shell: |
        sha256sum ./* > checksums.txt
        sha256sum ${{ layout.root }}/* > checksums.txt
        # Remove the layout root from the paths in checksums.txt
        sed -i -e "s|${{ layout.root }}/||g" checksums.txt
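
For context, this relies on the ${{ layout.root }} placeholder being rewritten to the absolute path of the directory the release artifacts are staged in before the shell step runs; the actual substitution is the modifier list passed to ShellAction in update_release_assets.py below. A minimal sketch of that substitution (the function name here is illustrative, not part of the script):

    import re
    from pathlib import Path

    def expand_layout_root(command: str, layout_root: Path) -> str:
        # Replace every occurrence of ${{ layout.root }} with the staging directory path.
        return re.sub(r"\$\{\{\s*layout\.root\s*\}\}", str(layout_root), command)

    # expand_layout_root('sha256sum ${{ layout.root }}/* > checksums.txt', Path('/tmp/staging'))
    # yields 'sha256sum /tmp/staging/* > checksums.txt'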
3 changes: 2 additions & 1 deletion scripts/release/requirements.txt
@@ -1,4 +1,5 @@
semantic-version==2.10.0
PyGithub==1.59.1
PyYAML==6.0.1
GitPython==3.1.36
GitPython==3.1.36
pytest==7.4.3
19 changes: 19 additions & 0 deletions scripts/release/test-data/release-layout.yml
@@ -0,0 +1,19 @@
version: 0.1.0

layout:
  hello-world.txt:
    - shell: |
        echo "hello world!" > hello-world.txt
  hello-world.zip:
    - shell: |
        echo "hello!" > hello.txt
        echo "world!" > world.txt
        # reset the creation and modification times to a fixed value
        touch -a -m -t 197001010000.00 hello.txt world.txt
  checksums.txt:
    - shell: |
        shasum -a 256 ${{ layout.root }}/* > checksums.txt
        # Remove the layout root from the checksums.txt
        # We don't use inplace because of BSD vs GNU shenanigans
        sed -e "s|${{ layout.root }}/||g" checksums.txt > checksums-rewritten.txt
        mv checksums-rewritten.txt checksums.txt
scripts/release/update-release-assets.py → scripts/release/update_release_assets.py
@@ -1,5 +1,5 @@
from __future__ import annotations # This enables postponed evaluation of type annotations. Required for typing.TYPE_CHECKING. See https://peps.python.org/pep-0563/
from typing import TYPE_CHECKING, List, Union, cast, Dict, Any
from typing import TYPE_CHECKING, List, Union, cast, Dict, Any, TypeVar, Callable, Sequence, Optional
import shutil
from tempfile import TemporaryDirectory
import subprocess
@@ -12,7 +12,7 @@

if TYPE_CHECKING:
from github import WorkflowRun, Repository


script_path = Path(__file__).resolve()
root_path = script_path.parent.parent.parent
@@ -30,7 +30,7 @@ def get_check_runs(self: Repository.Repository, ref: str, **kwargs: str) -> Pagi
f"{self.url}/commits/{ref}/check-runs",
firstParams=None,
list_item="check_runs")

Repository.Repository = MyRepository

from github import WorkflowRun, Artifact
@@ -51,7 +51,7 @@ def download_logs(self, path: Path) -> None:
if self._requester._Requester__auth is not None: # type: ignore
headers["Authorization"] = f"{self._requester._Requester__auth.token_type} {self._requester._Requester__auth.token}" # type: ignore
headers["User-Agent"] = self._requester._Requester__userAgent # type: ignore

resp = requests.get(url, headers=headers, allow_redirects=True)

if resp.status_code != 200:
@@ -70,7 +70,7 @@ def download_artifacts(self, path: Path) -> None:
if self._requester._Requester__auth is not None: # type: ignore
headers["Authorization"] = f"{self._requester._Requester__auth.token_type} {self._requester._Requester__auth.token}" # type: ignore
headers["User-Agent"] = self._requester._Requester__userAgent # type: ignore

resp = requests.get(artifact.archive_download_url, headers=headers, allow_redirects=True)

if resp.status_code != 200:
@@ -93,15 +93,15 @@ def download_artifact(self, name: str, path: Path) -> None:
if self._requester._Requester__auth is not None: # type: ignore
headers["Authorization"] = f"{self._requester._Requester__auth.token_type} {self._requester._Requester__auth.token}" # type: ignore
headers["User-Agent"] = self._requester._Requester__userAgent # type: ignore

resp = requests.get(artifact.archive_download_url, headers=headers, allow_redirects=True)

if resp.status_code != 200:
raise Exception(f"Unable to download artifact ${artifact.name}. Received status code {resp.status_code} {resp.reason}")

with (path / f"{artifact.name}.zip").open("wb") as f:
f.write(resp.content)


WorkflowRun.WorkflowRun = MyWorkflowRun

@@ -124,12 +124,16 @@ def make(self, directory: Path, workflow_runs: List[WorkflowRun.WorkflowRun]) ->
elif action_type == "workflow-artifact":
actions.append(WorkflowArtifactAction(workflow_runs, **cast(Dict[str, Any], action_args)))
elif action_type == "shell":
actions.append(ShellAction(action_args))
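# Rewrite ${{ coding-standards.root }} and ${{ layout.root }} placeholders before the command runs; `directory` here is the layout (staging) root.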
modifiers : List[Callable[[str], str]] = [
lambda cmd: re.sub(pattern=r"\${{\s*coding-standards\.root\s*}}", repl=str(root_path), string=cmd),
lambda cmd: re.sub(pattern=r"\${{\s*layout\.root\s*}}", repl=str(directory), string=cmd)
]
actions.append(ShellAction(action_args, modifiers=modifiers))
elif action_type == "file":
actions.append(FileAction(action_args))
else:
raise Exception(f"Unknown action type {action_type}")

artifacts.append(ReleaseArtifact(artifact, actions, self.skip_checks))

for artifact in artifacts:
@@ -153,7 +157,7 @@ def run(self) -> List[Path]:
print(f"Downloading logs for {workflow_run.name}")
workflow_run.download_logs(Path(self.temp_workdir.name)) # type: ignore
return list(map(Path, Path(self.temp_workdir.name).glob("**/*")))

class WorkflowArtifactAction():

def __init__(self, workflow_runs: List[WorkflowRun.WorkflowRun], **kwargs: str) -> None:
@@ -176,17 +180,29 @@ def run(self) -> List[Path]:
print(f"Downloading artifacts for {workflow_run.name} to {self.temp_workdir.name}")
workflow_run.download_artifacts(Path(self.temp_workdir.name)) # type: ignore
return list(map(Path, Path(self.temp_workdir.name).glob("**/*")))

class ShellAction():
def __init__(self, command: str) -> None:
def __init__(self, command: str, **kwargs: Any) -> None:
self.command = command.strip()
self.temp_workdir = TemporaryDirectory()
self.options = kwargs

def _rewrite_command(self) -> str:
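# Apply each configured modifier to the command in turn (a small recursive left fold) so template placeholders are expanded before execution.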
E = TypeVar("E")
R = TypeVar("R")
def lfold(fn: Callable[[R, E], R], lst: Sequence[E], init: R) -> R:
return lfold(fn, lst[1:], fn(init, lst[0])) if lst else init
if 'modifiers' in self.options:
return lfold(lambda acc, x: x(acc), self.options['modifiers'], self.command)
else:
return self.command

def run(self) -> List[Path]:
concrete_command = re.sub(pattern=r"\${{\s*coding-standards\.root\s*}}", repl=str(root_path), string=self.command)
#concrete_command = re.sub(pattern=r"\${{\s*coding-standards\.root\s*}}", repl=str(root_path), string=self.command)
concrete_command = self._rewrite_command()
subprocess.run(concrete_command, cwd=self.temp_workdir.name, check=True, shell=True)
return list(map(Path, Path(self.temp_workdir.name).glob("**/*")))
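
A quick usage sketch of the new modifiers hook (the command and path below are illustrative only):

    action = ShellAction(
        "echo ${{ layout.root }}",
        modifiers=[lambda cmd: cmd.replace("${{ layout.root }}", "/tmp/staging")])
    action.run()  # executes: echo /tmp/staging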

class FileAction():
def __init__(self, path: Path) -> None:
self.path = path
@@ -200,7 +216,7 @@ def __init__(self, name: str, actions: List[Union[WorkflowLogAction, WorkflowArt
self.actions = actions
self.allow_no_files = allow_no_files

def make(self, directory: Path) -> Path:
def make(self, directory: Path) -> Optional[Path]:
files: list[Path] = [file for action in self.actions for file in action.run()]
if len(files) == 0:
if not self.allow_no_files:
@@ -212,8 +228,8 @@ def make(self, directory: Path) -> Path:
extension = "".join(self.name.suffixes)[1:]
if not extension in ["zip", "tar", "tar.gz", "tar.bz2", "tar.xz"]:
raise Exception(f"Artifact {self.name} is not a support archive file, but has multiple files associated with it!")
ext_format_map = {

ext_format_map = {
"zip": "zip",
"tar": "tar",
"tar.gz": "gztar",
@@ -225,7 +241,7 @@ def make(self, directory: Path) -> Path:
temp_dir_path = Path(temp_dir)
for file in files:
shutil.copy(file, temp_dir_path / file.name)

return Path(shutil.make_archive(str(directory / self.name.with_suffix("")), ext_format_map[extension], root_dir=temp_dir_path))

def main(args: 'argparse.Namespace') -> int:
@@ -248,13 +264,13 @@ def main(args: 'argparse.Namespace') -> int:
if len(pull_candidates) != 1:
print(f"Error: expected exactly one PR for SHA {args.head_sha}, but found {len(pull_candidates)}", file=sys.stderr)
return 1

pull_request = pull_candidates[0]

if pull_request.state != "open":
print(f"Error: PR {pull_request.url} is not open", file=sys.stderr)
return 1

print(f"Found PR {pull_request.url} based on {pull_request.base.ref}")

rc_branch_regex = r"^rc/(?P<version>.*)$"
@@ -286,7 +302,7 @@ def main(args: 'argparse.Namespace') -> int:

action_workflow_run_url_regex = r"^https://(?P<github_url>[^/]+)/(?P<owner>[^/]+)/(?P<repo>[^/]+)/actions/runs/(?P<run_id>\d+)$"
action_workflow_job_run_url_regex = r"^https://(?P<github_url>[^/]+)/(?P<owner>[^/]+)/(?P<repo>[^/]+)/actions/runs/(?P<run_id>\d+)/job/(?P<job_id>\d+)$"

workflow_runs: List[WorkflowRun.WorkflowRun] = []
for check_run in check_runs: # type: ignore
check_run = cast(CheckRun.CheckRun, check_run)
@@ -306,7 +322,7 @@ def main(args: 'argparse.Namespace') -> int:
else:
print(f"Unable to handle checkrun {check_run.name} with id {check_run.id} with {check_run.details_url}")
return 1

print("Filtering workflow runs to only include the latest run for each workflow.")
workflow_runs_per_id: Dict[int, WorkflowRun.WorkflowRun] = {}
for workflow_run in workflow_runs:
30 changes: 30 additions & 0 deletions scripts/release/update_release_assets_test.py
@@ -0,0 +1,30 @@
from pathlib import Path
from tempfile import TemporaryDirectory
import yaml
from update_release_assets import ReleaseLayout

SCRIPT_PATH = Path(__file__)
TEST_DIR = SCRIPT_PATH.parent / 'test-data'

def test_release_layout():
    spec = TEST_DIR / 'release-layout.yml'
    release_layout = ReleaseLayout(spec)
    with TemporaryDirectory() as tmp_dir:
        tmp_path = Path(tmp_dir)
        release_layout.make(tmp_path, [])

        for artifact in yaml.safe_load(spec.read_text())['layout'].keys():
            artifact_path = tmp_path / artifact
            assert artifact_path.is_file()

            if artifact == "hello-world.txt":
                content = artifact_path.read_text()
                assert content == "hello world!\n"
            if artifact == "checksums.txt":
                content = artifact_path.read_text()
                # The hash of the hello-world.txt is deterministic, so we can assert it here.
                assert "ecf701f727d9e2d77c4aa49ac6fbbcc997278aca010bddeeb961c10cf54d435a hello-world.txt" in content
                # The hash of the hello-world.zip is not deterministic, so we can't assert its hash.
                assert "hello-world.zip" in content