@@ -5,6 +5,9 @@
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{%- if is_preview %}
<meta http-equiv="Cache-Control" content="max-age=300">
{%- endif %}
<style>
/* Base colors for Altinity */
:root {
@@ -152,7 +155,7 @@
</tr>
<tr>
<th class="hth no-sort">Build Report</th>
<td><a href="https://s3.amazonaws.com/{{ s3_bucket }}/{{ pr_number }}/{{ commit_sha }}/builds/report.html">Build Report</a></td>
<td>{% for job_name, link in build_report_links.items() %}<a href="{{ link }}">[{{ job_name }}]</a> {% endfor %}</td>
</tr>
<tr>
<th class="hth no-sort">Date</th>
@@ -223,6 +226,8 @@
element.className = element.className.replace(regex_dir, '') + dir
}
function getValue(element) {
var childWithSort = element.querySelector('[data-sort]')
if (childWithSort) return childWithSort.getAttribute('data-sort')
return (
(alt_sort && element.getAttribute('data-sort-alt')) ||
element.getAttribute('data-sort') || element.innerText
34 changes: 24 additions & 10 deletions .github/actions/create_workflow_report/create_workflow_report.py
@@ -55,6 +55,9 @@ def query_dataframe_with_retry(
time.sleep(wait)


CVE_SEVERITY_ORDER = {"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5}


def get_commit_statuses(sha: str) -> pd.DataFrame:
"""
Fetch commit statuses for a given SHA and return as a pandas DataFrame.
@@ -431,7 +434,9 @@ def get_cves(pr_number, commit_sha):
Bucket=S3_BUCKET, Prefix=s3_prefix, Delimiter="/"
)
grype_result_dirs = [
content["Prefix"] for content in response.get("CommonPrefixes", [])
content["Prefix"]
for content in response.get("CommonPrefixes", [])
if isinstance(content, dict) and content.get("Prefix")
]

if len(grype_result_dirs) == 0:
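
The guard added above matters because `list_objects_v2` with `Delimiter="/"` returns each "subdirectory" as a dict under `CommonPrefixes`. A minimal sketch of the call shape, with a hypothetical bucket and prefix:

import boto3

s3_client = boto3.client("s3")
# Hypothetical bucket and prefix, for illustration only.
response = s3_client.list_objects_v2(
    Bucket="example-build-artifacts", Prefix="1234/deadbeef/grype/", Delimiter="/"
)
# With Delimiter="/", grouped keys come back as CommonPrefixes entries,
# each shaped like {"Prefix": "1234/deadbeef/grype/<image>/"}.
grype_result_dirs = [
    content["Prefix"]
    for content in response.get("CommonPrefixes", [])
    if isinstance(content, dict) and content.get("Prefix")
]
print(grype_result_dirs)
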
@@ -461,12 +466,13 @@ def get_cves(pr_number, commit_sha):
return pd.DataFrame()

df = pd.DataFrame(rows).drop_duplicates()
df = df.sort_values(
by="severity",
key=lambda col: col.str.lower().map(
{"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5}
),
)

def _cve_sort_key(col):
if col.name == "severity":
return col.str.lower().map(CVE_SEVERITY_ORDER)
return col

df = df.sort_values(by=["severity", "docker_image"], key=_cve_sort_key)
return df


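A quirk worth noting: when `by` lists several columns, pandas applies the `key` callable to each column independently, which is why `_cve_sort_key` branches on `col.name`. A self-contained sketch with made-up rows:

import pandas as pd

CVE_SEVERITY_ORDER = {"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5}

def _cve_sort_key(col: pd.Series) -> pd.Series:
    # Map severities to numeric ranks; leave every other column untouched.
    if col.name == "severity":
        return col.str.lower().map(CVE_SEVERITY_ORDER)
    return col

df = pd.DataFrame(
    {
        "severity": ["Low", "Critical", "High", "Critical"],
        "docker_image": ["server", "keeper", "server", "client"],
    }
)
print(df.sort_values(by=["severity", "docker_image"], key=_cve_sort_key))
# Critical rows come first (client before keeper), then High, then Low.
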
@@ -510,6 +516,9 @@ def format_results_as_html_table(results) -> str:
"Identifier": lambda i: url_to_html_link(
"https://nvd.nist.gov/vuln/detail/" + i
),
"Severity": lambda s: (
f'<span data-sort="{CVE_SEVERITY_ORDER.get(str(s).lower(), 6)}">{s}</span>'
),
},
escape=False,
border=0,
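
This `Severity` formatter pairs with the `getValue` change in the HTML template above: `to_html` embeds a machine-readable rank in a child `<span data-sort="...">`, and the client-side sorter now prefers that child's `data-sort` over the cell text, so severities order Critical through Negligible instead of alphabetically. A minimal sketch with a hypothetical one-row frame:

import pandas as pd

CVE_SEVERITY_ORDER = {"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5}

df = pd.DataFrame({"Identifier": ["CVE-2024-0001"], "Severity": ["High"]})
html = df.to_html(
    formatters={
        "Severity": lambda s: (
            f'<span data-sort="{CVE_SEVERITY_ORDER.get(str(s).lower(), 6)}">{s}</span>'
        )
    },
    escape=False,  # keep the injected <span> markup intact
    border=0,
    index=False,
)
# The rendered cell is: <td><span data-sort="2">High</span></td>
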
@@ -575,11 +584,16 @@ def main():
"pr_new_fails": [],
"checks_errors": get_checks_errors(db_client, args.commit_sha, branch_name),
"regression_fails": get_regression_fails(db_client, args.actions_run_url),
"docker_images_cves": (
[] if not args.cves else get_cves(args.pr_number, args.commit_sha)
),
"docker_images_cves": [],
}

try:
fail_results["docker_images_cves"] = (
[] if not args.cves else get_cves(args.pr_number, args.commit_sha)
)
except Exception as e:
print(f"Error in get_cves: {e}")

# get_cves returns ... in the case where no Grype result files were found.
# This might occur when run in preview mode.
cves_not_checked = not args.cves or fail_results["docker_images_cves"] is ...
13 changes: 7 additions & 6 deletions .github/workflows/release_branches.yml
@@ -1,5 +1,6 @@
# yamllint disable rule:comments-indentation
name: ReleaseBranchCI
run-name: "${{ github.event.inputs.workflow_name || ' ' }}"

env:
# Force the stdout and stderr streams to be unbuffered
@@ -21,9 +22,8 @@ on: # yamllint disable-line rule:truthy
- opened
branches:
- 'releases/*'
- 'stable-*'
push:
branches:
- 'releases/*'
tags:
- '*'
workflow_dispatch:
@@ -418,21 +418,21 @@ jobs:
secrets: inherit
with:
runner_type: altinity-regression-tester
commit: e1f4e6ae0d862388e113cbc74b8dab32b9db64fc
commit: b28bcd03f30440f25a17917e5005670c28e3a703
arch: release
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
timeout_minutes: 210
RegressionTestsAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'regression') && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'aarch64')}}
uses: ./.github/workflows/regression.yml
secrets: inherit
with:
runner_type: altinity-regression-tester-aarch64
commit: e1f4e6ae0d862388e113cbc74b8dab32b9db64fc
commit: b28bcd03f30440f25a17917e5005670c28e3a703
arch: aarch64
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
timeout_minutes: 300
timeout_minutes: 210
SignRelease:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
@@ -478,6 +478,7 @@ jobs:
- RegressionTestsAarch64
- GrypeScan
- SignRelease
- SignAarch64
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
steps:
- name: Check out repository code
4 changes: 3 additions & 1 deletion .github/workflows/reusable_sign.yml
@@ -160,7 +160,9 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: ${{inputs.test_name}} signed-hashes
path: ${{ env.TEMP_PATH }}/*.gpg
path: |
${{ runner.temp }}/signed/*.gpg
${{ runner.temp }}/signed/signing_pubkey.asc
- name: Clean
if: always()
uses: ./.github/actions/clean
34 changes: 33 additions & 1 deletion tests/ci/build_download_helper.py
@@ -154,6 +154,16 @@ def read_build_urls(build_name: str, reports_path: Union[Path, str]) -> List[str]:
if artifact_report.is_file():
with open(artifact_report, "r", encoding="utf-8") as f:
return json.load(f)["build_urls"] # type: ignore
pr_info = None
try:
from pr_info import PRInfo # pylint: disable=import-outside-toplevel

pr_info = PRInfo()
except Exception as ex:
logger.warning("Failed to init PRInfo while selecting build report: %s", ex)

reports_by_sha = [] # type: List[List[str]]
fallback_reports = [] # type: List[List[str]]
for root, _, files in os.walk(reports_path):
for file in files:
if file.endswith(f"_{build_name}.json"):
@@ -162,7 +172,29 @@ def read_build_urls(build_name: str, reports_path: Union[Path, str]) -> List[str]:
os.path.join(root, file), "r", encoding="utf-8"
) as file_handler:
build_report = json.load(file_handler)
return build_report["build_urls"] # type: ignore
build_urls = build_report.get("build_urls", [])
if not isinstance(build_urls, list):
continue
fallback_reports.append(build_urls)
if pr_info and pr_info.sha and any(
f"/{pr_info.sha}/" in str(url) for url in build_urls
):
reports_by_sha.append(build_urls)

if reports_by_sha:
logger.info(
"Using build report matched by SHA [%s] for [%s]",
pr_info.sha if pr_info else "",
build_name,
)
return reports_by_sha[0]

if fallback_reports:
logger.warning(
"No SHA-matched build report found for [%s], fallback to first discovered",
build_name,
)
return fallback_reports[0]

logger.info("A build report is not found for %s", build_name)
return []
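
In short, the new selection logic prefers a report whose artifact URLs embed the current head SHA and only falls back to the first report discovered. The predicate itself reduces to a small, testable function; a sketch with hypothetical URLs:

from typing import List

def urls_match_sha(build_urls: List[str], sha: str) -> bool:
    # A report "belongs" to a commit when the SHA appears as a path
    # segment in any of its artifact URLs.
    return bool(sha) and any(f"/{sha}/" in str(url) for url in build_urls)

# Hypothetical bucket layout, for illustration only.
urls = ["https://s3.amazonaws.com/bucket/123/abc123/package_release/clickhouse-common-static.deb"]
print(urls_match_sha(urls, "abc123"))  # True
print(urls_match_sha(urls, "def456"))  # False
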
4 changes: 4 additions & 0 deletions tests/ci/ci.py
@@ -303,6 +303,10 @@ def _pre_action(s3, job_name, batch, indata, pr_info):
# testing), otherwise reports won't be found
if not (pr_info.is_scheduled or pr_info.is_dispatched):
report_prefix = Utils.normalize_string(pr_info.head_ref)
elif isinstance(pr_info.ref, str) and pr_info.ref.startswith("refs/tags/"):
# For tag-triggered runs, use tag name as prefix to avoid downloading
# reports from other runs with the same digest.
report_prefix = Utils.normalize_string(pr_info.head_ref)
elif pr_info.is_pr:
report_prefix = str(pr_info.number)
print(
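
Condensed, the prefix selection now reads as follows (a sketch only; `pr_info` fields and the `normalize` helper stand in for `PRInfo` and `Utils.normalize_string`):

def choose_report_prefix(pr_info, normalize) -> str:
    # Runs that are neither scheduled nor dispatched key reports off the
    # head ref; scheduled/dispatched runs triggered by a tag use the tag
    # name so they never download reports from other runs with the same
    # digest; PR runs key off the PR number.
    if not (pr_info.is_scheduled or pr_info.is_dispatched):
        return normalize(pr_info.head_ref)
    if isinstance(pr_info.ref, str) and pr_info.ref.startswith("refs/tags/"):
        return normalize(pr_info.head_ref)
    if pr_info.is_pr:
        return str(pr_info.number)
    return "default"  # hypothetical fallback; the real default lives elsewhere in ci.py
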
85 changes: 70 additions & 15 deletions tests/ci/sign_release.py
@@ -2,17 +2,21 @@
import sys
import os
import logging
import subprocess
from env_helper import TEMP_PATH, REPO_COPY, REPORT_PATH
from s3_helper import S3Helper
from pr_info import PRInfo
from build_download_helper import download_builds_filter
from report import FAIL, OK, FAILURE, SUCCESS, JobReport, TestResult
from stopwatch import Stopwatch
import hashlib
from pathlib import Path

GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY")
GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE")

CHECK_NAME = os.getenv("CHECK_NAME", "Sign release")
SIGNING_PUBLIC_KEY_FILE = "signing_pubkey.asc"

def hash_file(file_path):
BLOCK_SIZE = 65536 # The size of each read from the file
@@ -32,22 +36,47 @@ def hash_file(file_path):

return hash_file_path

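The body of `hash_file` is collapsed in this view; judging by the `BLOCK_SIZE` constant and the `*.sha512.gpg` artifact names below, it most likely streams the file in chunks and writes the hex digest to a sibling `<name>.sha512` file. A hedged reconstruction, not the verbatim implementation:

import hashlib

def hash_file_sketch(file_path: str, block_size: int = 65536) -> str:
    # Stream the file block by block so large packages never load
    # fully into memory, then write the digest next to the artifact.
    hasher = hashlib.sha512()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(block_size), b""):
            hasher.update(chunk)
    hash_file_path = f"{file_path}.sha512"
    with open(hash_file_path, "w", encoding="utf-8") as f:
        f.write(hasher.hexdigest())
    return hash_file_path
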
def sign_file(file_path):
def import_private_signing_key():
priv_key_file_path = 'priv.key'
with open(priv_key_file_path, 'x') as f:
with open(priv_key_file_path, 'w') as f:
f.write(GPG_BINARY_SIGNING_KEY)

out_file_path = f'{file_path}.gpg'
try:
subprocess.run(
f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}',
shell=True,
check=True,
)
finally:
os.remove(priv_key_file_path)

os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}')
os.system(f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}')

def sign_file(file_path):
out_file_path = f'{file_path}.gpg'
subprocess.run(
f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}',
shell=True,
check=True,
)
print(f"Signed {file_path}")
os.remove(priv_key_file_path)

return out_file_path


def export_public_signing_key(out_file_path: Path):
subprocess.run(
f"gpg --armor --output {out_file_path} --export",
shell=True,
check=True,
)
print(f"Exported signing public key to {out_file_path}")

def main():
stopwatch = Stopwatch()
reports_path = Path(REPORT_PATH)
test_results = []
state = SUCCESS
description = "Signed artifact hashes successfully"

if not os.path.exists(TEMP_PATH):
os.makedirs(TEMP_PATH)
@@ -65,15 +94,29 @@ def main():
# downloads `package_release` artifacts generated
download_builds_filter(CHECK_NAME, reports_path, Path(TEMP_PATH))

for f in os.listdir(TEMP_PATH):
full_path = os.path.join(TEMP_PATH, f)
if os.path.isdir(full_path):
continue
hashed_file_path = hash_file(full_path)
signed_file_path = sign_file(hashed_file_path)
s3_path = s3_path_prefix / os.path.basename(signed_file_path)
s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path))
print(f'Uploaded file {signed_file_path} to {s3_path}')
try:
import_private_signing_key()
for f in os.listdir(TEMP_PATH):
full_path = os.path.join(TEMP_PATH, f)
if os.path.isdir(full_path):
continue
hashed_file_path = hash_file(full_path)
signed_file_path = sign_file(hashed_file_path)
s3_path = s3_path_prefix / os.path.basename(signed_file_path)
s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path))
print(f'Uploaded file {signed_file_path} to {s3_path}')
test_results.append(TestResult(name=os.path.basename(full_path), status=OK))

public_key_path = Path(TEMP_PATH) / SIGNING_PUBLIC_KEY_FILE
export_public_signing_key(public_key_path)
s3_helper.upload_build_file_to_s3(
public_key_path, str(s3_path_prefix / SIGNING_PUBLIC_KEY_FILE)
)
test_results.append(TestResult(name=SIGNING_PUBLIC_KEY_FILE, status=OK))
except Exception as ex:
state = FAILURE
description = f"Failed to sign release artifacts: {ex}"
test_results.append(TestResult(name=CHECK_NAME, status=FAIL, raw_logs=str(ex)))

# Signed hashes are:
# clickhouse-client_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg
@@ -91,6 +134,18 @@ def main():
# clickhouse-keeper_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-server-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg
# clickhouse-keeper-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse.sha512.gpg

JobReport(
description=description,
test_results=test_results,
status=state,
start_time=stopwatch.start_time_str,
duration=stopwatch.duration_seconds,
additional_files=[],
).dump()

if state == FAILURE:
sys.exit(1)

sys.exit(0)

if __name__ == "__main__":
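
One design note on the signing commands above: both the old `os.system` calls and the new `subprocess.run(..., shell=True)` calls interpolate the passphrase into a shell line, where it can show up in the process list. A hedged alternative sketch that keeps the same gpg flags but passes an argument list and feeds the passphrase on stdin via `--passphrase-fd 0`:

import subprocess

def sign_file_stdin(file_path: str, passphrase: str) -> str:
    # Sketch: no shell interpolation; gpg reads the passphrase from fd 0.
    out_file_path = f"{file_path}.gpg"
    subprocess.run(
        [
            "gpg", "-o", out_file_path,
            "--pinentry-mode", "loopback",
            "--batch", "--yes",
            "--passphrase-fd", "0",
            "--sign", file_path,
        ],
        input=passphrase.encode(),
        check=True,
    )
    return out_file_path
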