3
0
Fork 0
mirror of https://github.com/Z3Prover/z3 synced 2026-03-16 02:00:00 +00:00
z3/.github/scripts/fetch-artifacts.sh
Angelica Moreira db46d52056
fix memory-safety-report to download artifacts via MCP tools (#8979)
gh CLI is not available inside AWF so the agent could not download
artifacts. Switch to GitHub MCP actions toolset for artifact URLs
and add helper scripts for download and parsing.
2026-03-15 10:12:49 -07:00

51 lines
1.3 KiB
Bash
Executable file

#!/usr/bin/env bash
# fetch-artifacts.sh: download + extract ASan/UBSan artifact ZIPs.
#
# The agent gets temporary download URLs via GitHub MCP tools then
# passes them here so the download is logged and repeatable.
#
# usage: fetch-artifacts.sh <asan_url> [ubsan_url]
# output: /tmp/reports/{asan-reports,ubsan-reports}/
set -euo pipefail

readonly REPORT_DIR="/tmp/reports"
readonly LOG="/tmp/fetch-artifacts.log"

# log MESSAGE... — timestamped (UTC) line to stdout, appended to $LOG.
log() { printf '[%s] %s\n' "$(date -u +%H:%M:%S)" "$*" | tee -a "$LOG"; }

# First URL (ASan) is mandatory; second (UBSan) is optional.
asan_url="${1:?usage: $0 <asan_url> [ubsan_url]}"
ubsan_url="${2:-}"

# Start from a clean slate. The ${VAR:?} guard aborts if REPORT_DIR is
# ever empty/unset, so this can never degenerate into "rm -rf /".
rm -rf -- "${REPORT_DIR:?}"
mkdir -p "$REPORT_DIR/asan-reports" "$REPORT_DIR/ubsan-reports"
: > "$LOG"
# download_and_extract NAME URL DEST
# Download the artifact ZIP at URL to /tmp/NAME.zip and unpack it into
# DEST, logging each step. Returns 1 on download/extract failure (fatal
# for the caller under set -e).
download_and_extract() {
  local name=$1
  local url=$2
  local dest=$3
  local zip="/tmp/${name}.zip"

  log "$name: downloading"
  # Capture curl's real exit status. The original `if ! curl …` form
  # made $? inside the branch the status of the *negated* pipeline,
  # i.e. always 0, so the log always claimed "curl exit 0".
  local rc=0
  curl -fsSL "$url" -o "$zip" || rc=$?
  if (( rc != 0 )); then
    log "$name: download failed (curl exit $rc)"
    return 1
  fi

  # $((…)) trims the leading whitespace some wc implementations emit;
  # `wc -c <file` is portable where `stat -c%s` is GNU-only.
  log "$name: $(( $(wc -c < "$zip") )) bytes"

  if ! unzip -oq "$zip" -d "$dest"; then
    log "$name: unzip failed"
    return 1
  fi

  # Glob instead of parsing `ls` output (SC2012); collect basenames
  # first so the count line still precedes the per-file lines.
  local -a entries=()
  local f
  for f in "$dest"/*; do
    [[ -e "$f" ]] || continue   # skip the literal pattern when dest is empty
    entries+=("${f##*/}")
  done
  log "$name: extracted ${#entries[@]} files"
  for f in "${entries[@]}"; do
    log "  $f"
  done
}
# The ASan artifact is mandatory; a failed fetch aborts via set -e.
download_and_extract "asan" "$asan_url" "$REPORT_DIR/asan-reports"

# The UBSan artifact is optional — fetch only when a URL was supplied.
if [[ -z "$ubsan_url" ]]; then
  log "ubsan: skipped (no url)"
else
  download_and_extract "ubsan" "$ubsan_url" "$REPORT_DIR/ubsan-reports"
fi

log "all done"
# Emit the report directory on stdout so callers can capture it.
printf '%s\n' "$REPORT_DIR"