+# Bash strict mode (enabled only when executed directly, not sourced)
+if ! (return 0 2>/dev/null); then
+    set -euo pipefail
+fi
+
 # Expect host is Linux/x86_64, Linux/aarch64, macOS/arm64
 
 MACHINE_TYPE=$(uname -m)
@@ -15,12 +20,67 @@ check_platform()
     esac
 }
 
-if [[ "${OS_TYPE}" == "Linux" ]]; then
+if [ "${OS_TYPE}" = "Linux" ]; then
    PARALLEL=-j$(nproc)
 else
    PARALLEL=-j$(sysctl -n hw.logicalcpu)
 fi
 
+# Color output helpers
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+print_success()
+{
+    echo -e "${GREEN}[SUCCESS]${NC} $1"
+}
+
+print_error()
+{
+    echo -e "${RED}[ERROR]${NC} $1" >&2
+}
+
+print_warning()
+{
+    echo -e "${YELLOW}[WARNING]${NC} $1"
+}
+
+# Assertion function for tests
+# Usage: ASSERT <condition> <error_message>
+ASSERT()
+{
+    local condition=$1
+    shift
+    local message="$*"
+
+    if ! eval "${condition}"; then
+        print_error "Assertion failed: ${message}"
+        print_error "Condition: ${condition}"
+        return 1
+    fi
+}
+
+# Cleanup function registry
+CLEANUP_FUNCS=()
+
+register_cleanup()
+{
+    CLEANUP_FUNCS+=("$1")
+}
+
+cleanup()
+{
+    local func
+    for func in "${CLEANUP_FUNCS[@]-}"; do
+        [ -n "${func}" ] || continue
+        eval "${func}" || true
+    done
+}
+
+trap cleanup EXIT
+
 # Universal download utility with curl/wget compatibility
 # Provides consistent interface regardless of which tool is available
 
@@ -49,7 +109,7 @@ download_to_stdout()
     local url="$1"
     case "$DOWNLOAD_TOOL" in
         curl)
-            curl -fsSL "$url"
+            curl -fS --retry 5 --retry-delay 2 --retry-max-time 60 -sL "$url"
             ;;
         wget)
             wget -qO- "$url"
@@ -66,7 +126,7 @@ download_to_file()
     local url="$1"
     local output="$2"
         curl)
-            curl -fsSL -o "$output" "$url"
+            curl -fS --retry 5 --retry-delay 2 --retry-max-time 60 -sL -o "$output" "$url"
             ;;
         wget)
             wget -q -O "$output" "$url"
@@ -88,7 +148,7 @@ download_with_headers()
             for header in "$@"; do
                 headers+=(-H "$header")
             done
-            curl -fsSL "${headers[@]}" "$url"
+            curl -fS --retry 5 --retry-delay 2 --retry-max-time 60 -sL "${headers[@]}" "$url"
             ;;
         wget)
             for header in "$@"; do
@@ -107,7 +167,7 @@ download_silent()
     local url="$1"
     case "$DOWNLOAD_TOOL" in
         curl)
-            curl -fsSL "$url"
+            curl -fS --retry 5 --retry-delay 2 --retry-max-time 60 -sL "$url"
             ;;
         wget)
             wget -qO- "$url"
@@ -124,7 +184,7 @@ download_with_progress()
     local url="$1"
     local output="$2"
         curl)
-            curl -fL -# -o "$output" "$url"
+            curl -fS --retry 5 --retry-delay 2 --retry-max-time 60 -L -# -o "$output" "$url"
             ;;
         wget)
             wget -O "$output" "$url"
@@ -141,7 +201,7 @@ check_url()
     local url="$1"
     case "$DOWNLOAD_TOOL" in
         curl)
-            curl -fsSL --head "$url" >/dev/null 2>&1
+            curl -fS --retry 5 --retry-delay 2 --retry-max-time 60 -sL --head "$url" >/dev/null 2>&1
             ;;
         wget)
             wget --spider -q "$url" 2>/dev/null
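
For reviewers unfamiliar with the `(return 0 2>/dev/null)` idiom at the top of the diff: `return` only succeeds inside a function or a sourced file, so the guard enables strict mode only when the script is executed directly. Below is a minimal standalone sketch of that behavior; the file name `guard_demo.sh` is illustrative and not part of this commit.

```bash
#!/usr/bin/env bash
# guard_demo.sh -- illustrative sketch, not part of the commit.
# In a subshell, `return` fails unless the file is being sourced (or we are
# inside a function), so its exit status distinguishes the two cases.
if ! (return 0 2>/dev/null); then
    # Executed directly: enabling strict mode affects only this process.
    set -euo pipefail
    echo "executed directly: strict mode enabled"
else
    # Sourced: leave the caller's shell options untouched.
    echo "sourced: caller's shell options left alone"
fi
```

Running `bash guard_demo.sh` prints the first message, while `source guard_demo.sh` prints the second without enabling `set -e` in the calling shell.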
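
Likewise, a short sketch of how a test script might combine the new helpers (assertion, cleanup registry, and retrying downloads). The path `./common.sh` and the URL are placeholders rather than names taken from this repository.

```bash
#!/usr/bin/env bash
# Illustrative usage only; "./common.sh" stands in for wherever this helper
# library actually lives.
source ./common.sh

# The scratch file is removed on EXIT through the cleanup registry and trap.
TMP_FILE=$(mktemp)
register_cleanup "rm -f ${TMP_FILE}"

# Fetch with whichever download tool was detected, then assert on the result.
download_to_file "https://example.com/archive.tar.gz" "${TMP_FILE}"
ASSERT "[ -s ${TMP_FILE} ]" "downloaded archive should not be empty"
print_success "archive fetched to ${TMP_FILE}"
```

Because `cleanup` runs `eval` on each registered command and `ASSERT` evals its condition string, both are intended for trusted, test-internal strings rather than arbitrary user input.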